Diffstat (limited to 'lib')
-rw-r--r--lib/.gitignore8
-rw-r--r--lib/Makefile2
-rw-r--r--lib/asn1/doc/src/Makefile3
-rw-r--r--lib/asn1/doc/src/asn1_getting_started.xml81
-rw-r--r--lib/asn1/doc/src/asn1ct.xml62
-rw-r--r--lib/asn1/doc/src/asn1rt.xml135
-rw-r--r--lib/asn1/examples/recordnames.txt2
-rw-r--r--lib/asn1/src/Makefile1
-rw-r--r--lib/asn1/src/asn1.app.src1
-rw-r--r--lib/asn1/src/asn1_db.erl26
-rw-r--r--lib/asn1/src/asn1_records.hrl23
-rw-r--r--lib/asn1/src/asn1ct.erl250
-rw-r--r--lib/asn1/src/asn1ct_check.erl31
-rw-r--r--lib/asn1/src/asn1ct_constructed_ber_bin_v2.erl300
-rw-r--r--lib/asn1/src/asn1ct_constructed_per.erl933
-rw-r--r--lib/asn1/src/asn1ct_eval_ext.funcs1
-rw-r--r--lib/asn1/src/asn1ct_gen.erl675
-rw-r--r--lib/asn1/src/asn1ct_gen_ber_bin_v2.erl12
-rw-r--r--lib/asn1/src/asn1ct_gen_check.erl191
-rw-r--r--lib/asn1/src/asn1ct_gen_per.erl12
-rw-r--r--lib/asn1/src/asn1ct_imm.erl95
-rw-r--r--lib/asn1/src/asn1ct_value.erl31
-rw-r--r--lib/asn1/src/asn1rt.erl184
-rw-r--r--lib/asn1/src/asn1rtt_ext.erl62
-rw-r--r--lib/asn1/src/asn1rtt_per_common.erl2
-rw-r--r--lib/asn1/test/Makefile4
-rw-r--r--lib/asn1/test/asn1_SUITE.erl166
-rw-r--r--lib/asn1/test/asn1_SUITE_data/Maps.asn117
-rw-r--r--lib/asn1/test/asn1_SUITE_data/Prim.asn12
-rw-r--r--lib/asn1/test/asn1_SUITE_data/SeqExtension.asn111
-rw-r--r--lib/asn1/test/asn1_SUITE_data/extensionAdditionGroup.erl4
-rw-r--r--lib/asn1/test/asn1_SUITE_data/nbapsystem/NBAP-PDU-Descriptions.asn (renamed from lib/asn1/test/asn1_SUITE_data/nbapsystem/NBAP-PDU-Discriptions.asn)2
-rw-r--r--lib/asn1/test/asn1_SUITE_data/test_records.erl2
-rw-r--r--lib/asn1/test/asn1_SUITE_data/testobj.erl12
-rw-r--r--lib/asn1/test/asn1_app_SUITE.erl (renamed from lib/asn1/test/asn1_app_test.erl)58
-rw-r--r--lib/asn1/test/asn1_appup_test.erl58
-rw-r--r--lib/asn1/test/asn1_test_lib.erl105
-rw-r--r--lib/asn1/test/ber_decode_error.erl39
-rw-r--r--lib/asn1/test/h323test.erl29
-rw-r--r--lib/asn1/test/testChoPrim.erl8
-rw-r--r--lib/asn1/test/testContextSwitchingTypes.erl1
-rw-r--r--lib/asn1/test/testInfObj.erl1
-rw-r--r--lib/asn1/test/testInfObjectClass.erl22
-rw-r--r--lib/asn1/test/testMaps.erl50
-rw-r--r--lib/asn1/test/testMultipleLevels.erl6
-rw-r--r--lib/asn1/test/testNBAPsystem.erl14
-rw-r--r--lib/asn1/test/testPrim.erl47
-rw-r--r--lib/asn1/test/testPrimStrings.erl22
-rw-r--r--lib/asn1/test/testRfcs.erl50
-rw-r--r--lib/asn1/test/testSeqExtension.erl38
-rw-r--r--lib/asn1/test/testTCAP.erl1
-rw-r--r--lib/asn1/test/testTimer.erl131
-rw-r--r--lib/asn1/test/testUniqueObjectSets.erl1
-rw-r--r--lib/asn1/test/test_compile_options.erl28
-rw-r--r--lib/common_test/doc/src/Makefile3
-rw-r--r--lib/common_test/doc/src/common_test_app.xml26
-rw-r--r--lib/common_test/doc/src/ct_hooks.xml90
-rw-r--r--lib/common_test/doc/src/ct_hooks_chapter.xml74
-rw-r--r--lib/common_test/doc/src/ct_testspec.xml84
-rw-r--r--lib/common_test/doc/src/ref_man.xml1
-rw-r--r--lib/common_test/doc/src/write_test_chapter.xml2
-rw-r--r--lib/common_test/src/common_test.app.src1
-rw-r--r--lib/common_test/src/ct_framework.erl161
-rw-r--r--lib/common_test/src/ct_groups.erl14
-rw-r--r--lib/common_test/src/ct_hooks.erl141
-rw-r--r--lib/common_test/src/ct_release_test.erl2
-rw-r--r--lib/common_test/src/ct_run.erl27
-rw-r--r--lib/common_test/src/ct_testspec.erl35
-rw-r--r--lib/common_test/src/cth_conn_log.erl8
-rw-r--r--lib/common_test/src/cth_log_redirect.erl28
-rw-r--r--lib/common_test/src/cth_surefire.erl54
-rw-r--r--lib/common_test/src/test_server.erl10
-rw-r--r--lib/common_test/src/test_server_ctrl.erl116
-rw-r--r--lib/common_test/test/Makefile3
-rw-r--r--lib/common_test/test/ct_SUITE.erl53
-rw-r--r--lib/common_test/test/ct_error_SUITE.erl8
-rw-r--r--lib/common_test/test/ct_hooks_SUITE.erl1201
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/all_hook_callbacks_SUITE.erl62
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/config_clash_SUITE.erl (renamed from lib/percept/src/percept.appup.src)39
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_no_end_config_SUITE.erl51
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_no_init_config_SUITE.erl54
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_no_init_suite_config_SUITE.erl (renamed from lib/percept/src/egd.hrl)40
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/empty_cth.erl122
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/fail_post_suite_cth.erl32
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/fail_pre_suite_cth.erl40
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/fallback_cth.erl81
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/minimal_terminate_cth.erl6
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/prio_cth.erl40
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/recover_post_suite_cth.erl40
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/repeat_SUITE.erl (renamed from lib/typer/src/typer.appup.src)32
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/same_id_cth.erl40
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/seq_SUITE.erl (renamed from lib/percept/src/percept.app.src)58
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip.spec8
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_case_SUITE.erl106
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_cth.erl182
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_fail_SUITE.erl53
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_group_SUITE.erl64
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_init_SUITE.erl53
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_post_suite_cth.erl40
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_pre_end_cth.erl40
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_pre_init_tc_cth.erl79
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_pre_suite_cth.erl40
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_req_SUITE.erl53
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/state_update_cth.erl40
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/undef_cth.erl40
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/update_config_cth.erl40
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/verify_config_cth.erl40
-rw-r--r--lib/common_test/test/ct_hooks_SUITE_data/cth/tests/verify_data_dir_cth.erl40
-rw-r--r--lib/common_test/test/ct_repeat_testrun_SUITE.erl5
-rw-r--r--lib/common_test/test/ct_surefire_SUITE.erl129
-rw-r--r--lib/common_test/test/ct_surefire_SUITE_data/skip_one_case.spec2
-rw-r--r--lib/common_test/test/ct_surefire_SUITE_data/skip_one_suite.spec2
-rw-r--r--lib/common_test/test/ct_test_server_if_1_SUITE.erl1
-rw-r--r--lib/common_test/test/ct_test_support.erl24
-rw-r--r--lib/common_test/test/ct_testspec_2_SUITE.erl82
-rw-r--r--lib/compiler/doc/src/compile.xml13
-rw-r--r--lib/compiler/src/Makefile2
-rw-r--r--lib/compiler/src/beam_a.erl3
-rw-r--r--lib/compiler/src/beam_asm.erl68
-rw-r--r--lib/compiler/src/beam_block.erl3
-rw-r--r--lib/compiler/src/beam_bs.erl3
-rw-r--r--lib/compiler/src/beam_bsm.erl38
-rw-r--r--lib/compiler/src/beam_clean.erl20
-rw-r--r--lib/compiler/src/beam_dead.erl4
-rw-r--r--lib/compiler/src/beam_dict.erl32
-rw-r--r--lib/compiler/src/beam_except.erl9
-rw-r--r--lib/compiler/src/beam_flatten.erl3
-rw-r--r--lib/compiler/src/beam_jump.erl17
-rw-r--r--lib/compiler/src/beam_listing.erl14
-rw-r--r--lib/compiler/src/beam_peep.erl3
-rw-r--r--lib/compiler/src/beam_receive.erl3
-rw-r--r--lib/compiler/src/beam_reorder.erl3
-rw-r--r--lib/compiler/src/beam_split.erl3
-rw-r--r--lib/compiler/src/beam_trim.erl7
-rw-r--r--lib/compiler/src/beam_type.erl3
-rw-r--r--lib/compiler/src/beam_utils.erl58
-rw-r--r--lib/compiler/src/beam_validator.erl4
-rw-r--r--lib/compiler/src/beam_z.erl3
-rw-r--r--lib/compiler/src/cerl.erl12
-rw-r--r--lib/compiler/src/compile.erl89
-rw-r--r--lib/compiler/src/core_scan.erl24
-rw-r--r--lib/compiler/src/sys_core_fold.erl6
-rw-r--r--lib/compiler/src/sys_pre_attributes.erl48
-rw-r--r--lib/compiler/src/v3_codegen.erl4
-rw-r--r--lib/compiler/src/v3_core.erl31
-rw-r--r--lib/compiler/src/v3_kernel_pp.erl2
-rw-r--r--lib/compiler/src/v3_life.erl9
-rw-r--r--lib/compiler/src/v3_life.hrl8
-rw-r--r--lib/compiler/test/compile_SUITE.erl40
-rw-r--r--lib/compiler/test/compile_SUITE_data/simple.erl5
-rw-r--r--lib/compiler/test/lc_SUITE.erl11
-rw-r--r--lib/compiler/test/map_SUITE.erl1
-rw-r--r--lib/crypto/c_src/crypto.c74
-rw-r--r--lib/crypto/c_src/crypto_callback.h2
-rw-r--r--lib/crypto/doc/src/crypto.xml2
-rw-r--r--lib/crypto/src/crypto.erl19
-rw-r--r--lib/debugger/test/map_SUITE.erl22
-rw-r--r--lib/dialyzer/RELEASE_NOTES2
-rw-r--r--lib/dialyzer/src/dialyzer.app.src4
-rw-r--r--lib/dialyzer/src/dialyzer_analysis_callgraph.erl132
-rw-r--r--lib/dialyzer/src/dialyzer_behaviours.erl13
-rw-r--r--lib/dialyzer/src/dialyzer_callgraph.erl153
-rw-r--r--lib/dialyzer/src/dialyzer_cl.erl46
-rw-r--r--lib/dialyzer/src/dialyzer_codeserver.erl279
-rw-r--r--lib/dialyzer/src/dialyzer_contracts.erl118
-rw-r--r--lib/dialyzer/src/dialyzer_coordinator.erl65
-rw-r--r--lib/dialyzer/src/dialyzer_dataflow.erl40
-rw-r--r--lib/dialyzer/src/dialyzer_gui_wx.erl3
-rw-r--r--lib/dialyzer/src/dialyzer_plt.erl164
-rw-r--r--lib/dialyzer/src/dialyzer_succ_typings.erl41
-rw-r--r--lib/dialyzer/src/dialyzer_typesig.erl153
-rw-r--r--lib/dialyzer/src/dialyzer_utils.erl185
-rw-r--r--lib/dialyzer/src/dialyzer_worker.erl53
-rw-r--r--lib/dialyzer/test/abstract_SUITE.erl6
-rw-r--r--lib/dialyzer/test/behaviour_SUITE_data/dialyzer_options2
-rw-r--r--lib/dialyzer/test/map_SUITE_data/dialyzer_options1
-rw-r--r--lib/dialyzer/test/map_SUITE_data/results/map_galore8
-rw-r--r--lib/dialyzer/test/map_SUITE_data/src/map_galore.erl24
-rw-r--r--lib/dialyzer/test/opaque_SUITE_data/dialyzer_options2
-rw-r--r--lib/dialyzer/test/opaque_SUITE_data/results/weird6
-rw-r--r--lib/dialyzer/test/opaque_SUITE_data/src/recrec/dialyzer_dataflow.erl2
-rw-r--r--lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning1.erl18
-rw-r--r--lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning2.erl14
-rw-r--r--lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning3.erl19
-rw-r--r--lib/dialyzer/test/options1_SUITE_data/results/compiler2
-rw-r--r--lib/dialyzer/test/options1_SUITE_data/src/compiler/beam_disasm.erl2
-rw-r--r--lib/dialyzer/test/options1_SUITE_data/src/compiler/cerl_inline.erl6
-rw-r--r--lib/dialyzer/test/options1_SUITE_data/src/compiler/rec_env.erl2
-rw-r--r--lib/dialyzer/test/options1_SUITE_data/src/compiler/sys_pre_expand.erl2
-rw-r--r--lib/dialyzer/test/options1_SUITE_data/src/compiler/v3_codegen.erl2
-rw-r--r--lib/dialyzer/test/plt_SUITE.erl27
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/results/mnesia1
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct.erl4
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_check.erl6
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser2.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/inets/ftp.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/inets/http.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/inets/http_lib.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_manager.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_manager.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_parse.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_response.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/inets/jnets_httpd.hrl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_mnesia.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_htaccess.erl10
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_range.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_responsecontrol.erl4
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_bup.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint.erl4
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_loader.erl4
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_locker.erl8
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_monitor.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_schema.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_tm.erl2
-rw-r--r--lib/dialyzer/test/small_SUITE_data/results/chars4
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/anno.erl18
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/chars.erl32
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/tuple1.erl2
-rw-r--r--lib/diameter/include/diameter_gen.hrl2
-rw-r--r--lib/diameter/src/base/diameter.erl3
-rw-r--r--lib/diameter/src/base/diameter_callback.erl2
-rw-r--r--lib/diameter/src/base/diameter_config.erl7
-rw-r--r--lib/diameter/src/base/diameter_peer_fsm.erl73
-rw-r--r--lib/diameter/src/base/diameter_service.erl18
-rw-r--r--lib/diameter/src/base/diameter_sup.erl4
-rw-r--r--lib/diameter/src/base/diameter_traffic.erl263
-rw-r--r--lib/diameter/src/base/diameter_watchdog.erl28
-rw-r--r--lib/diameter/src/diameter.appup.src14
-rw-r--r--lib/diameter/src/info/diameter_info.erl2
-rw-r--r--lib/diameter/src/transport/diameter_sctp.erl2
-rw-r--r--lib/diameter/test/diameter_pool_SUITE.erl2
-rw-r--r--lib/diameter/vsn.mk4
-rw-r--r--lib/edoc/src/edoc_tags.erl2
-rw-r--r--lib/eldap/test/README2
-rw-r--r--lib/erl_interface/doc/src/erl_call.xml2
-rw-r--r--lib/erl_interface/src/README2
-rw-r--r--lib/erl_interface/src/legacy/erl_marshal.c4
-rw-r--r--lib/erl_interface/src/misc/ei_locking.c4
-rw-r--r--lib/erl_interface/test/ei_decode_SUITE.erl2
-rw-r--r--lib/erl_interface/test/erl_eterm_SUITE.erl2
-rw-r--r--lib/eunit/doc/src/notes.xml2
-rw-r--r--lib/hipe/amd64/Makefile1
-rw-r--r--lib/hipe/amd64/hipe_amd64_encode.erl2
-rw-r--r--lib/hipe/amd64/hipe_amd64_registers.erl27
-rw-r--r--lib/hipe/cerl/cerl_to_icode.erl2
-rw-r--r--lib/hipe/cerl/erl_types.erl339
-rw-r--r--lib/hipe/doc/src/notes.xml2
-rw-r--r--lib/hipe/flow/cfg.inc2
-rw-r--r--lib/hipe/flow/ebb.inc14
-rw-r--r--lib/hipe/flow/hipe_dominators.erl2
-rw-r--r--lib/hipe/icode/hipe_beam_to_icode.erl31
-rw-r--r--lib/hipe/icode/hipe_icode_type.erl21
-rw-r--r--lib/hipe/llvm/hipe_llvm.erl40
-rw-r--r--lib/hipe/llvm/hipe_rtl_to_llvm.erl6
-rw-r--r--lib/hipe/main/hipe.erl6
-rw-r--r--lib/hipe/opt/hipe_schedule.erl4
-rw-r--r--lib/hipe/opt/hipe_spillmin_color.erl2
-rw-r--r--lib/hipe/regalloc/hipe_amd64_specific_sse2.erl4
-rw-r--r--lib/hipe/rtl/hipe_icode2rtl.erl12
-rw-r--r--lib/hipe/rtl/hipe_rtl_binary_construct.erl170
-rw-r--r--lib/hipe/rtl/hipe_tagscheme.erl106
-rw-r--r--lib/hipe/test/basic_SUITE_data/basic_tuples.erl14
-rw-r--r--lib/hipe/test/maps_SUITE_data/maps_warn_pair_key_overloaded.erl1
-rw-r--r--lib/hipe/x86/hipe_rtl_to_x86.erl1
-rw-r--r--lib/hipe/x86/hipe_x86_assemble.erl29
-rw-r--r--lib/hipe/x86/hipe_x86_postpass.erl8
-rw-r--r--lib/inets/doc/src/notes.xml19
-rw-r--r--lib/inets/src/ftp/ftp.erl53
-rw-r--r--lib/inets/src/http_client/httpc_response.erl2
-rw-r--r--lib/inets/src/http_server/httpd_request_handler.erl4
-rw-r--r--lib/inets/src/inets_app/inets.appup.src4
-rw-r--r--lib/inets/test/httpd_1_1.erl4
-rw-r--r--lib/inets/test/httpd_test_data/server_root/conf/httpd.conf2
-rw-r--r--lib/inets/test/old_httpd_SUITE_data/server_root/conf/httpd.conf2
-rw-r--r--lib/inets/vsn.mk2
-rw-r--r--lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractConnection.java2
-rw-r--r--lib/jinterface/java_src/com/ericsson/otp/erlang/OtpMbox.java2
-rw-r--r--lib/kernel/doc/src/code.xml2
-rw-r--r--lib/kernel/doc/src/config.xml4
-rw-r--r--lib/kernel/doc/src/kernel_app.xml81
-rw-r--r--lib/kernel/doc/src/notes.xml4
-rw-r--r--lib/kernel/doc/src/os.xml31
-rw-r--r--lib/kernel/doc/src/seq_trace.xml6
-rw-r--r--lib/kernel/include/inet.hrl2
-rw-r--r--lib/kernel/src/Makefile1
-rw-r--r--lib/kernel/src/code.erl12
-rw-r--r--lib/kernel/src/dist_ac.erl10
-rw-r--r--lib/kernel/src/erl_signal_handler.erl57
-rw-r--r--lib/kernel/src/error_logger.erl8
-rw-r--r--lib/kernel/src/erts_debug.erl25
-rw-r--r--lib/kernel/src/file.erl2
-rw-r--r--lib/kernel/src/inet_parse.erl4
-rw-r--r--lib/kernel/src/inet_udp.erl6
-rw-r--r--lib/kernel/src/kernel.app.src1
-rw-r--r--lib/kernel/src/kernel.appup.src4
-rw-r--r--lib/kernel/src/kernel.erl13
-rw-r--r--lib/kernel/src/os.erl35
-rw-r--r--lib/kernel/src/rpc.erl14
-rw-r--r--lib/kernel/test/application_SUITE.erl2
-rw-r--r--lib/kernel/test/code_SUITE.erl41
-rw-r--r--lib/kernel/test/erl_distribution_SUITE.erl4
-rw-r--r--lib/kernel/test/erl_distribution_wb_SUITE.erl2
-rw-r--r--lib/kernel/test/error_logger_SUITE.erl13
-rw-r--r--lib/kernel/test/file_SUITE.erl2
-rw-r--r--lib/kernel/test/file_SUITE_data/realmen.html4
-rw-r--r--lib/kernel/test/multi_load_SUITE.erl8
-rw-r--r--lib/kernel/test/rpc_SUITE.erl12
-rw-r--r--lib/megaco/src/text/megaco_text_gen_prev3a.hrl2
-rw-r--r--lib/megaco/src/text/megaco_text_gen_prev3b.hrl2
-rw-r--r--lib/megaco/src/text/megaco_text_gen_prev3c.hrl2
-rw-r--r--lib/mnesia/doc/src/Mnesia_chap5.xmlsrc5
-rw-r--r--lib/mnesia/doc/src/notes.xml18
-rw-r--r--lib/mnesia/src/Makefile1
-rw-r--r--lib/mnesia/src/mnesia.app.src1
-rw-r--r--lib/mnesia/src/mnesia.erl1
-rw-r--r--lib/mnesia/src/mnesia.hrl7
-rw-r--r--lib/mnesia/src/mnesia_checkpoint.erl7
-rw-r--r--lib/mnesia/src/mnesia_event.erl3
-rw-r--r--lib/mnesia/src/mnesia_frag.erl76
-rw-r--r--lib/mnesia/src/mnesia_frag_old_hash.erl133
-rw-r--r--lib/mnesia/src/mnesia_monitor.erl2
-rw-r--r--lib/mnesia/src/mnesia_schema.erl2
-rw-r--r--lib/mnesia/test/mnesia_evil_backup.erl43
-rw-r--r--lib/mnesia/vsn.mk2
-rw-r--r--lib/observer/src/cdv_bin_cb.erl2
-rw-r--r--lib/observer/src/cdv_detail_wx.erl2
-rw-r--r--lib/observer/src/observer_app_wx.erl8
-rw-r--r--lib/observer/src/observer_lib.erl12
-rw-r--r--lib/observer/src/observer_port_wx.erl27
-rw-r--r--lib/observer/src/observer_procinfo.erl2
-rw-r--r--lib/observer/src/observer_tv_wx.erl9
-rw-r--r--lib/observer/src/observer_wx.erl10
-rw-r--r--lib/observer/test/crashdump_helper.erl2
-rw-r--r--lib/observer/test/observer_SUITE.erl17
-rw-r--r--lib/orber/src/cdr_encode.erl2
-rw-r--r--lib/orber/src/orber_iiop.hrl4
-rw-r--r--lib/orber/src/orber_initial_references.erl2
-rw-r--r--lib/orber/src/orber_objectkeys.erl2
-rw-r--r--lib/os_mon/src/memsup.erl1
-rw-r--r--lib/parsetools/src/leex.erl4
-rw-r--r--lib/percept/AUTHORS4
-rw-r--r--lib/percept/Makefile35
-rw-r--r--lib/percept/c_src/.gitignore0
-rw-r--r--lib/percept/doc/html/.gitignore0
-rw-r--r--lib/percept/doc/man3/.gitignore0
-rw-r--r--lib/percept/doc/pdf/.gitignore0
-rw-r--r--lib/percept/doc/src/Makefile190
-rw-r--r--lib/percept/doc/src/book.xml52
-rw-r--r--lib/percept/doc/src/egd_ug.xmlsrc90
-rw-r--r--lib/percept/doc/src/fascicules.xml18
-rw-r--r--lib/percept/doc/src/img.erl50
-rw-r--r--lib/percept/doc/src/img_esi.erl25
-rw-r--r--lib/percept/doc/src/img_esi_result.gifbin374 -> 0 bytes
-rw-r--r--lib/percept/doc/src/ipc_tree.erl30
-rw-r--r--lib/percept/doc/src/notes.xml495
-rw-r--r--lib/percept/doc/src/part.xml47
-rw-r--r--lib/percept/doc/src/part_notes.xml41
-rw-r--r--lib/percept/doc/src/percept_compare.gifbin241343 -> 0 bytes
-rw-r--r--lib/percept/doc/src/percept_examples.html11
-rw-r--r--lib/percept/doc/src/percept_overview.gifbin158719 -> 0 bytes
-rw-r--r--lib/percept/doc/src/percept_processes.gifbin182273 -> 0 bytes
-rw-r--r--lib/percept/doc/src/percept_processinfo.gifbin135512 -> 0 bytes
-rw-r--r--lib/percept/doc/src/percept_ug.xmlsrc223
-rw-r--r--lib/percept/doc/src/ref_man.xml48
-rw-r--r--lib/percept/doc/src/sorter.erl41
-rw-r--r--lib/percept/doc/src/test1.gifbin951 -> 0 bytes
-rw-r--r--lib/percept/doc/src/test2.gifbin1035 -> 0 bytes
-rw-r--r--lib/percept/doc/src/test3.gifbin2382 -> 0 bytes
-rw-r--r--lib/percept/doc/src/test4.gifbin2294 -> 0 bytes
-rw-r--r--lib/percept/doc/stylesheet.css39
-rw-r--r--lib/percept/ebin/.gitignore0
-rw-r--r--lib/percept/include/.gitignore0
-rw-r--r--lib/percept/info2
-rw-r--r--lib/percept/priv/Makefile97
-rw-r--r--lib/percept/priv/fonts/6x11_latin1.wingsfontbin2016 -> 0 bytes
-rw-r--r--lib/percept/priv/logs/.gitignore0
-rw-r--r--lib/percept/priv/obj/.gitignore0
-rw-r--r--lib/percept/priv/server_root/cgi-bin/.gitignore0
-rw-r--r--lib/percept/priv/server_root/conf/mime.types462
-rw-r--r--lib/percept/priv/server_root/css/percept.css162
-rw-r--r--lib/percept/priv/server_root/htdocs/index.html41
-rw-r--r--lib/percept/priv/server_root/images/nav.pngbin560 -> 0 bytes
-rw-r--r--lib/percept/priv/server_root/images/white.pngbin69 -> 0 bytes
-rw-r--r--lib/percept/priv/server_root/scripts/percept_area_select.js182
-rw-r--r--lib/percept/priv/server_root/scripts/percept_error_handler.js26
-rw-r--r--lib/percept/priv/server_root/scripts/percept_select_all.js28
-rw-r--r--lib/percept/src/Makefile108
-rw-r--r--lib/percept/src/egd.erl275
-rw-r--r--lib/percept/src/egd_font.erl173
-rw-r--r--lib/percept/src/egd_png.erl105
-rw-r--r--lib/percept/src/egd_primitives.erl412
-rw-r--r--lib/percept/src/egd_render.erl664
-rw-r--r--lib/percept/src/percept.erl337
-rw-r--r--lib/percept/src/percept.hrl53
-rw-r--r--lib/percept/src/percept_analyzer.erl368
-rw-r--r--lib/percept/src/percept_db.erl780
-rw-r--r--lib/percept/src/percept_graph.erl134
-rw-r--r--lib/percept/src/percept_html.erl707
-rw-r--r--lib/percept/src/percept_image.erl316
-rw-r--r--lib/percept/test/Makefile91
-rw-r--r--lib/percept/test/egd_SUITE.erl389
-rw-r--r--lib/percept/test/ipc_tree.erl49
-rw-r--r--lib/percept/test/percept.cover2
-rw-r--r--lib/percept/test/percept.spec1
-rw-r--r--lib/percept/test/percept_SUITE.erl126
-rw-r--r--lib/percept/test/percept_SUITE_data/ipc-dist.datbin2098105 -> 0 bytes
-rw-r--r--lib/percept/test/percept_db_SUITE.erl55
-rw-r--r--lib/percept/vsn.mk1
-rw-r--r--lib/public_key/asn1/PKCS-8.asn12
-rw-r--r--lib/public_key/doc/src/public_key.xml38
-rw-r--r--lib/public_key/doc/src/using_public_key.xml253
-rw-r--r--lib/public_key/src/public_key.erl197
-rw-r--r--lib/public_key/test/public_key_SUITE.erl145
-rw-r--r--lib/public_key/test/public_key_SUITE_data/pkix_verify_hostname_cn.pem17
-rw-r--r--lib/public_key/test/public_key_SUITE_data/pkix_verify_hostname_subjAltName.pem14
-rw-r--r--lib/public_key/test/public_key_SUITE_data/verify_hostname.conf16
-rw-r--r--lib/reltool/src/reltool.hrl6
-rw-r--r--lib/runtime_tools/doc/src/LTTng.xml2
-rw-r--r--lib/runtime_tools/src/Makefile1
-rw-r--r--lib/runtime_tools/src/percept_profile.erl195
-rw-r--r--lib/runtime_tools/src/runtime_tools.app.src6
-rw-r--r--lib/sasl/doc/src/systools.xml6
-rw-r--r--lib/sasl/src/release_handler.erl6
-rw-r--r--lib/sasl/src/systools_make.erl172
-rw-r--r--lib/sasl/src/systools_relup.erl150
-rw-r--r--lib/sasl/test/systools_SUITE.erl116
-rw-r--r--lib/snmp/src/app/snmp.appup.src8
-rw-r--r--lib/snmp/src/app/snmp.erl80
-rw-r--r--lib/snmp/src/compile/snmpc_lib.erl4
-rw-r--r--lib/snmp/src/compile/snmpc_mib_gram.yrl6
-rw-r--r--lib/snmp/test/snmp_compiler_test.erl32
-rw-r--r--lib/snmp/test/snmp_manager_test.erl12
-rw-r--r--lib/snmp/test/snmp_test_data/OTP14145-MIB.mib44
-rw-r--r--lib/snmp/vsn.mk4
-rw-r--r--lib/ssh/doc/src/ssh.xml61
-rw-r--r--lib/ssh/doc/src/ssh_app.xml9
-rw-r--r--lib/ssh/doc/src/using_ssh.xml2
-rw-r--r--lib/ssh/src/Makefile1
-rw-r--r--lib/ssh/src/ssh.app.src8
-rw-r--r--lib/ssh/src/ssh.erl848
-rw-r--r--lib/ssh/src/ssh.hrl45
-rw-r--r--lib/ssh/src/ssh_acceptor.erl117
-rw-r--r--lib/ssh/src/ssh_acceptor_sup.erl28
-rw-r--r--lib/ssh/src/ssh_auth.erl79
-rw-r--r--lib/ssh/src/ssh_bits.erl46
-rw-r--r--lib/ssh/src/ssh_cli.erl30
-rw-r--r--lib/ssh/src/ssh_connect.hrl4
-rw-r--r--lib/ssh/src/ssh_connection.erl68
-rw-r--r--lib/ssh/src/ssh_connection_handler.erl207
-rw-r--r--lib/ssh/src/ssh_dbg.erl66
-rw-r--r--lib/ssh/src/ssh_file.erl4
-rw-r--r--lib/ssh/src/ssh_io.erl16
-rw-r--r--lib/ssh/src/ssh_options.erl895
-rw-r--r--lib/ssh/src/ssh_sftp.erl49
-rw-r--r--lib/ssh/src/ssh_sftpd.erl55
-rw-r--r--lib/ssh/src/ssh_sftpd_file_api.erl2
-rw-r--r--lib/ssh/src/ssh_subsystem_sup.erl36
-rw-r--r--lib/ssh/src/ssh_system_sup.erl34
-rw-r--r--lib/ssh/src/ssh_transport.erl349
-rw-r--r--lib/ssh/src/ssh_transport.hrl13
-rw-r--r--lib/ssh/src/sshd_sup.erl22
-rw-r--r--lib/ssh/test/property_test/ssh_eqc_encode_decode.erl5
-rw-r--r--lib/ssh/test/ssh_algorithms_SUITE.erl24
-rw-r--r--lib/ssh/test/ssh_basic_SUITE.erl76
-rw-r--r--lib/ssh/test/ssh_benchmark_SUITE.erl22
-rw-r--r--lib/ssh/test/ssh_key_cb.erl4
-rw-r--r--lib/ssh/test/ssh_key_cb_options.erl2
-rw-r--r--lib/ssh/test/ssh_options_SUITE.erl27
-rw-r--r--lib/ssh/test/ssh_protocol_SUITE.erl86
-rw-r--r--lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test4
-rw-r--r--lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test.moduli5
-rw-r--r--lib/ssh/test/ssh_sftp_SUITE_data/ssh_host_rsa_key16
-rw-r--r--lib/ssh/test/ssh_sftp_SUITE_data/ssh_host_rsa_key.pub5
-rw-r--r--lib/ssh/test/ssh_sftpd_SUITE.erl164
-rw-r--r--lib/ssh/test/ssh_sftpd_erlclient_SUITE.erl3
-rw-r--r--lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/id_rsa15
-rw-r--r--lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/ssh_host_rsa_key16
-rw-r--r--lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/ssh_host_rsa_key.pub5
-rw-r--r--lib/ssh/test/ssh_test_lib.erl7
-rw-r--r--lib/ssh/test/ssh_to_openssh_SUITE.erl7
-rw-r--r--lib/ssh/test/ssh_trpt_test_lib.erl31
-rw-r--r--lib/ssl/doc/src/ssl.xml8
-rw-r--r--lib/ssl/doc/src/ssl_session_cache_api.xml29
-rw-r--r--lib/ssl/src/Makefile15
-rw-r--r--lib/ssl/src/dtls_connection.erl139
-rw-r--r--lib/ssl/src/dtls_handshake.erl8
-rw-r--r--lib/ssl/src/dtls_record.erl2
-rw-r--r--lib/ssl/src/dtls_socket.erl13
-rw-r--r--lib/ssl/src/dtls_udp_listener.erl62
-rw-r--r--lib/ssl/src/dtls_v1.erl15
-rw-r--r--lib/ssl/src/ssl.app.src15
-rw-r--r--lib/ssl/src/ssl.appup.src18
-rw-r--r--lib/ssl/src/ssl.erl107
-rw-r--r--lib/ssl/src/ssl_admin_sup.erl95
-rw-r--r--lib/ssl/src/ssl_certificate.erl16
-rw-r--r--lib/ssl/src/ssl_cipher.erl22
-rw-r--r--lib/ssl/src/ssl_config.erl52
-rw-r--r--lib/ssl/src/ssl_connection.erl89
-rw-r--r--lib/ssl/src/ssl_connection_sup.erl101
-rw-r--r--lib/ssl/src/ssl_crl.erl76
-rw-r--r--lib/ssl/src/ssl_dist_admin_sup.erl74
-rw-r--r--lib/ssl/src/ssl_dist_connection_sup.erl79
-rw-r--r--lib/ssl/src/ssl_dist_sup.erl42
-rw-r--r--lib/ssl/src/ssl_handshake.erl58
-rw-r--r--lib/ssl/src/ssl_handshake.hrl3
-rw-r--r--lib/ssl/src/ssl_internal.hrl5
-rw-r--r--lib/ssl/src/ssl_manager.erl209
-rw-r--r--lib/ssl/src/ssl_pem_cache.erl266
-rw-r--r--lib/ssl/src/ssl_pkix_db.erl72
-rw-r--r--lib/ssl/src/ssl_record.erl4
-rw-r--r--lib/ssl/src/ssl_sup.erl86
-rw-r--r--lib/ssl/src/tls_connection.erl65
-rw-r--r--lib/ssl/src/tls_handshake.erl69
-rw-r--r--lib/ssl/src/tls_v1.erl20
-rw-r--r--lib/ssl/test/make_certs.erl4
-rw-r--r--lib/ssl/test/ssl_basic_SUITE.erl143
-rw-r--r--lib/ssl/test/ssl_bench_SUITE.erl10
-rw-r--r--lib/ssl/test/ssl_certificate_verify_SUITE.erl62
-rw-r--r--lib/ssl/test/ssl_handshake_SUITE.erl9
-rw-r--r--lib/ssl/test/ssl_npn_hello_SUITE.erl4
-rw-r--r--lib/ssl/test/ssl_pem_cache_SUITE.erl4
-rw-r--r--lib/ssl/test/ssl_test_lib.erl243
-rw-r--r--lib/ssl/test/ssl_to_openssl_SUITE.erl57
-rw-r--r--lib/ssl/vsn.mk2
-rw-r--r--lib/stdlib/doc/src/c.xml26
-rw-r--r--lib/stdlib/doc/src/erl_tar.xml72
-rw-r--r--lib/stdlib/doc/src/ets.xml4
-rw-r--r--lib/stdlib/doc/src/filelib.xml54
-rw-r--r--lib/stdlib/doc/src/filename.xml37
-rw-r--r--lib/stdlib/doc/src/gen_fsm.xml5
-rw-r--r--lib/stdlib/doc/src/gen_server.xml5
-rw-r--r--lib/stdlib/doc/src/gen_statem.xml167
-rw-r--r--lib/stdlib/doc/src/notes.xml2
-rw-r--r--lib/stdlib/doc/src/shell.xml10
-rw-r--r--lib/stdlib/doc/src/sys.xml4
-rw-r--r--lib/stdlib/src/Makefile4
-rw-r--r--lib/stdlib/src/base64.erl67
-rw-r--r--lib/stdlib/src/beam_lib.erl54
-rw-r--r--lib/stdlib/src/binary.erl2
-rw-r--r--lib/stdlib/src/c.erl260
-rw-r--r--lib/stdlib/src/dets.erl7
-rw-r--r--lib/stdlib/src/edlin_expand.erl95
-rw-r--r--lib/stdlib/src/erl_eval.erl1
-rw-r--r--lib/stdlib/src/erl_expand_records.erl18
-rw-r--r--lib/stdlib/src/erl_parse.yrl47
-rw-r--r--lib/stdlib/src/erl_tar.erl2562
-rw-r--r--lib/stdlib/src/erl_tar.hrl394
-rw-r--r--lib/stdlib/src/ets.erl18
-rw-r--r--lib/stdlib/src/filelib.erl122
-rw-r--r--lib/stdlib/src/filename.erl140
-rw-r--r--lib/stdlib/src/gen_event.erl2
-rw-r--r--lib/stdlib/src/gen_fsm.erl2
-rw-r--r--lib/stdlib/src/gen_statem.erl867
-rw-r--r--lib/stdlib/src/io_lib.erl2
-rw-r--r--lib/stdlib/src/io_lib_format.erl5
-rw-r--r--lib/stdlib/src/io_lib_pretty.erl121
-rw-r--r--lib/stdlib/src/otp_internal.erl37
-rw-r--r--lib/stdlib/src/proplists.erl2
-rw-r--r--lib/stdlib/src/qlc.erl6
-rw-r--r--lib/stdlib/src/shell_default.erl3
-rw-r--r--lib/stdlib/src/sofs.erl357
-rw-r--r--lib/stdlib/src/stdlib.appup.src4
-rw-r--r--lib/stdlib/src/zip.erl62
-rw-r--r--lib/stdlib/test/base64_SUITE.erl2
-rw-r--r--lib/stdlib/test/beam_lib_SUITE.erl45
-rw-r--r--lib/stdlib/test/dets_SUITE.erl9
-rw-r--r--lib/stdlib/test/edlin_expand_SUITE.erl79
-rw-r--r--lib/stdlib/test/erl_lint_SUITE.erl104
-rw-r--r--lib/stdlib/test/erl_pp_SUITE.erl13
-rw-r--r--lib/stdlib/test/erl_scan_SUITE.erl15
-rw-r--r--lib/stdlib/test/ets_SUITE.erl217
-rw-r--r--lib/stdlib/test/ets_tough_SUITE.erl58
-rw-r--r--lib/stdlib/test/filelib_SUITE.erl55
-rw-r--r--lib/stdlib/test/filename_SUITE.erl75
-rw-r--r--lib/stdlib/test/gen_statem_SUITE.erl105
-rw-r--r--lib/stdlib/test/io_SUITE.erl262
-rw-r--r--lib/stdlib/test/lists_SUITE.erl2
-rw-r--r--lib/stdlib/test/qlc_SUITE.erl18
-rw-r--r--lib/stdlib/test/rand_SUITE.erl6
-rw-r--r--lib/stdlib/test/random_iolist.erl38
-rw-r--r--lib/stdlib/test/random_unicode_list.erl38
-rw-r--r--lib/stdlib/test/re_testoutput1_replacement_test.erl2
-rw-r--r--lib/stdlib/test/re_testoutput1_split_test.erl2
-rw-r--r--lib/stdlib/test/run_pcre_tests.erl73
-rw-r--r--lib/stdlib/test/shell_SUITE.erl2
-rw-r--r--lib/stdlib/test/sofs_SUITE.erl9
-rw-r--r--lib/stdlib/test/tar_SUITE.erl178
-rw-r--r--lib/stdlib/test/tar_SUITE_data/bsd.tarbin0 -> 9216 bytes
-rw-r--r--lib/stdlib/test/tar_SUITE_data/gnu.tarbin0 -> 30720 bytes
-rw-r--r--lib/stdlib/test/tar_SUITE_data/pax_mtime.tarbin0 -> 10240 bytes
-rw-r--r--lib/stdlib/test/tar_SUITE_data/sparse00.tarbin0 -> 61440 bytes
-rw-r--r--lib/stdlib/test/tar_SUITE_data/sparse01.tarbin0 -> 61440 bytes
-rw-r--r--lib/stdlib/test/tar_SUITE_data/sparse01_empty.tarbin0 -> 10240 bytes
-rw-r--r--lib/stdlib/test/tar_SUITE_data/sparse10.tarbin0 -> 61440 bytes
-rw-r--r--lib/stdlib/test/tar_SUITE_data/sparse10_empty.tarbin0 -> 10240 bytes
-rw-r--r--lib/stdlib/test/tar_SUITE_data/star.tarbin0 -> 10240 bytes
-rw-r--r--lib/stdlib/test/tar_SUITE_data/v7.tarbin0 -> 10240 bytes
-rw-r--r--lib/stdlib/test/zip_SUITE.erl36
-rw-r--r--lib/syntax_tools/src/igor.erl12
-rw-r--r--lib/tools/doc/src/make.xml24
-rw-r--r--lib/tools/emacs/erlang-edoc.el12
-rw-r--r--lib/tools/emacs/erlang-eunit.el48
-rw-r--r--lib/tools/emacs/erlang-pkg.el4
-rw-r--r--lib/tools/emacs/erlang-skels.el14
-rw-r--r--lib/tools/emacs/erlang-start.el24
-rw-r--r--lib/tools/emacs/erlang-test.el91
-rw-r--r--lib/tools/emacs/erlang.el935
-rw-r--r--lib/tools/emacs/erldoc.el14
-rw-r--r--lib/tools/examples/xref_examples.erl2
-rw-r--r--lib/tools/src/make.erl76
-rw-r--r--lib/tools/test/Makefile4
-rw-r--r--lib/tools/test/make_SUITE.erl18
-rw-r--r--lib/typer/Makefile44
-rw-r--r--lib/typer/RELEASE_NOTES22
-rw-r--r--lib/typer/doc/Makefile40
-rw-r--r--lib/typer/doc/html/.gitignore0
-rw-r--r--lib/typer/doc/pdf/.gitignore0
-rw-r--r--lib/typer/doc/src/Makefile118
-rw-r--r--lib/typer/doc/src/book.xml42
-rw-r--r--lib/typer/doc/src/fascicules.xml12
-rw-r--r--lib/typer/doc/src/notes.xml111
-rw-r--r--lib/typer/doc/src/part_notes.xml36
-rw-r--r--lib/typer/doc/src/ref_man.xml36
-rw-r--r--lib/typer/doc/src/typer_app.xml44
-rw-r--r--lib/typer/ebin/.gitignore0
-rw-r--r--lib/typer/info2
-rw-r--r--lib/typer/src/Makefile111
-rw-r--r--lib/typer/src/typer.app.src11
-rw-r--r--lib/typer/src/typer.erl1120
-rw-r--r--lib/typer/test/Makefile65
-rw-r--r--lib/typer/test/typer.spec1
-rw-r--r--lib/typer/test/typer_SUITE.erl57
-rw-r--r--lib/typer/vsn.mk1
-rw-r--r--lib/wx/api_gen/gen_util.erl2
-rw-r--r--lib/wx/api_gen/wx_gen_cpp.erl2
-rw-r--r--lib/xmerl/src/xmerl_regexp.erl2
-rw-r--r--lib/xmerl/src/xmerl_sax_parser.erl33
-rw-r--r--lib/xmerl/src/xmerl_sax_parser.hrl9
-rw-r--r--lib/xmerl/src/xmerl_sax_parser_base.erlsrc144
-rw-r--r--lib/xmerl/src/xmerl_sax_parser_latin1.erlsrc38
-rw-r--r--lib/xmerl/src/xmerl_sax_parser_list.erlsrc19
-rw-r--r--lib/xmerl/src/xmerl_sax_parser_utf16be.erlsrc50
-rw-r--r--lib/xmerl/src/xmerl_sax_parser_utf16le.erlsrc50
-rw-r--r--lib/xmerl/src/xmerl_sax_parser_utf8.erlsrc50
-rw-r--r--lib/xmerl/src/xmerl_scan.erl31
-rw-r--r--lib/xmerl/test/Makefile4
-rw-r--r--lib/xmerl/test/xmerl_SUITE.erl47
-rw-r--r--lib/xmerl/test/xmerl_sax_SUITE.erl6
-rw-r--r--lib/xmerl/test/xmerl_sax_std_SUITE.erl100
-rw-r--r--lib/xmerl/test/xmerl_sax_stream_SUITE.erl245
-rw-r--r--lib/xmerl/test/xmerl_sax_stream_SUITE_data/xmerl_sax_stream_one.xml17
-rw-r--r--lib/xmerl/test/xmerl_sax_stream_SUITE_data/xmerl_sax_stream_one_junk.xml18
-rw-r--r--lib/xmerl/test/xmerl_sax_stream_SUITE_data/xmerl_sax_stream_two.xml34
662 files changed, 19503 insertions, 19617 deletions
diff --git a/lib/.gitignore b/lib/.gitignore
index b1da61706d..283393faa9 100644
--- a/lib/.gitignore
+++ b/lib/.gitignore
@@ -526,14 +526,6 @@
/orber/src/oe_erlang.erl
/orber/src/oe_erlang.hrl
-# percept
-
-/percept/doc/src/egd.xml
-/percept/doc/src/egd_ug.xml
-/percept/doc/src/percept.xml
-/percept/doc/src/percept_profile.xml
-/percept/doc/src/percept_ug.xml
-
# snmp
snmp/doc/intex.html
diff --git a/lib/Makefile b/lib/Makefile
index a7f3c9192f..ae466ed518 100644
--- a/lib/Makefile
+++ b/lib/Makefile
@@ -35,7 +35,7 @@ ALL_ERLANG_APPLICATIONS = xmerl edoc erl_docgen snmp otp_mibs erl_interface \
public_key ssl observer odbc diameter \
cosTransactions cosEvent cosTime cosNotification \
cosProperty cosFileTransfer cosEventDomain et megaco \
- eunit ssh typer percept eldap dialyzer hipe
+ eunit ssh eldap dialyzer hipe
ifdef BUILD_ALL
ERLANG_APPLICATIONS += $(ALL_ERLANG_APPLICATIONS)
diff --git a/lib/asn1/doc/src/Makefile b/lib/asn1/doc/src/Makefile
index 559836116f..9a388e4e8a 100644
--- a/lib/asn1/doc/src/Makefile
+++ b/lib/asn1/doc/src/Makefile
@@ -37,8 +37,7 @@ RELSYSDIR = $(RELEASE_PATH)/lib/$(APPLICATION)-$(VSN)
# Target Specs
# ----------------------------------------------------
XML_APPLICATION_FILES = ref_man.xml
-XML_REF3_FILES = asn1ct.xml \
- asn1rt.xml
+XML_REF3_FILES = asn1ct.xml
GEN_XML = \
asn1_spec.xml
diff --git a/lib/asn1/doc/src/asn1_getting_started.xml b/lib/asn1/doc/src/asn1_getting_started.xml
index d40b294c39..c036d289fc 100644
--- a/lib/asn1/doc/src/asn1_getting_started.xml
+++ b/lib/asn1/doc/src/asn1_getting_started.xml
@@ -187,6 +187,14 @@ erlc -o ../asnfiles -I ../asnfiles -I /usr/local/standards/asn1 Person.asn</pre>
<item>
<p>DER encoding rule. Only when using option <c>-ber</c>.</p>
</item>
+ <tag><c>+maps</c></tag>
+ <item>
+ <p>Use maps instead of records to represent the <c>SEQUENCE</c> and
+ <c>SET</c> types. No <c>.hrl</c> files will be generated.
+ See the Section <seealso marker="asn1_getting_started#MAP_SEQ_SET">
+ Map representation for SEQUENCE and SET</seealso>
+ for more information.</p>
+ </item>
<tag><c>+asn1config</c></tag>
<item>
<p>This functionality works together with option
@@ -258,6 +266,10 @@ asn1ct:compile("H323-MESSAGES.asn1",[per]). </pre>
<c>{error, {asn1, Description}}</c> where
<c>Description</c> is
an Erlang term describing the error.</p>
+ <p>Currently, <c>Description</c> looks like this:
+ <c>{ErrorDescription, StackTrace}</c>. Applications should
+ not depend on the exact contents of <c>Description</c> as it
+ could change in the future.</p>
</section>
</section>
@@ -766,8 +778,11 @@ Pdu ::= SEQUENCE {
b REAL,
c OBJECT IDENTIFIER,
d NULL } </pre>
- <p>This is a 4-component structure called <c>Pdu</c>. The record format
- is the major format for representation of <c>SEQUENCE</c> in Erlang.
+ <p>This is a 4-component structure called <c>Pdu</c>. By default,
+ a <c>SEQUENCE</c> is represented by a record in Erlang.
+ It can also be represented as a map; see
+ <seealso marker="asn1_getting_started#MAP_SEQ_SET">
+ Map representation for SEQUENCE and SET</seealso>.
For each <c>SEQUENCE</c> and <c>SET</c> in an ASN.1 module an Erlang
record declaration is generated. For <c>Pdu</c>, a record
like the following is defined:</p>
@@ -878,6 +893,48 @@ SExt ::= SEQUENCE {
</section>
<section>
+ <marker id="MAP_SEQ_SET"></marker>
+ <title>Map representation for SEQUENCE and SET</title>
+ <p>If the ASN.1 module has been compiled with option <c>maps</c>,
+ the types <c>SEQUENCE</c> and <c>SET</c> are represented as maps.</p>
+ <p>In the following example, this ASN.1 specification is used:</p>
+ <pre>
+File DEFINITIONS AUTOMATIC TAGS ::=
+BEGIN
+Seq1 ::= SEQUENCE {
+ a INTEGER DEFAULT 42,
+ b BOOLEAN OPTIONAL,
+ c IA5String
+}
+END </pre>
+
+ <p>Optional fields are to be omitted from the map if they have
+ no value:</p>
+
+ <pre>
+1> <input>asn1ct:compile('File', [per,maps]).</input>
+ok
+2> <input>{ok,E} = 'File':encode('Seq1', #{a=>0,c=>"string"}).</input>
+{ok,&lt;&lt;128,1,0,6,115,116,114,105,110,103&gt;&gt;} </pre>
+
+ <p>When decoding, optional fields will be omitted from the map:</p>
+
+ <pre>
+3> <input>'File':decode('Seq1', E).</input>
+{ok,#{a => 0,c => "string"}} </pre>
+
+ <p>Default values can be omitted from the map:</p>
+ <pre>
+4> <input>{ok,E2} = 'File':encode('Seq1', #{c=>"string"}).</input>
+{ok,&lt;&lt;0,6,115,116,114,105,110,103&gt;&gt;}
+5> <input>'File':decode('Seq1', E2).</input>
+{ok,#{a => 42,c => "string"}} </pre>
+
+ <note><p>It is not allowed to use the atoms <c>asn1_VALUE</c> and
+ <c>asn1_DEFAULT</c> with maps.</p></note>
+ </section>
+
+ <section>
<marker id="CHOICE"></marker>
<title>CHOICE</title>
<p>The type <c>CHOICE</c> is a space saver and is similar to the
@@ -1004,11 +1061,16 @@ T ::= CHOICE {
<section>
<title>Naming of Records in .hrl Files</title>
+ <p>When the option <c>maps</c> is given, no <c>.hrl</c> files
+ will be generated. The rest of this section describes the behavior
+ of the compiler when <c>maps</c> is not used.</p>
+
<p>When an ASN.1 specification is compiled, all defined types of type
- <c>SET</c> or <c>SEQUENCE</c> result in a corresponding record in the
- generated <c>.hrl</c> file. This is because the values for
- <c>SET</c> and <c>SEQUENCE</c> are represented as records as
- mentioned earlier.</p>
+ <c>SET</c> or <c>SEQUENCE</c> result in a corresponding record in the
+ generated <c>.hrl</c> file. This is because the values for
+ <c>SET</c> and <c>SEQUENCE</c> are represented as records
+ by default.</p>
+
<p>Some special cases of this functionality are presented in the
next section.</p>
@@ -1144,9 +1206,10 @@ SS ::= SET {
<p>This example shows that a function is generated by the compiler
that returns a valid Erlang representation of the value, although
the value is of a complex type.</p>
- <p>Furthermore, a macro is generated for each value in the <c>.hrl</c>
- file. So, the defined value <c>tt</c> can also be extracted by
- <c>?tt</c> in application code.</p>
+ <p>Furthermore, if the option <c>maps</c> is not used,
+ a macro is generated for each value in the <c>.hrl</c>
+ file. So, the defined value <c>tt</c> can also be extracted by
+ <c>?tt</c> in application code.</p>
</section>
<section>
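A short sketch contrasting the two representations just described; the module name 'File' and the example values are assumptions, and the Pdu definition is the one shown earlier in this chapter:

%% Default representation: a SEQUENCE value is a record from the generated .hrl file.
%%   {ok,Bytes} = 'File':encode('Pdu', #'Pdu'{a=22, b=77.99, c={0,1,2,3,4}, d='NULL'}).
%%
%% With the 'maps' option (no .hrl file is generated):
%%   asn1ct:compile('File', [ber, maps]),
%%   {ok,Bytes} = 'File':encode('Pdu', #{a => 22, b => 77.99, c => {0,1,2,3,4}, d => 'NULL'}).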
diff --git a/lib/asn1/doc/src/asn1ct.xml b/lib/asn1/doc/src/asn1ct.xml
index e5a7b1bcc4..859d6a50bb 100644
--- a/lib/asn1/doc/src/asn1ct.xml
+++ b/lib/asn1/doc/src/asn1ct.xml
@@ -170,11 +170,24 @@ File3.asn</pre>
as for <c>ber</c>.
</p>
</item>
+ <tag><c>maps</c></tag>
+ <item>
+ <p>This option changes the representation of the types
+ <c>SEQUENCE</c> and <c>SET</c> to use maps (instead of
+ records). This option also suppresses the generation of
+ <c>.hrl</c> files.</p>
+ <p>For details, see Section
+ <seealso marker="asn1_getting_started#MAP_SEQ_SET">
+ Map representation for SEQUENCE and SET</seealso>
+ in the User's Guide.
+ </p>
+ </item>
<tag><c>compact_bit_string</c></tag>
<item>
<p>
The <c>BIT STRING</c> type is decoded to "compact notation".
<em>This option is not recommended for new code.</em>
+ This option cannot be combined with the option <c>maps</c>.
</p>
<p>For details, see Section
<seealso marker="asn1_getting_started#BIT STRING">
@@ -188,6 +201,7 @@ File3.asn</pre>
The <c>BIT STRING</c> type is decoded to the legacy
format, that is, a list of zeroes and ones.
<em>This option is not recommended for new code.</em>
+ This option cannot be combined with the option <c>maps</c>.
</p>
<p>For details, see Section
<seealso marker="asn1_getting_started#BIT STRING">BIT STRING</seealso>
@@ -202,7 +216,8 @@ File3.asn</pre>
marker="asn1_getting_started#BIT STRING">BIT STRING</seealso> and Section
<seealso marker="asn1_getting_started#OCTET STRING">OCTET
STRING</seealso> in the User's Guide.</p>
- <p><em>This option is not recommended for new code.</em></p>
+ <p><em>This option is not recommended for new code.</em>
+ This option cannot be combined with the option <c>maps</c>.</p>
</item>
<tag><c>{n2n, EnumTypeName}</c></tag>
<item>
@@ -321,45 +336,6 @@ File3.asn</pre>
</func>
<func>
- <name>encode(Module, Type, Value)-> {ok, Bytes} | {error, Reason}</name>
- <fsummary>Encodes an ASN.1 value.</fsummary>
- <type>
- <v>Module = Type = atom()</v>
- <v>Value = term()</v>
- <v>Bytes = binary()</v>
- <v>Reason = term()</v>
- </type>
- <desc>
- <p>Encodes <c>Value</c> of <c>Type</c> defined in the <c>ASN.1</c> module
- <c>Module</c>. To get as fast execution as possible, the
- encode function performs only the rudimentary tests that input
- <c>Value</c> is a correct instance of <c>Type</c>. So, for example,
- the length of strings is
- not always checked. Returns <c>{ok, Bytes}</c> if successful or
- <c>{error, Reason}</c> if an error occurred.
- </p>
- <p>This function is deprecated.
- Use <c>Module:encode(Type, Value)</c> instead.</p>
- </desc>
- </func>
-
- <func>
- <name>decode(Module, Type, Bytes) -> {ok, Value} | {error, Reason}</name>
- <fsummary>Decode from Bytes into an ASN.1 value.</fsummary>
- <type>
- <v>Module = Type = atom()</v>
- <v>Value = Reason = term()</v>
- <v>Bytes = binary()</v>
- </type>
- <desc>
- <p>Decodes <c>Type</c> from <c>Module</c> from the binary
- <c>Bytes</c>. Returns <c>{ok, Value}</c> if successful.</p>
- <p>This function is deprecated.
- Use <c>Module:decode(Type, Bytes)</c> instead.</p>
- </desc>
- </func>
-
- <func>
<name>value(Module, Type) -> {ok, Value} | {error, Reason}</name>
<fsummary>Creates an ASN.1 value for test purposes.</fsummary>
<type>
@@ -424,11 +400,11 @@ File3.asn</pre>
<p>Schematically, the following occurs for each type in the module:</p>
<code type="none">
{ok, Value} = asn1ct:value(Module, Type),
-{ok, Bytes} = asn1ct:encode(Module, Type, Value),
-{ok, Value} = asn1ct:decode(Module, Type, Bytes).</code>
+{ok, Bytes} = Module:encode(Type, Value),
+{ok, Value} = Module:decode(Type, Bytes).</code>
<p>The <c>test</c> functions use the <c>*.asn1db</c> files
for all included modules. If they are located in a different
- directory than the current working directory, use the include
+ directory than the current working directory, use the <c>include</c>
option to add paths. This is only needed when automatically
generating values. For static values using <c>Value</c> no
options are needed.</p>
diff --git a/lib/asn1/doc/src/asn1rt.xml b/lib/asn1/doc/src/asn1rt.xml
deleted file mode 100644
index 3f53ca0f56..0000000000
--- a/lib/asn1/doc/src/asn1rt.xml
+++ /dev/null
@@ -1,135 +0,0 @@
-<?xml version="1.0" encoding="utf-8" ?>
-<!DOCTYPE erlref SYSTEM "erlref.dtd">
-
-<erlref>
- <header>
- <copyright>
- <year>1997</year><year>2016</year>
- <holder>Ericsson AB. All Rights Reserved.</holder>
- </copyright>
- <legalnotice>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- </legalnotice>
-
- <title>asn1rt</title>
- <prepared>Kenneth Lundin</prepared>
- <responsible>Kenneth Lundin</responsible>
- <docno>1</docno>
- <approved>Kenneth Lundin</approved>
- <checked></checked>
- <date>97-10-04</date>
- <rev>A</rev>
- <file>asn1.sgml</file>
- </header>
- <module>asn1rt</module>
- <modulesummary>ASN.1 runtime support functions</modulesummary>
- <description>
- <warning>
- <p>
- All functions in this module are deprecated and will be
- removed in a future release.
- </p>
- </warning>
- </description>
-
- <funcs>
-
- <func>
- <name>decode(Module,Type,Bytes) -> {ok,Value}|{error,Reason}</name>
- <fsummary>Decodes from Bytes into an ASN.1 value.</fsummary>
- <type>
- <v>Module = Type = atom()</v>
- <v>Value = Reason = term()</v>
- <v>Bytes = binary</v>
- </type>
- <desc>
- <p>Decodes <c>Type</c> from <c>Module</c> from the binary <c>Bytes</c>.
- Returns <c>{ok,Value}</c> if successful.</p>
- <p>Use <c>Module:decode(Type, Bytes)</c> instead of this function.</p>
- </desc>
- </func>
-
- <func>
- <name>encode(Module,Type,Value)-> {ok,Bytes} | {error,Reason}</name>
- <fsummary>Encodes an ASN.1 value.</fsummary>
- <type>
- <v>Module = Type = atom()</v>
- <v>Value = term()</v>
- <v>Bytes = binary</v>
- <v>Reason = term()</v>
- </type>
- <desc>
- <p>Encodes <c>Value</c> of <c>Type</c> defined in the <c>ASN.1</c>
- module <c>Module</c>. Returns a binary if successful. To get
- as fast execution as possible, the encode function performs
- only the rudimentary test that input <c>Value</c> is a correct
- instance of <c>Type</c>. For example, the length of strings is
- not always checked.</p>
- <p>Use <c>Module:encode(Type, Value)</c> instead of this function.</p>
- </desc>
- </func>
-
- <func>
- <name>info(Module) -> {ok,Info} | {error,Reason}</name>
- <fsummary>Returns compiler information about the Module.</fsummary>
- <type>
- <v>Module = atom()</v>
- <v>Info = list()</v>
- <v>Reason = term()</v>
- </type>
- <desc>
- <p>Returns the version of the <c>ASN.1</c> compiler that was
- used to compile the module. It also returns the compiler options
- that were used.</p>
- <p>Use <c>Module:info()</c> instead of this function.</p>
- </desc>
- </func>
-
- <func>
- <name>utf8_binary_to_list(UTF8Binary) -> {ok,UnicodeList} | {error,Reason}</name>
- <fsummary>Transforms an UTF8 encoded binary to a unicode list.</fsummary>
- <type>
- <v>UTF8Binary = binary()</v>
- <v>UnicodeList = [integer()]</v>
- <v>Reason = term()</v>
- </type>
- <desc>
- <p>Transforms a UTF8 encoded binary
- to a list of integers, where each integer represents one
- character as its unicode value. The function fails if the binary
- is not a properly encoded UTF8 string.</p>
- <p>Use <seealso marker="stdlib:unicode#characters_to_list-1">unicode:characters_to_list/1</seealso> instead of this function.</p>
- </desc>
- </func>
-
- <func>
- <name>utf8_list_to_binary(UnicodeList) -> {ok,UTF8Binary} | {error,Reason}</name>
- <fsummary>Transforms an unicode list to a UTF8 binary.</fsummary>
- <type>
- <v>UnicodeList = [integer()]</v>
- <v>UTF8Binary = binary()</v>
- <v>Reason = term()</v>
- </type>
- <desc>
- <p>Transforms a list of integers,
- where each integer represents one character as its unicode
- value, to a UTF8 encoded binary.</p>
- <p>Use <seealso marker="stdlib:unicode#characters_to_binary-1">unicode:characters_to_binary/1</seealso> instead of this function.</p>
- </desc>
- </func>
-
- </funcs>
-
-</erlref>
-
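The removed asn1rt reference manual above names a replacement for each deprecated function; they are collected here as a migration sketch (Mod, Type, Value, Bytes, Bin, and List are placeholders):

%% asn1rt:encode(Mod, Type, Value)   ->  Mod:encode(Type, Value)
%% asn1rt:decode(Mod, Type, Bytes)   ->  Mod:decode(Type, Bytes)
%% asn1rt:info(Mod)                  ->  Mod:info()
%% asn1rt:utf8_binary_to_list(Bin)   ->  unicode:characters_to_list(Bin)
%% asn1rt:utf8_list_to_binary(List)  ->  unicode:characters_to_binary(List)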
diff --git a/lib/asn1/examples/recordnames.txt b/lib/asn1/examples/recordnames.txt
index 78e30ab510..9b890b4aa7 100644
--- a/lib/asn1/examples/recordnames.txt
+++ b/lib/asn1/examples/recordnames.txt
@@ -1,6 +1,6 @@
For each ASN1 types SET and SEQUENCE a record is generated in the .hrl
file with the same name as the corresponding type.
-A decoded value is also returned as a record with the apropriate name.
+A decoded value is also returned as a record with the appropriate name.
An internally defined type as the type in component 'a' in the
following example will result in a record with name 'Seq_a':
diff --git a/lib/asn1/src/Makefile b/lib/asn1/src/Makefile
index 38cf2d496a..ba459f6cd3 100644
--- a/lib/asn1/src/Makefile
+++ b/lib/asn1/src/Makefile
@@ -68,7 +68,6 @@ CT_MODULES= \
$(EVAL_CT_MODULES)
RT_MODULES= \
- asn1rt \
asn1rt_nif
MODULES= $(CT_MODULES) $(RT_MODULES)
diff --git a/lib/asn1/src/asn1.app.src b/lib/asn1/src/asn1.app.src
index 1f8805ff5e..d2da727193 100644
--- a/lib/asn1/src/asn1.app.src
+++ b/lib/asn1/src/asn1.app.src
@@ -2,7 +2,6 @@
[{description, "The Erlang ASN1 compiler version %VSN%"},
{vsn, "%VSN%"},
{modules, [
- asn1rt,
asn1rt_nif
]},
{registered, [
diff --git a/lib/asn1/src/asn1_db.erl b/lib/asn1/src/asn1_db.erl
index 869ea310aa..a3e45ca915 100644
--- a/lib/asn1/src/asn1_db.erl
+++ b/lib/asn1/src/asn1_db.erl
@@ -20,7 +20,7 @@
%%
-module(asn1_db).
--export([dbstart/1,dbnew/2,dbload/1,dbload/3,dbsave/2,dbput/2,
+-export([dbstart/1,dbnew/3,dbload/1,dbload/4,dbsave/2,dbput/2,
dbput/3,dbget/2]).
-export([dbstop/0]).
@@ -37,13 +37,13 @@ dbstart(Includes0) ->
put(?MODULE, spawn_link(fun() -> init(Parent, Includes) end)),
ok.
-dbload(Module, Erule, Mtime) ->
- req({load, Module, Erule, Mtime}).
+dbload(Module, Erule, Maps, Mtime) ->
+ req({load, Module, {Erule,Maps}, Mtime}).
dbload(Module) ->
req({load, Module, any, {{0,0,0},{0,0,0}}}).
-dbnew(Module, Erule) -> req({new, Module, Erule}).
+dbnew(Module, Erule, Maps) -> req({new, Module, {Erule,Maps}}).
dbsave(OutFile, Module) -> cast({save, OutFile, Module}).
dbput(Module, K, V) -> cast({set, Module, K, V}).
dbput(Module, Kvs) -> cast({set, Module, Kvs}).
@@ -110,19 +110,19 @@ loop(#state{parent = Parent, monitor = MRef, table = Table,
ok = ets:tab2file(Mtab, TempFile),
ok = file:rename(TempFile, OutFile),
loop(State);
- {From, {new, Mod, Erule}} ->
+ {From, {new, Mod, EruleMaps}} ->
[] = ets:lookup(Table, Mod), %Assertion.
ModTableId = ets:new(list_to_atom(lists:concat(["asn1_",Mod])), []),
ets:insert(Table, {Mod, ModTableId}),
- ets:insert(ModTableId, {?MAGIC_KEY, info(Erule)}),
+ ets:insert(ModTableId, {?MAGIC_KEY, info(EruleMaps)}),
reply(From, ok),
loop(State);
- {From, {load, Mod, Erule, Mtime}} ->
+ {From, {load, Mod, EruleMaps, Mtime}} ->
case ets:member(Table, Mod) of
true ->
reply(From, ok);
false ->
- case load_table(Mod, Erule, Mtime, Includes) of
+ case load_table(Mod, EruleMaps, Mtime, Includes) of
{ok, ModTableId} ->
ets:insert(Table, {Mod, ModTableId}),
reply(From, ok);
@@ -151,20 +151,20 @@ lookup(Tab, K) ->
[{K,V}] -> V
end.
-info(Erule) ->
- {asn1ct:vsn(),Erule}.
+info(EruleMaps) ->
+ {asn1ct:vsn(),EruleMaps}.
-load_table(Mod, Erule, Mtime, Includes) ->
+load_table(Mod, EruleMaps, Mtime, Includes) ->
Base = lists:concat([Mod, ".asn1db"]),
case path_find(Includes, Mtime, Base) of
error ->
error;
- {ok,ModTab} when Erule =:= any ->
+ {ok,ModTab} when EruleMaps =:= any ->
{ok,ModTab};
{ok,ModTab} ->
Vsn = asn1ct:vsn(),
case ets:lookup(ModTab, ?MAGIC_KEY) of
- [{_,{Vsn,Erule}}] ->
+ [{_,{Vsn,EruleMaps}}] ->
%% Correct version and encoding rule.
{ok,ModTab};
_ ->
diff --git a/lib/asn1/src/asn1_records.hrl b/lib/asn1/src/asn1_records.hrl
index af10c1771c..06a9e3ab03 100644
--- a/lib/asn1/src/asn1_records.hrl
+++ b/lib/asn1/src/asn1_records.hrl
@@ -28,6 +28,7 @@
-define('COMPLETE_ENCODE',1).
-define('TLV_DECODE',2).
+-define(MISSING_IN_MAP, asn1__MISSING_IN_MAP).
-record(module,{pos,name,defid,tagdefault='EXPLICIT',exports={exports,[]},imports={imports,[]}, extensiondefault=empty,typeorval}).
@@ -96,6 +97,28 @@
error_context %Top-level thingie (contains line numbers)
}).
+%% Code generation parameters and options.
+-record(gen,
+ {erule=ber :: 'ber' | 'per',
+ der=false :: boolean(),
+ aligned=false :: boolean(),
+ rec_prefix="" :: string(),
+ macro_prefix="" :: string(),
+ pack=record :: 'record' | 'map',
+ options=[] :: [any()]
+ }).
+
+%% Abstract intermediate representation.
+-record(abst,
+ {name :: module(), %Name of module.
+ types, %Types.
+ values, %Values.
+ ptypes, %Parameterized types.
+ classes, %Classes.
+ objects, %Objects.
+ objsets %Object sets.
+ }).
+
%% state record used by back-end at partial decode
%% active is set to 'yes' when a partial decode function is generated.
%% prefix is set to 'dec-inc-' or 'dec-partial-' is for
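A small hypothetical sketch of how back-end code can branch on the new #gen{} record; the function name is invented for illustration, and the field semantics are as defined above:

%% The 'pack' field selects the runtime representation of SEQUENCE and SET.
hrl_needed(#gen{pack=record}) -> true;    %% records: a .hrl file is generated
hrl_needed(#gen{pack=map})    -> false.   %% maps: no .hrl file is generated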
diff --git a/lib/asn1/src/asn1ct.erl b/lib/asn1/src/asn1ct.erl
index 8783b5418d..9f77a557e5 100644
--- a/lib/asn1/src/asn1ct.erl
+++ b/lib/asn1/src/asn1ct.erl
@@ -20,17 +20,12 @@
%%
%%
-module(asn1ct).
--deprecated([decode/3,encode/3]).
--compile([{nowarn_deprecated_function,{asn1rt,decode,3}},
- {nowarn_deprecated_function,{asn1rt,encode,2}},
- {nowarn_deprecated_function,{asn1rt,encode,3}}]).
%% Compile Time functions for ASN.1 (e.g ASN.1 compiler).
%%-compile(export_all).
%% Public exports
-export([compile/1, compile/2]).
--export([encode/2, encode/3, decode/3]).
-export([test/1, test/2, test/3, value/2, value/3]).
%% Application internal exports
-export([compile_asn/3,compile_asn1/3,compile_py/3,compile/3,
@@ -198,7 +193,7 @@ check_pass(#st{code=M,file=File,includes=Includes,
erule=Erule,dbfile=DbFile,opts=Opts,
inputmodules=InputModules}=St) ->
start(Includes),
- case asn1ct_check:storeindb(#state{erule=Erule}, M) of
+ case asn1ct_check:storeindb(#state{erule=Erule,options=Opts}, M) of
ok ->
Module = asn1_db:dbget(M#module.name, 'MODULE'),
State = #state{mname=Module#module.name,
@@ -221,8 +216,8 @@ check_pass(#st{code=M,file=File,includes=Includes,
{error,St#st{error=Reason}}
end.
-save_pass(#st{code=M,erule=Erule}=St) ->
- ok = asn1ct_check:storeindb(#state{erule=Erule}, M),
+save_pass(#st{code=M,erule=Erule,opts=Opts}=St) ->
+ ok = asn1ct_check:storeindb(#state{erule=Erule,options=Opts}, M),
{ok,St}.
parse_listing(#st{code=Code,outfile=OutFile0}=St) ->
@@ -241,12 +236,8 @@ abs_listing(#st{code={M,_},outfile=OutFile}) ->
generate_pass(#st{code=Code,outfile=OutFile,erule=Erule,opts=Opts}=St0) ->
St = St0#st{code=undefined}, %Reclaim heap space
- case generate(Code, OutFile, Erule, Opts) of
- {error,Reason} ->
- {error,St#st{error=Reason}};
- ok ->
- {ok,St}
- end.
+ generate(Code, OutFile, Erule, Opts),
+ {ok,St}.
compile_pass(#st{outfile=OutFile,opts=Opts0}=St) ->
asn1_db:dbstop(), %Reclaim memory.
@@ -839,37 +830,55 @@ delete_double_of_symbol1([],Acc) ->
%%***********************************
-generate({M,GenTOrV}, OutFile, EncodingRule, Options) ->
+generate({M,CodeTuple}, OutFile, EncodingRule, Options) ->
+ {Types,Values,Ptypes,Classes,Objects,ObjectSets} = CodeTuple,
+ Code = #abst{name=M#module.name,
+ types=Types,values=Values,ptypes=Ptypes,
+ classes=Classes,objects=Objects,objsets=ObjectSets},
debug_on(Options),
setup_bit_string_format(Options),
setup_legacy_erlang_types(Options),
- put(encoding_options,Options),
asn1ct_table:new(check_functions),
+ Gen = init_gen_record(EncodingRule, Options),
+
+ check_maps_option(Gen),
+
%% create decoding function names and taglists for partial decode
- case (catch specialized_decode_prepare(EncodingRule,M,GenTOrV,Options)) of
- {error, Reason} -> warning("Error in configuration file: ~n~p~n",
- [Reason], Options,
- "Error in configuration file");
- _ -> ok
+ try
+ specialized_decode_prepare(Gen, M)
+ catch
+ throw:{error, Reason} ->
+ warning("Error in configuration file: ~n~p~n",
+ [Reason], Options,
+ "Error in configuration file")
end,
- Result =
- case (catch asn1ct_gen:pgen(OutFile,EncodingRule,
- M#module.name,GenTOrV,Options)) of
- {'EXIT',Reason2} ->
- error("~p~n",[Reason2],Options),
- {error,Reason2};
- _ ->
- ok
- end,
+ asn1ct_gen:pgen(OutFile, Gen, Code),
debug_off(Options),
- erase(encoding_options),
cleanup_bit_string_format(),
erase(tlv_format), % used in ber
erase(class_default_type),% used in ber
asn1ct_table:delete(check_functions),
- Result.
+ ok.
+
+init_gen_record(EncodingRule, Options) ->
+ Erule = case EncodingRule of
+ uper -> per;
+ _ -> EncodingRule
+ end,
+ Der = proplists:get_bool(der, Options),
+ Aligned = EncodingRule =:= per,
+ RecPrefix = proplists:get_value(record_name_prefix, Options, ""),
+ MacroPrefix = proplists:get_value(macro_name_prefix, Options, ""),
+ Pack = case proplists:get_value(maps, Options, false) of
+ true -> map;
+ false -> record
+ end,
+ #gen{erule=Erule,der=Der,aligned=Aligned,
+ rec_prefix=RecPrefix,macro_prefix=MacroPrefix,
+ pack=Pack,options=Options}.
+
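%% Illustrative sketch, not part of this patch: assuming the hypothetical
%% option list [uper, maps, der], init_gen_record/2 would produce
%%
%%   #gen{erule=per,der=true,aligned=false,
%%        rec_prefix="",macro_prefix="",
%%        pack=map,options=[uper, maps, der]}
%%
%% that is, 'uper' is normalized to erule=per with aligned=false, and the
%% bare 'maps' option selects pack=map instead of pack=record.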
setup_legacy_erlang_types(Opts) ->
F = case lists:member(legacy_erlang_types, Opts) of
@@ -915,6 +924,26 @@ cleanup_bit_string_format() ->
get_bit_string_format() ->
get(bit_string_format).
+check_maps_option(#gen{pack=map}) ->
+ case get_bit_string_format() of
+ bitstring ->
+ ok;
+ _ ->
+ Message1 = "The 'maps' option must not be combined with "
+ "'compact_bit_string' or 'legacy_bit_string'",
+ exit({error,{asn1,Message1}})
+ end,
+ case use_legacy_types() of
+ false ->
+ ok;
+ true ->
+ Message2 = "The 'maps' option must not be combined with "
+ "'legacy_erlang_types'",
+ exit({error,{asn1,Message2}})
+ end;
+check_maps_option(#gen{}) ->
+ ok.
+
%% parse_and_save parses an asn1 spec and saves the unchecked parse
%% tree in a data base file.
@@ -924,22 +953,27 @@ parse_and_save(Module,S) ->
SourceDir = S#state.sourcedir,
Includes = [I || {i,I} <- Options],
Erule = S#state.erule,
+ Maps = lists:member(maps, Options),
case get_input_file(Module, [SourceDir|Includes]) of
%% search for asn1 source
{file,SuffixedASN1source} ->
Mtime = filelib:last_modified(SuffixedASN1source),
- case asn1_db:dbload(Module, Erule, Mtime) of
+ case asn1_db:dbload(Module, Erule, Maps, Mtime) of
ok -> ok;
error -> parse_and_save1(S, SuffixedASN1source, Options)
end;
- Err ->
+ Err when not Maps ->
case asn1_db:dbload(Module) of
ok ->
+ %% FIXME: This should be an error.
warning("could not do a consistency check of the ~p file: no asn1 source file was found.~n",
[lists:concat([Module,".asn1db"])],Options);
error ->
ok
end,
+ {error,{asn1,input_file_error,Err}};
+ Err ->
+ %% Always fail directly when the 'maps' option is used.
{error,{asn1,input_file_error,Err}}
end.
@@ -1002,9 +1036,8 @@ input_file_type(File) ->
end
end;
".asn1config" ->
- case read_config_file(File,asn1_module) of
+ case read_config_file_info(File, asn1_module) of
{ok,Asn1Module} ->
-% put(asn1_config_file,File),
input_file_type(Asn1Module);
Error ->
Error
@@ -1097,16 +1130,27 @@ translate_options([H|T]) ->
translate_options([]) -> [].
remove_asn_flags(Options) ->
- [X || X <- Options,
- X /= get_rule(Options),
- X /= optimize,
- X /= compact_bit_string,
- X /= legacy_bit_string,
- X /= legacy_erlang_types,
- X /= debug,
- X /= asn1config,
- X /= record_name_prefix].
-
+ [X || X <- Options, not is_asn1_flag(X)].
+
+is_asn1_flag(asn1config) -> true;
+is_asn1_flag(ber) -> true;
+is_asn1_flag(compact_bit_string) -> true;
+is_asn1_flag(debug) -> true;
+is_asn1_flag(der) -> true;
+is_asn1_flag(legacy_bit_string) -> true;
+is_asn1_flag({macro_name_prefix,_}) -> true;
+is_asn1_flag({n2n,_}) -> true;
+is_asn1_flag(noobj) -> true;
+is_asn1_flag(no_ok_wrapper) -> true;
+is_asn1_flag(optimize) -> true;
+is_asn1_flag(per) -> true;
+is_asn1_flag({record_name_prefix,_}) -> true;
+is_asn1_flag(undec_rec) -> true;
+is_asn1_flag(uper) -> true;
+is_asn1_flag(verbose) -> true;
+%% 'warnings_as_errors' is intentionally passed through to the compiler.
+is_asn1_flag(_) -> false.
+
debug_on(Options) ->
case lists:member(debug,Options) of
true ->
@@ -1271,21 +1315,6 @@ pretty2(Module,AbsFile) ->
start(Includes) when is_list(Includes) ->
asn1_db:dbstart(Includes).
-
-encode(Module,Term) ->
- asn1rt:encode(Module,Term).
-
-encode(Module,Type,Term) when is_list(Module) ->
- asn1rt:encode(list_to_atom(Module),Type,Term);
-encode(Module,Type,Term) ->
- asn1rt:encode(Module,Type,Term).
-
-decode(Module,Type,Bytes) when is_list(Module) ->
- asn1rt:decode(list_to_atom(Module),Type,Bytes);
-decode(Module,Type,Bytes) ->
- asn1rt:decode(Module,Type,Bytes).
-
-
test(Module) -> test_module(Module, []).
test(Module, [] = Options) -> test_module(Module, Options);
@@ -1330,10 +1359,10 @@ test_type(Module, Type) ->
test_value(Module, Type, Value) ->
in_process(fun() ->
- case catch encode(Module, Type, Value) of
+ case catch Module:encode(Type, Value) of
{ok, Bytes} ->
NewBytes = prepare_bytes(Bytes),
- case decode(Module, Type, NewBytes) of
+ case Module:decode(Type, NewBytes) of
{ok, Value} ->
{ok, {Module, Type, Value}};
{ok, Res} ->
@@ -1390,25 +1419,26 @@ prepare_bytes(Bytes) -> list_to_binary(Bytes).
vsn() ->
?vsn.
-specialized_decode_prepare(Erule,M,TsAndVs,Options) ->
- case lists:member(asn1config,Options) of
+specialized_decode_prepare(#gen{erule=ber,options=Options}=Gen, M) ->
+ case lists:member(asn1config, Options) of
true ->
- partial_decode_prepare(Erule,M,TsAndVs,Options);
- _ ->
+ special_decode_prepare_1(Gen, M);
+ false ->
ok
- end.
+ end;
+specialized_decode_prepare(_, _) ->
+ ok.
+
%% Reads the configuration file if it exists and stores information
%% about partial decode and incomplete decode
-partial_decode_prepare(ber,M,TsAndVs,Options) when is_tuple(TsAndVs) ->
+special_decode_prepare_1(#gen{options=Options}=Gen, M) ->
%% read configure file
-
- ModName =
- case lists:keysearch(asn1config,1,Options) of
- {value,{_,MName}} -> MName;
- _ -> M#module.name
- end,
+ ModName = case lists:keyfind(asn1config, 1, Options) of
+ {_,MName} -> MName;
+ false -> M#module.name
+ end,
%% io:format("ModName: ~p~nM#module.name: ~p~n~n",[ModName,M#module.name]),
- case read_config_file(ModName) of
+ case read_config_file(Gen, ModName) of
no_config_file ->
ok;
CfgList ->
@@ -1427,11 +1457,7 @@ partial_decode_prepare(ber,M,TsAndVs,Options) when is_tuple(TsAndVs) ->
Part_inc_tlv_tags = tlv_tags(CommandList2),
save_config(partial_incomplete_decode,Part_inc_tlv_tags),
save_gen_state(exclusive_decode,ExclusiveDecode,Part_inc_tlv_tags)
- end;
-partial_decode_prepare(_,_,_,_) ->
- ok.
-
-
+ end.
%% create_partial_inc_decode_gen_info/2
%%
@@ -1883,46 +1909,38 @@ tlv_tag1(<<0:1,PartialTag:7>>,Acc) ->
tlv_tag1(<<1:1,PartialTag:7,Buffer/binary>>,Acc) ->
tlv_tag1(Buffer,(Acc bsl 7) bor PartialTag).
-%% reads the content from the configuration file and returns the
-%% selected part choosen by InfoType. Assumes that the config file
+%% Reads the content from the configuration file and returns the
+%% selected part chosen by InfoType. Assumes that the config file
%% content is an Erlang term.
-read_config_file(ModuleName,InfoType) when is_atom(InfoType) ->
- CfgList = read_config_file(ModuleName),
- get_config_info(CfgList,InfoType).
+read_config_file_info(ModuleName, InfoType) when is_atom(InfoType) ->
+ Name = ensure_ext(ModuleName, ".asn1config"),
+ CfgList = read_config_file0(Name, []),
+ get_config_info(CfgList, InfoType).
+read_config_file(#gen{options=Options}, ModuleName) ->
+ Name = ensure_ext(ModuleName, ".asn1config"),
+ Includes = [I || {i,I} <- Options],
+ read_config_file0(Name, ["."|Includes]).
-read_config_file(ModuleName) ->
- case file:consult(lists:concat([ModuleName,'.asn1config'])) of
+read_config_file0(Name, [D|Dirs]) ->
+ case file:consult(filename:join(D, Name)) of
{ok,CfgList} ->
CfgList;
{error,enoent} ->
- Options = get(encoding_options),
- Includes = [I || {i,I} <- Options],
- read_config_file1(ModuleName,Includes);
+ read_config_file0(Name, Dirs);
{error,Reason} ->
Error = "error reading asn1 config file: " ++
file:format_error(Reason),
throw({error,Error})
- end.
-read_config_file1(ModuleName,[]) ->
- case filename:extension(ModuleName) of
- ".asn1config" ->
- no_config_file;
- _ ->
- read_config_file(lists:concat([ModuleName,".asn1config"]))
end;
-read_config_file1(ModuleName,[H|T]) ->
-% File = filename:join([H,lists:concat([ModuleName,'.asn1config'])]),
- File = filename:join([H,ModuleName]),
- case file:consult(File) of
- {ok,CfgList} ->
- CfgList;
- {error,enoent} ->
- read_config_file1(ModuleName,T);
- {error,Reason} ->
- Error = "error reading asn1 config file: " ++
- file:format_error(Reason),
- throw({error,Error})
+read_config_file0(_, []) ->
+ no_config_file.
+
+ensure_ext(ModuleName, Ext) ->
+ Name = filename:join([ModuleName]),
+ case filename:extension(Name) of
+ Ext -> Name;
+ _ -> Name ++ Ext
end.
get_config_info(CfgList,InfoType) ->
@@ -2402,8 +2420,10 @@ format_error({write_error,File,Reason}) ->
io_lib:format("writing output file ~s failed: ~s",
[File,file:format_error(Reason)]).
-is_error(S) when is_record(S, state) ->
- is_error(S#state.options);
+is_error(#state{options=Opts}) ->
+ is_error(Opts);
+is_error(#gen{options=Opts}) ->
+ is_error(Opts);
is_error(O) ->
lists:member(errors, O) orelse is_verbose(O).
@@ -2412,8 +2432,10 @@ is_warning(S) when is_record(S, state) ->
is_warning(O) ->
lists:member(warnings, O) orelse is_verbose(O).
-is_verbose(S) when is_record(S, state) ->
- is_verbose(S#state.options);
+is_verbose(#state{options=Opts}) ->
+ is_verbose(Opts);
+is_verbose(#gen{options=Opts}) ->
+ is_verbose(Opts);
is_verbose(O) ->
lists:member(verbose, O).
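%% Illustrative usage sketch, not part of this patch ('MyProtocol' and
%% 'MyType' are hypothetical names): with the asn1ct:encode/3 and
%% asn1ct:decode/3 wrappers removed above, callers use the generated
%% module directly, just as test_value/3 now does:
%%
%%   {ok,Bytes} = 'MyProtocol':encode('MyType', Value),
%%   {ok,Value} = 'MyProtocol':decode('MyType', iolist_to_binary(Bytes)).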
diff --git a/lib/asn1/src/asn1ct_check.erl b/lib/asn1/src/asn1ct_check.erl
index f2c895bfaa..4f04b78241 100644
--- a/lib/asn1/src/asn1ct_check.erl
+++ b/lib/asn1/src/asn1ct_check.erl
@@ -2239,12 +2239,18 @@ normalized_record(SorS,S,Value,Components,NameList) ->
case is_record_normalized(S,NewName,Value,length(Components)) of
true ->
Value;
- _ ->
+ false ->
NoComps = length(Components),
ListOfVals = normalize_seq_or_set(SorS,S,Value,Components,NameList,[]),
- NoComps = length(ListOfVals), %% Assert
- list_to_tuple([NewName|ListOfVals])
+ NoComps = length(ListOfVals), %Assertion.
+ case use_maps(S) of
+ false ->
+ list_to_tuple([NewName|ListOfVals]);
+ true ->
+ create_map_value(Components, ListOfVals)
+ end
end.
+
is_record_normalized(S,Name,V = #'Externalvaluereference'{},NumComps) ->
case get_referenced_type(S,V) of
{_M,#valuedef{type=_T1,value=V2}} ->
@@ -2253,9 +2259,20 @@ is_record_normalized(S,Name,V = #'Externalvaluereference'{},NumComps) ->
end;
is_record_normalized(_S,Name,Value,NumComps) when is_tuple(Value) ->
(tuple_size(Value) =:= (NumComps + 1)) andalso (element(1, Value) =:= Name);
+is_record_normalized(_S, _Name, Value, _NumComps) when is_map(Value) ->
+ true;
is_record_normalized(_,_,_,_) ->
false.
+use_maps(#state{options=Opts}) ->
+ lists:member(maps, Opts).
+
+create_map_value(Components, ListOfVals) ->
+ Zipped = lists:zip(Components, ListOfVals),
+ L = [{Name,V} || {#'ComponentType'{name=Name},V} <- Zipped,
+ V =/= asn1_NOVALUE],
+ maps:from_list(L).
+
normalize_seq_or_set(SorS, S,
[{#seqtag{val=Cname},V}|Vs],
[#'ComponentType'{name=Cname,typespec=TS}|Cs],
@@ -4192,7 +4209,7 @@ iof_associated_type1(S,C) ->
%% fieldname=[{typefieldreference,'Type'}],
fieldname={'Type',[]},
type=Typefield_type},
- IOFComponents =
+ IOFComponents0 =
[#'ComponentType'{name='type-id',
typespec=#type{tag=C1TypeTag,
def=ObjectIdentifier,
@@ -4209,6 +4226,7 @@ iof_associated_type1(S,C) ->
tablecinf=Comp2tablecinf},
prop=mandatory,
tags=[{'CONTEXT',0}]}],
+ IOFComponents = textual_order(IOFComponents0),
#'SEQUENCE'{tablecinf=TableCInf,
components=simplify_comps(IOFComponents)}.
@@ -4930,7 +4948,7 @@ componentrelation_leadingattr(S,CompList) ->
%%FIXME expand_ExtAddGroups([C#'ExtensionAdditionGroup'{components=ExtAdds}|T],
%% CurrPos,PosAcc,CompAcc) ->
-%% expand_ExtAddGroups(T,CurrPos+ L = lenght(ExtAdds),[{CurrPos,L}|PosAcc],ExtAdds++CompAcc);
+%% expand_ExtAddGroups(T,CurrPos+ L = length(ExtAdds),[{CurrPos,L}|PosAcc],ExtAdds++CompAcc);
%% expand_ExtAddGroups([C|T],CurrPos,PosAcc,CompAcc) ->
%% expand_ExtAddGroups(T,CurrPos+ 1,PosAcc,[C|CompAcc]);
%% expand_ExtAddGroups([],_CurrPos,PosAcc,CompAcc) ->
@@ -5673,7 +5691,8 @@ storeindb(S0, #module{name=ModName,typeorval=TVlist0}=M) ->
storeindb_1(S, #module{name=ModName}=M, TVlist0, TVlist) ->
NewM = M#module{typeorval=findtypes_and_values(TVlist0)},
- asn1_db:dbnew(ModName, S#state.erule),
+ Maps = lists:member(maps, S#state.options),
+ asn1_db:dbnew(ModName, S#state.erule, Maps),
asn1_db:dbput(ModName, 'MODULE', NewM),
asn1_db:dbput(ModName, TVlist),
include_default_class(S, NewM#module.name),
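%% Illustrative sketch, not part of this patch (a hypothetical SEQUENCE
%% with components a and b, where b is OPTIONAL): the same normalized
%% value is packed differently depending on the 'maps' option:
%%
%%   pack=record:  {'Seq',1,asn1_NOVALUE}
%%   pack=map:     #{a => 1}          %% absent components are omitted
%%
%% which is what create_map_value/2 above implements by dropping
%% asn1_NOVALUE entries.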
diff --git a/lib/asn1/src/asn1ct_constructed_ber_bin_v2.erl b/lib/asn1/src/asn1ct_constructed_ber_bin_v2.erl
index 325bea5879..16af09bca9 100644
--- a/lib/asn1/src/asn1ct_constructed_ber_bin_v2.erl
+++ b/lib/asn1/src/asn1ct_constructed_ber_bin_v2.erl
@@ -32,7 +32,7 @@
-include("asn1_records.hrl").
--import(asn1ct_gen, [emit/1,demit/1,get_record_name_prefix/0]).
+-import(asn1ct_gen, [emit/1,demit/1,get_record_name_prefix/1]).
-define(ASN1CT_GEN_BER,asn1ct_gen_ber_bin_v2).
@@ -57,7 +57,7 @@
%%===============================================================================
%%===============================================================================
-gen_encode_sequence(Erules,Typename,D) when is_record(D,type) ->
+gen_encode_sequence(Gen, Typename, #type{}=D) ->
asn1ct_name:start(),
asn1ct_name:new(term),
asn1ct_name:new(bytes),
@@ -67,8 +67,12 @@ gen_encode_sequence(Erules,Typename,D) when is_record(D,type) ->
ValName =
case Typename of
['EXTERNAL'] ->
+ Tr = case Gen of
+ #gen{pack=record} -> transform_to_EXTERNAL1990;
+ #gen{pack=map} -> transform_to_EXTERNAL1990_maps
+ end,
emit([indent(4),"NewVal = ",
- {call,ext,transform_to_EXTERNAL1990,["Val"]},
+ {call,ext,Tr,["Val"]},
com,nl]),
"NewVal";
_ ->
@@ -90,18 +94,9 @@ gen_encode_sequence(Erules,Typename,D) when is_record(D,type) ->
{Rl,El} -> Rl ++ El;
_ -> CompList
end,
-
-%% don't match recordname for now, because of compatibility reasons
-%% emit(["{'",asn1ct_gen:list2rname(Typename),"'"]),
- emit(["{_"]),
- case length(CompList1) of
- 0 ->
- true;
- CompListLen ->
- emit([","]),
- mkcindexlist([Tc || Tc <- lists:seq(1,CompListLen)])
- end,
- emit(["} = ",ValName,",",nl]),
+
+ enc_match_input(Gen, ValName, CompList1),
+
EncObj =
case TableConsInfo of
#simpletableattributes{usedclassfield=Used,
@@ -125,7 +120,7 @@ gen_encode_sequence(Erules,Typename,D) when is_record(D,type) ->
emit([ObjectEncode," = ",nl,
" ",{asis,ObjSetMod},":'getenc_",ObjSetName,
"'("]),
- ValueMatch = value_match(ValueIndex,
+ ValueMatch = value_match(Gen, ValueIndex,
lists:concat(["Cindex",N])),
emit([indent(35),ValueMatch,"),",nl]),
{AttrN,ObjectEncode};
@@ -144,7 +139,7 @@ gen_encode_sequence(Erules,Typename,D) when is_record(D,type) ->
end
end,
- gen_enc_sequence_call(Erules,Typename,CompList1,1,Ext,EncObj),
+ gen_enc_sequence_call(Gen, Typename, CompList1, 1, Ext, EncObj),
emit([nl," BytesSoFar = "]),
case SeqOrSet of
@@ -168,7 +163,36 @@ gen_encode_sequence(Erules,Typename,D) when is_record(D,type) ->
call(encode_tags, ["TagIn","BytesSoFar","LenSoFar"]),
emit([".",nl]).
-gen_decode_sequence(Erules,Typename,D) when is_record(D,type) ->
+enc_match_input(#gen{pack=record}, ValName, CompList) ->
+ Len = length(CompList),
+ Vars = [lists:concat(["Cindex",N]) || N <- lists:seq(1, Len)],
+ RecordName = "_",
+ emit(["{",lists:join(",", [RecordName|Vars]),"} = ",ValName,com,nl]);
+enc_match_input(#gen{pack=map}, ValName, CompList) ->
+ Len = length(CompList),
+ Vars = [lists:concat(["Cindex",N]) || N <- lists:seq(1, Len)],
+ Zipped = lists:zip(CompList, Vars),
+ M = [[{asis,Name},":=",Var] ||
+ {#'ComponentType'{prop=mandatory,name=Name},Var} <- Zipped],
+ case M of
+ [] ->
+ ok;
+ [_|_] ->
+ emit(["#{",lists:join(",", M),"} = ",ValName,com,nl])
+ end,
+ Os0 = [{Name,Var} ||
+ {#'ComponentType'{prop=Prop,name=Name},Var} <- Zipped,
+ Prop =/= mandatory],
+ F = fun({Name,Var}) ->
+ [Var," = case ",ValName," of\n"
+ " #{",{asis,Name},":=",Var,"_0} -> ",
+ Var,"_0;\n"
+ " _ -> ",atom_to_list(?MISSING_IN_MAP),"\n"
+ "end"]
+ end,
+ emit(lists:join(",\n", [F(E) || E <- Os0]++[[]])).
+
+gen_decode_sequence(Gen, Typename, #type{}=D) ->
asn1ct_name:start(),
asn1ct_name:new(tag),
#'SEQUENCE'{tablecinf=TableConsInfo,components=CList0} = D#type.def,
@@ -225,15 +249,20 @@ gen_decode_sequence(Erules,Typename,D) when is_record(D,type) ->
_ ->
{false,false}
end,
- RecordName = lists:concat([get_record_name_prefix(),
- asn1ct_gen:list2rname(Typename)]),
- case gen_dec_sequence_call(Erules,Typename,CompList2,Ext,DecObjInf) of
- no_terms -> % an empty sequence
- emit([nl,nl]),
- demit(["Result = "]), %dbg
- %% return value as record
+ RecordName0 = lists:concat([get_record_name_prefix(Gen),
+ asn1ct_gen:list2rname(Typename)]),
+ RecordName = list_to_atom(RecordName0),
+ case gen_dec_sequence_call(Gen, Typename, CompList2, Ext, DecObjInf) of
+ no_terms -> % an empty sequence
asn1ct_name:new(rb),
- emit([" {'",RecordName,"'}.",nl,nl]);
+ case Gen of
+ #gen{pack=record} ->
+ emit([nl,nl,
+ " {'",RecordName,"'}.",nl,nl]);
+ #gen{pack=map} ->
+ emit([nl,nl,
+ " #{}.",nl,nl])
+ end;
{LeadingAttrTerm,PostponedDecArgs} ->
emit([nl]),
case {LeadingAttrTerm,PostponedDecArgs} of
@@ -243,7 +272,7 @@ gen_decode_sequence(Erules,Typename,D) when is_record(D,type) ->
ok;
{[{ObjSetRef,LeadingAttr,Term}],PostponedDecArgs} ->
DecObj = asn1ct_gen:un_hyphen_var(lists:concat(['DecObj',LeadingAttr,Term])),
- ValueMatch = value_match(ValueIndex,Term),
+ ValueMatch = value_match(Gen, ValueIndex,Term),
{ObjSetMod,ObjSetName} = ObjSetRef,
emit([DecObj," =",nl,
" ",{asis,ObjSetMod},":'getdec_",ObjSetName,"'(",
@@ -263,22 +292,64 @@ gen_decode_sequence(Erules,Typename,D) when is_record(D,type) ->
"end,",nl])
end,
asn1ct_name:new(rb),
- case Typename of
- ['EXTERNAL'] ->
- emit([" OldFormat={'",RecordName,
- "', "]),
- mkvlist(asn1ct_name:all(term)),
- emit(["},",nl]),
- emit([" ",
- {call,ext,transform_to_EXTERNAL1994,
- ["OldFormat"]},".",nl]);
- _ ->
- emit([" {'",RecordName,"', "]),
- mkvlist(asn1ct_name:all(term)),
- emit(["}.",nl,nl])
- end
+ gen_dec_pack(Gen, RecordName, Typename, CompList),
+ emit([".",nl])
end.
+gen_dec_pack(Gen, RecordName, Typename, CompList) ->
+ case Typename of
+ ['EXTERNAL'] ->
+ dec_external(Gen, RecordName);
+ _ ->
+ asn1ct_name:new(res),
+ gen_dec_do_pack(Gen, RecordName, CompList),
+ emit([com,nl,
+ {curr,res}])
+ end.
+
+dec_external(#gen{pack=record}, RecordName) ->
+ All = [{var,Term} || Term <- asn1ct_name:all(term)],
+ Record = [{asis,RecordName}|All],
+ emit(["OldFormat={",lists:join(",", Record),"},",nl,
+ {call,ext,transform_to_EXTERNAL1994,
+ ["OldFormat"]}]);
+dec_external(#gen{pack=map}, _RecordName) ->
+ Vars = asn1ct_name:all(term),
+ Names = ['direct-reference','indirect-reference',
+ 'data-value-descriptor',encoding],
+ Zipped = lists:zip(Names, Vars),
+ MapInit = lists:join(",", [["'",N,"'=>",{var,V}] || {N,V} <- Zipped]),
+ emit(["OldFormat = #{",MapInit,"}",com,nl,
+ "ASN11994Format =",nl,
+ {call,ext,transform_to_EXTERNAL1994_maps,
+ ["OldFormat"]}]).
+
+gen_dec_do_pack(#gen{pack=record}, RecordName, _CompList) ->
+ All = asn1ct_name:all(term),
+ L = [{asis,RecordName}|[{var,Var} || Var <- All]],
+ emit([{curr,res}," = {",lists:join(",", L),"}"]);
+gen_dec_do_pack(#gen{pack=map}, _, CompList) ->
+ Zipped = lists:zip(CompList, asn1ct_name:all(term)),
+ PF = fun({#'ComponentType'{prop='OPTIONAL'},_}) -> false;
+ ({_,_}) -> true
+ end,
+ {Mandatory,Optional} = lists:partition(PF, Zipped),
+ L = [[{asis,Name},"=>",{var,Var}] ||
+ {#'ComponentType'{name=Name},Var} <- Mandatory],
+ emit([{curr,res}," = #{",lists:join(",", L),"}"]),
+ gen_dec_map_optional(Optional).
+
+gen_dec_map_optional([{#'ComponentType'{name=Name},Var}|T]) ->
+ asn1ct_name:new(res),
+ emit([com,nl,
+ {curr,res}," = case ",{var,Var}," of",nl,
+ " asn1_NOVALUE -> ",{prev,res},";",nl,
+ " _ -> ",{prev,res},"#{",{asis,Name},"=>",{var,Var},"}",nl,
+ "end"]),
+ gen_dec_map_optional(T);
+gen_dec_map_optional([]) ->
+ ok.
+
gen_dec_postponed_decs(_,[]) ->
emit(nl);
gen_dec_postponed_decs(DecObj,[{_Cname,{FirstPFN,PFNList},Term,
@@ -327,7 +398,7 @@ emit_opt_or_mand_check(Value,TmpTerm) ->
gen_encode_set(Erules,Typename,D) when is_record(D,type) ->
gen_encode_sequence(Erules,Typename,D).
-gen_decode_set(Erules,Typename,D) when is_record(D,type) ->
+gen_decode_set(Gen, Typename, #type{}=D) ->
asn1ct_name:start(),
%% asn1ct_name:new(term),
asn1ct_name:new(tag),
@@ -393,7 +464,7 @@ gen_decode_set(Erules,Typename,D) when is_record(D,type) ->
_ ->
emit(["SetFun = fun(FunTlv) ->", nl]),
emit(["case FunTlv of ",nl]),
- NextNum = gen_dec_set_cases(Erules,Typename,CompList,1),
+ NextNum = gen_dec_set_cases(Gen, Typename, CompList, 1),
emit([indent(6), {curr,else}," -> ",nl,
indent(9),"{",NextNum,", ",{curr,else},"}",nl]),
emit([indent(3),"end",nl]),
@@ -405,14 +476,17 @@ gen_decode_set(Erules,Typename,D) when is_record(D,type) ->
asn1ct_name:new(tlv)
end,
- RecordName = lists:concat([get_record_name_prefix(),
- asn1ct_gen:list2rname(Typename)]),
- case gen_dec_sequence_call(Erules,Typename,CompList,Ext,DecObjInf) of
- no_terms -> % an empty sequence
- emit([nl,nl]),
- demit(["Result = "]), %dbg
- %% return value as record
- emit([" {'",RecordName,"'}.",nl]);
+ RecordName0 = lists:concat([get_record_name_prefix(Gen),
+ asn1ct_gen:list2rname(Typename)]),
+ RecordName = list_to_atom(RecordName0),
+ case gen_dec_sequence_call(Gen, Typename, CompList, Ext, DecObjInf) of
+ no_terms -> % an empty SET
+ case Gen of
+ #gen{pack=record} ->
+ emit([nl,nl," {'",RecordName,"'}.",nl,nl]);
+ #gen{pack=map} ->
+ emit([nl,nl," #{}.",nl,nl])
+ end;
{LeadingAttrTerm,PostponedDecArgs} ->
emit([nl]),
case {LeadingAttrTerm,PostponedDecArgs} of
@@ -422,7 +496,7 @@ gen_decode_set(Erules,Typename,D) when is_record(D,type) ->
ok;
{[{ObjSetRef,LeadingAttr,Term}],PostponedDecArgs} ->
DecObj = asn1ct_gen:un_hyphen_var(lists:concat(['DecObj',LeadingAttr,Term])),
- ValueMatch = value_match(ValueIndex,Term),
+ ValueMatch = value_match(Gen, ValueIndex, Term),
{ObjSetMod,ObjSetName} = ObjSetRef,
emit([DecObj," =",nl,
" ",{asis,ObjSetMod},":'getdec_",ObjSetName,"'(",
@@ -441,9 +515,8 @@ gen_decode_set(Erules,Typename,D) when is_record(D,type) ->
"}}}) % extra fields not allowed",nl,
"end,",nl])
end,
- emit([" {'",RecordName,"', "]),
- mkvlist(asn1ct_name:all(term)),
- emit(["}.",nl])
+ gen_dec_pack(Gen, RecordName, Typename, CompList),
+ emit([".",nl])
end.
@@ -504,10 +577,8 @@ gen_decode_sof(Erules,TypeName,_InnerTypeName,D) when is_record(D,type) ->
emit([" || ",{curr,v}," <- ",{curr,tlv},"].",nl,nl,nl]).
-gen_encode_sof_components(Erules,Typename,SeqOrSetOf,Cont)
- when is_record(Cont,type)->
-
- {Objfun,Objfun_novar,EncObj} =
+gen_encode_sof_components(Gen, Typename, SeqOrSetOf, #type{}=Cont) ->
+ {Objfun,Objfun_novar,EncObj} =
case Cont#type.tablecinf of
[{objfun,_}|_R] ->
{", ObjFun",", _",{no_attr,"ObjFun"}};
@@ -517,20 +588,19 @@ gen_encode_sof_components(Erules,Typename,SeqOrSetOf,Cont)
emit(["'enc_",asn1ct_gen:list2name(Typename),
"_components'([]",Objfun_novar,", AccBytes, AccLen) -> ",nl]),
- case catch lists:member(der,get(encoding_options)) of
- true when SeqOrSetOf=='SET OF'->
+ case {Gen,SeqOrSetOf} of
+ {#gen{der=true},'SET OF'} ->
asn1ct_func:need({ber,dynamicsort_SETOF,1}),
emit([indent(3),
"{dynamicsort_SETOF(AccBytes),AccLen};",nl,nl]);
- _ ->
+ {_,_} ->
emit([indent(3),"{lists:reverse(AccBytes),AccLen};",nl,nl])
end,
emit(["'enc_",asn1ct_gen:list2name(Typename),
"_components'([H|T]",Objfun,",AccBytes, AccLen) ->",nl]),
TypeNameSuffix = asn1ct_gen:constructed_suffix(SeqOrSetOf,Cont#type.def),
- gen_enc_line(Erules,Typename,TypeNameSuffix,Cont,"H",3,
-% mandatory,"{EncBytes,EncLen} = ",EncObj),
- mandatory,EncObj),
+ gen_enc_line(Gen, Typename, TypeNameSuffix, Cont, "H", 3,
+ mandatory, EncObj),
emit([",",nl]),
emit([indent(3),"'enc_",asn1ct_gen:list2name(Typename),
"_components'(T",Objfun,","]),
@@ -1028,35 +1098,44 @@ gen_enc_line(Erules,TopType,Cname,Type,Element,Indent,OptOrMand,Assign,EncObj)
emit([nl,indent(7),"end"])
end.
-gen_optormand_case(mandatory, _Erules, _TopType, _Cname, _Type, _Element) ->
+gen_optormand_case(mandatory, _Gen, _TopType, _Cname, _Type, _Element) ->
ok;
-gen_optormand_case('OPTIONAL', Erules, _TopType, _Cname, _Type, Element) ->
+gen_optormand_case('OPTIONAL', Gen, _TopType, _Cname, _Type, Element) ->
emit([" case ",Element," of",nl]),
- emit([indent(9),"asn1_NOVALUE -> {",
- empty_lb(Erules),",0};",nl]),
+ Missing = case Gen of
+ #gen{pack=record} -> asn1_NOVALUE;
+ #gen{pack=map} -> ?MISSING_IN_MAP
+ end,
+ emit([indent(9),Missing," -> {",
+ empty_lb(Gen),",0};",nl]),
emit([indent(9),"_ ->",nl,indent(12)]);
-gen_optormand_case({'DEFAULT',DefaultValue}, Erules, _TopType,
+gen_optormand_case({'DEFAULT',DefaultValue}, Gen, _TopType,
_Cname, Type, Element) ->
CurrMod = get(currmod),
- case catch lists:member(der,get(encoding_options)) of
- true ->
- asn1ct_gen_check:emit(Type, DefaultValue, Element);
- _ ->
- emit([" case ",Element," of",nl]),
- emit([indent(9),"asn1_DEFAULT -> {",
- empty_lb(Erules),
- ",0};",nl]),
- case DefaultValue of
- #'Externalvaluereference'{module=CurrMod,
- value=V} ->
- emit([indent(9),"?",{asis,V}," -> {",
- empty_lb(Erules),",0};",nl]);
- _ ->
- emit([indent(9),{asis,
- DefaultValue}," -> {",
- empty_lb(Erules),",0};",nl])
- end,
- emit([indent(9),"_ ->",nl,indent(12)])
+ case Gen of
+ #gen{erule=ber,der=true} ->
+ asn1ct_gen_check:emit(Gen, Type, DefaultValue, Element);
+ #gen{erule=ber,der=false,pack=Pack} ->
+ Ind9 = indent(9),
+ DefMarker = case Pack of
+ record -> asn1_DEFAULT;
+ map -> ?MISSING_IN_MAP
+ end,
+ emit([" case ",Element," of",nl,
+ Ind9,{asis,DefMarker}," ->",nl,
+ Ind9,indent(3),"{",empty_lb(Gen),",0};",nl,
+ Ind9,"_ when ",Element," =:= "]),
+ Dv = case DefaultValue of
+ #'Externalvaluereference'{module=CurrMod,
+ value=V} ->
+ ["?",{asis,V}];
+ _ ->
+ [{asis,DefaultValue}]
+ end,
+ emit(Dv++[" ->",nl,
+ Ind9,indent(3),"{",empty_lb(Gen),",0};",nl,
+ Ind9,"_ ->",nl,
+ indent(12)])
end.
%% Use for SEQUENCE OF and CHOICE.
@@ -1207,7 +1286,7 @@ gen_dec_call({typefield,_},_,_,Cname,Type,BytesVar,Tag,_,_,_DecObjInf,OptOrMandC
(Type#type.def)#'ObjectClassFieldType'.fieldname,
[{Cname,RefedFieldName,asn1ct_gen:mk_var(asn1ct_name:curr(term)),
asn1ct_gen:mk_var(asn1ct_name:curr(tmpterm)),Tag,OptOrMandComp}];
-gen_dec_call(InnerType, _Erules, TopType, Cname, Type, BytesVar,
+gen_dec_call(InnerType, Gen, TopType, Cname, Type, BytesVar,
Tag, _PrimOptOrMand, _OptOrMand, DecObjInf,_) ->
WhatKind = asn1ct_gen:type(InnerType),
gen_dec_call1(WhatKind, InnerType, TopType, Cname,
@@ -1215,7 +1294,7 @@ gen_dec_call(InnerType, _Erules, TopType, Cname, Type, BytesVar,
case DecObjInf of
{Cname,{_,OSet,_UniqueFName,ValIndex}} ->
Term = asn1ct_gen:mk_var(asn1ct_name:curr(term)),
- ValueMatch = value_match(ValIndex,Term),
+ ValueMatch = value_match(Gen, ValIndex, Term),
{ObjSetMod,ObjSetName} = OSet,
emit([",",nl,"ObjFun = ",{asis,ObjSetMod},":'getdec_",ObjSetName,
"'(",ValueMatch,")"]);
@@ -1340,19 +1419,6 @@ gen_dec_call1(WhatKind, _, TopType, Cname, Type, BytesVar, Tag) ->
indent(N) ->
lists:duplicate(N,32). % 32 = space
-mkcindexlist([H,T1|T], Sep) -> % Sep is a string e.g ", " or "+ "
- emit(["Cindex",H,Sep]),
- mkcindexlist([T1|T], Sep);
-mkcindexlist([H|T], Sep) ->
- emit(["Cindex",H]),
- mkcindexlist(T, Sep);
-mkcindexlist([], _) ->
- true.
-
-mkcindexlist(L) ->
- mkcindexlist(L,", ").
-
-
 mkvlist([H,T1|T], Sep) -> % Sep is a string, e.g. ", " or "+ "
emit([{var,H},Sep]),
mkvlist([T1|T], Sep);
@@ -1429,19 +1495,25 @@ mkfuncname(TopType,Cname,WhatKind,Prefix,Suffix) ->
{F, "?MODULE", F}
end.
-empty_lb(ber) ->
+empty_lb(#gen{erule=ber}) ->
"<<>>".
-value_match(Index,Value) when is_atom(Value) ->
- value_match(Index,atom_to_list(Value));
-value_match([],Value) ->
+value_match(#gen{pack=record}, VIs, Value) ->
+ value_match_rec(VIs, Value);
+value_match(#gen{pack=map}, VIs, Value) ->
+ value_match_map(VIs, Value).
+
+value_match_rec([], Value) ->
+ Value;
+value_match_rec([{VI,_}|VIs], Value0) ->
+ Value = value_match_rec(VIs, Value0),
+ lists:concat(["element(",VI,", ",Value,")"]).
+
+value_match_map([], Value) ->
Value;
-value_match([{VI,_}|VIs],Value) ->
- value_match1(Value,VIs,lists:concat(["element(",VI,","]),1).
-value_match1(Value,[],Acc,Depth) ->
- Acc ++ Value ++ lists:concat(lists:duplicate(Depth,")"));
-value_match1(Value,[{VI,_}|VIs],Acc,Depth) ->
- value_match1(Value,VIs,Acc++lists:concat(["element(",VI,","]),Depth+1).
+value_match_map([{_,Name}|VIs], Value0) ->
+ Value = value_match_map(VIs, Value0),
+ lists:concat(["maps:get(",Name,", ",Value,")"]).
call(F, Args) ->
asn1ct_func:call(ber, F, Args).
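%% Illustrative sketch, not part of this patch (the value index
%% [{2,b},{3,c}] and the variable "Val" are made up): the two
%% value_match/3 variants generate
%%
%%   pack=record:  "element(2, element(3, Val))"
%%   pack=map:     "maps:get(b, maps:get(c, Val))"
%%
%% i.e. positional access into the record tuple versus access by
%% component name in the map.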
diff --git a/lib/asn1/src/asn1ct_constructed_per.erl b/lib/asn1/src/asn1ct_constructed_per.erl
index a34b25182c..9cd9864b80 100644
--- a/lib/asn1/src/asn1ct_constructed_per.erl
+++ b/lib/asn1/src/asn1ct_constructed_per.erl
@@ -32,17 +32,27 @@
-include("asn1_records.hrl").
%-compile(export_all).
--import(asn1ct_gen, [emit/1,demit/1,get_record_name_prefix/0]).
--import(asn1ct_func, [call/3]).
+-import(asn1ct_gen, [emit/1,demit/1,get_record_name_prefix/1]).
+
+-type type_name() :: any().
+
%% ENCODE GENERATOR FOR SEQUENCE TYPE ** **********
-gen_encode_set(Erules,TypeName,D) ->
- gen_encode_constructed(Erules,TypeName,D).
+-spec gen_encode_set(Gen, TypeName, #type{}) -> 'ok' when
+ Gen :: #gen{},
+ TypeName :: type_name().
+
+gen_encode_set(Gen, TypeName, D) ->
+ gen_encode_constructed(Gen, TypeName, D).
+
+-spec gen_encode_sequence(Gen, TypeName, #type{}) -> 'ok' when
+ Gen :: #gen{},
+ TypeName :: type_name().
-gen_encode_sequence(Erules,TypeName,D) ->
- gen_encode_constructed(Erules,TypeName,D).
+gen_encode_sequence(Gen, TypeName, D) ->
+ gen_encode_constructed(Gen, TypeName, D).
gen_encode_constructed(Erule, Typename, #type{}=D) ->
asn1ct_name:start(),
@@ -50,88 +60,23 @@ gen_encode_constructed(Erule, Typename, #type{}=D) ->
asn1ct_imm:enc_cg(Imm, is_aligned(Erule)),
emit([".",nl]).
-gen_encode_constructed_imm(Erule, Typename, #type{}=D) ->
- {ExtAddGroup,TmpCompList,TableConsInfo} =
- case D#type.def of
- #'SEQUENCE'{tablecinf=TCI,components=CL,extaddgroup=ExtAddGroup0} ->
- {ExtAddGroup0,CL,TCI};
- #'SET'{tablecinf=TCI,components=CL} ->
- {undefined,CL,TCI}
- end,
-
- CompList = case ExtAddGroup of
- undefined ->
- TmpCompList;
- _ when is_integer(ExtAddGroup) ->
- %% This is a fake SEQUENCE representing an ExtensionAdditionGroup
- %% Reset the textual order so we get the right
- %% index of the components
- [Comp#'ComponentType'{textual_order=undefined}||
- Comp<-TmpCompList]
- end,
- ExternalImm =
- case Typename of
- ['EXTERNAL'] ->
- Next = asn1ct_gen:mk_var(asn1ct_name:next(val)),
- Curr = asn1ct_gen:mk_var(asn1ct_name:curr(val)),
- asn1ct_name:new(val),
- [{call,ext,transform_to_EXTERNAL1990,[{var,Curr}],{var,Next}}];
- _ ->
- []
- end,
- Aligned = is_aligned(Erule),
- Value0 = make_var(val),
+gen_encode_constructed_imm(Gen, Typename, #type{}=D) ->
+ {CompList,TableConsInfo} = enc_complist(D),
+ ExternalImm = external_imm(Gen, Typename),
Optionals = optionals(to_textual_order(CompList)),
- ImmOptionals = [asn1ct_imm:per_enc_optional(Value0, Opt, Aligned) ||
- Opt <- Optionals],
+ ImmOptionals = enc_optionals(Gen, Optionals),
Ext = extensible_enc(CompList),
+ Aligned = is_aligned(Gen),
ExtImm = case Ext of
{ext,ExtPos,NumExt} when NumExt > 0 ->
- gen_encode_extaddgroup(CompList),
+ gen_encode_extaddgroup(Gen, CompList),
Value = make_var(val),
- asn1ct_imm:per_enc_extensions(Value, ExtPos,
- NumExt, Aligned);
+ enc_extensions(Gen, Value, ExtPos, NumExt, Aligned);
_ ->
[]
end,
- {EncObj,ObjSetImm} =
- case TableConsInfo of
- #simpletableattributes{usedclassfield=Used,
- uniqueclassfield=Unique} when Used /= Unique ->
- {false,[]};
- %% ObjectSet, name of the object set in constraints
- %%
- %%{ObjectSet,AttrN,N,UniqueFieldName} -> %% N is index of attribute that determines constraint
- #simpletableattributes{objectsetname=ObjectSet,
- c_name=AttrN,
- c_index=N,
- usedclassfield=UniqueFieldName,
- uniqueclassfield=UniqueFieldName,
- valueindex=ValueIndex0
- } -> %% N is index of attribute that determines constraint
- {Module,ObjSetName} = ObjectSet,
- #typedef{typespec=#'ObjectSet'{gen=Gen}} =
- asn1_db:dbget(Module, ObjSetName),
- case Gen of
- true ->
- ValueIndex = ValueIndex0 ++ [{N+1,top}],
- Val = make_var(val),
- {ObjSetImm0,Dst} = enc_dig_out_value(ValueIndex, Val),
- {{AttrN,Dst},ObjSetImm0};
- false ->
- {false,[]}
- end;
- _ ->
- case D#type.tablecinf of
- [{objfun,_}|_] ->
- %% when the simpletableattributes was at an outer
- %% level and the objfun has been passed through the
- %% function call
- {{"got objfun through args",{var,"ObjFun"}},[]};
- _ ->
- {false,[]}
- end
- end,
+ MatchImm = enc_map_match(Gen, CompList),
+ {EncObj,ObjSetImm} = enc_table(Gen, TableConsInfo, D),
ImmSetExt =
case Ext of
{ext,_Pos,NumExt2} when NumExt2 > 0 ->
@@ -141,38 +86,195 @@ gen_encode_constructed_imm(Erule, Typename, #type{}=D) ->
_ ->
[]
end,
- ImmBody = gen_enc_components_call(Erule, Typename, CompList, EncObj, Ext),
- ExternalImm ++ ExtImm ++ ObjSetImm ++
+ ImmBody = gen_enc_components_call(Gen, Typename, CompList, EncObj, Ext),
+ ExternalImm ++ MatchImm ++ ExtImm ++ ObjSetImm ++
asn1ct_imm:enc_append([ImmSetExt] ++ ImmOptionals ++ ImmBody).
-gen_encode_extaddgroup(CompList) ->
+external_imm(Gen, ['EXTERNAL']) ->
+ Next = asn1ct_gen:mk_var(asn1ct_name:next(val)),
+ Curr = asn1ct_gen:mk_var(asn1ct_name:curr(val)),
+ asn1ct_name:new(val),
+ F = case Gen of
+ #gen{pack=record} -> transform_to_EXTERNAL1990;
+ #gen{pack=map} -> transform_to_EXTERNAL1990_maps
+ end,
+ [{call,ext,F,[{var,Curr}],{var,Next}}];
+external_imm(_, _) ->
+ [].
+
+enc_extensions(#gen{pack=record}, Value, ExtPos, NumExt, Aligned) ->
+ asn1ct_imm:per_enc_extensions(Value, ExtPos, NumExt, Aligned);
+enc_extensions(#gen{pack=map}, Value, ExtPos, NumExt, Aligned) ->
+ Vars = [{var,lists:concat(["Input@",Pos])} ||
+ Pos <- lists:seq(ExtPos, ExtPos+NumExt-1)],
+ Undefined = atom_to_list(?MISSING_IN_MAP),
+ asn1ct_imm:per_enc_extensions_map(Value, Vars, Undefined, Aligned).
+
+enc_complist(#type{def=Def}) ->
+ case Def of
+ #'SEQUENCE'{tablecinf=TCI,components=CL0,extaddgroup=ExtAddGroup} ->
+ case ExtAddGroup of
+ undefined ->
+ {CL0,TCI};
+ _ when is_integer(ExtAddGroup) ->
+ %% This is a fake SEQUENCE representing an
+ %% ExtensionAdditionGroup. Renumber the textual
+ %% order so we get the right index of the
+ %% components.
+ CL = add_textual_order(CL0),
+ {CL,TCI}
+ end;
+ #'SET'{tablecinf=TCI,components=CL} ->
+ {CL,TCI}
+ end.
+
+enc_table(Gen, #simpletableattributes{objectsetname=ObjectSet,
+ c_name=AttrN,
+ c_index=N,
+ usedclassfield=UniqueFieldName,
+ uniqueclassfield=UniqueFieldName,
+ valueindex=ValueIndex0}, _) ->
+ {Module,ObjSetName} = ObjectSet,
+ #typedef{typespec=#'ObjectSet'{gen=MustGen}} =
+ asn1_db:dbget(Module, ObjSetName),
+ case MustGen of
+ true ->
+ ValueIndex = ValueIndex0 ++ [{N+1,'ASN1_top'}],
+ Val = make_var(val),
+ {ObjSetImm,Dst} = enc_dig_out_value(Gen, ValueIndex, Val),
+ {{AttrN,Dst},ObjSetImm};
+ false ->
+ {false,[]}
+ end;
+enc_table(_Gen, #simpletableattributes{}, _) ->
+ {false,[]};
+enc_table(_Gen, _, #type{tablecinf=TCInf}) ->
+ case TCInf of
+ [{objfun,_}|_] ->
+ %% The simpletableattributes was at an outer
+ %% level and the objfun has been passed through the
+ %% function call.
+ {{"got objfun through args",{var,"ObjFun"}},[]};
+ _ ->
+ {false,[]}
+ end.
+
+enc_optionals(Gen, Optionals) ->
+ Var = make_var(val),
+ enc_optionals_1(Gen, Optionals, Var).
+
+enc_optionals_1(#gen{pack=record}=Gen, [{Pos,DefVals}|T], Var) ->
+ {Imm0,Element} = asn1ct_imm:enc_element(Pos+1, Var),
+ Imm = asn1ct_imm:per_enc_optional(Element, DefVals),
+ [Imm0++Imm|enc_optionals_1(Gen, T, Var)];
+enc_optionals_1(#gen{pack=map}=Gen, [{Pos,DefVals0}|T], V) ->
+ Var = {var,lists:concat(["Input@",Pos])},
+ DefVals = translate_missing_value(Gen, DefVals0),
+ Imm = asn1ct_imm:per_enc_optional(Var, DefVals),
+ [Imm|enc_optionals_1(Gen, T, V)];
+enc_optionals_1(_, [], _) ->
+ [].
+
+enc_map_match(#gen{pack=record}, _Cs) ->
+ [];
+enc_map_match(#gen{pack=map}, Cs0) ->
+ Var0 = "Input",
+ Cs = enc_flatten_components(Cs0),
+ M = [[quote_atom(Name),":=",lists:concat([Var0,"@",Order])] ||
+ #'ComponentType'{prop=mandatory,name=Name,
+ textual_order=Order} <- Cs],
+ Mand = case M of
+ [] ->
+ [];
+ [_|_] ->
+ Patt = {expr,lists:flatten(["#{",lists:join(",", M),"}"])},
+ [{assign,Patt,{var,asn1ct_name:curr(val)}}]
+ end,
+
+ Os0 = [{Name,Order} ||
+ #'ComponentType'{prop=Prop,name=Name,
+ textual_order=Order} <- Cs,
+ Prop =/= mandatory],
+ {var,Val} = make_var(val),
+ F = fun({Name,Order}) ->
+ Var = lists:concat([Var0,"@",Order]),
+ P0 = ["case ",Val," of\n"
+ " #{",quote_atom(Name),":=",Var,"_0} -> ",
+ Var,"_0;\n"
+ " _ -> ",atom_to_list(?MISSING_IN_MAP),"\n"
+ "end"],
+ P = lists:flatten(P0),
+ {assign,{var,Var},P}
+ end,
+ Os = [F(O) || O <- Os0],
+ Mand ++ Os.
+
+enc_flatten_components({Root1,Ext0,Root2}=CL) ->
+ {_,Gs} = extgroup_pos_and_length(CL),
+ Ext = wrap_extensionAdditionGroups(Ext0, Gs),
+ Root1 ++ Root2 ++ [mark_optional(C) || C <- Ext];
+enc_flatten_components({Root,Ext}) ->
+ enc_flatten_components({Root,Ext,[]});
+enc_flatten_components(Cs) ->
+ Cs.
+
+gen_encode_extaddgroup(#gen{pack=record}, CompList) ->
case extgroup_pos_and_length(CompList) of
{extgrouppos,[]} ->
ok;
{extgrouppos,ExtGroupPosLenList} ->
- _ = [do_gen_encode_extaddgroup(G) || G <- ExtGroupPosLenList],
+ _ = [gen_encode_eag_record(G) ||
+ G <- ExtGroupPosLenList],
ok
- end.
+ end;
+gen_encode_extaddgroup(#gen{pack=map}, Cs0) ->
+ Cs = enc_flatten_components(Cs0),
+ gen_encode_eag_map(Cs).
+
+gen_encode_eag_map([#'ComponentType'{name=Group,typespec=Type}|Cs]) ->
+ case Type of
+ #type{def=#'SEQUENCE'{extaddgroup=G,components=GCs0}}
+ when is_integer(G) ->
+ Ns = [N || #'ComponentType'{name=N,prop=mandatory} <- GCs0],
+ test_for_mandatory(Ns, Group),
+ gen_encode_eag_map(Cs);
+ _ ->
+ gen_encode_eag_map(Cs)
+ end;
+gen_encode_eag_map([]) ->
+ ok.
+
+test_for_mandatory([Mand|_], Group) ->
+ emit([{next,val}," = case ",{curr,val}," of",nl,
+ "#{",quote_atom(Mand),":=_} -> ",
+ {curr,val},"#{",{asis,Group},"=>",{curr,val},"};",nl,
+ "#{} -> ",{curr,val},nl,
+ "end,",nl]),
+ asn1ct_name:new(val);
+test_for_mandatory([], _) ->
+ ok.
-do_gen_encode_extaddgroup({ActualGroupPos,GroupVirtualPos,GroupLen}) ->
+gen_encode_eag_record({ActualPos,VirtualPos,Len}) ->
Val = asn1ct_gen:mk_var(asn1ct_name:curr(val)),
- Elements = make_elements(GroupVirtualPos+1,
- Val,
- lists:seq(1, GroupLen)),
- Expr = any_non_value(GroupVirtualPos+1, Val, GroupLen, ""),
+ Elements = get_input_vars(Val, VirtualPos, Len),
+ Expr = any_non_value(Val, VirtualPos, Len),
emit([{next,val}," = case ",Expr," of",nl,
- "false -> setelement(",{asis,ActualGroupPos+1},", ",
+ "false -> setelement(",{asis,ActualPos+1},", ",
{curr,val},", asn1_NOVALUE);",nl,
- "true -> setelement(",{asis,ActualGroupPos+1},", ",
+ "true -> setelement(",{asis,ActualPos+1},", ",
{curr,val},", {extaddgroup,", Elements,"})",nl,
"end,",nl]),
asn1ct_name:new(val).
-any_non_value(_, _, 0, _) ->
+any_non_value(Val, Pos, N) ->
+ L = any_non_value_1(Val, Pos, N),
+ lists:join(" orelse ", L).
+
+any_non_value_1(_, _, 0) ->
[];
-any_non_value(Pos, Val, N, Sep) ->
- Sep ++ [make_element(Pos, Val)," =/= asn1_NOVALUE"] ++
- any_non_value(Pos+1, Val, N-1, [" orelse",nl]).
+any_non_value_1(Val, Pos, N) ->
+ Var = get_input_var(Val, Pos),
+ [Var ++ " =/= asn1_NOVALUE"|any_non_value_1(Val, Pos+1, N-1)].
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% generate decode function for SEQUENCE and SET
@@ -306,55 +408,105 @@ gen_dec_constructed_imm(Erule, Typename, #type{}=D) ->
{DecObjInf,_,_} = ObjSetInfo,
EmitComp = gen_dec_components_call(Erule, Typename, CompList,
DecObjInf, Ext, length(Optionals)),
- EmitRest = fun({AccTerm,AccBytes}) ->
- gen_dec_constructed_imm_2(Erule, Typename,
- CompList,
- ObjSetInfo,
- AccTerm, AccBytes)
- end,
- [EmitExt,EmitOpt|EmitComp++[{safe,EmitRest}]].
+ EmitObjSets = gen_dec_objsets_fun(Erule, ObjSetInfo),
+ EmitPack = fun(_) ->
+ gen_dec_pack(Erule, Typename, CompList)
+ end,
+ RestGroup = {group,[{safe,EmitObjSets},{safe,EmitPack}]},
+ [EmitExt,EmitOpt|EmitComp++[RestGroup]].
+
+gen_dec_objsets_fun(Gen, ObjSetInfo) ->
+ fun({AccTerm,AccBytes}) ->
+ {_,_UniqueFName,ValueIndex} = ObjSetInfo,
+ case {AccTerm,AccBytes} of
+ {[],[]} ->
+ ok;
+ {_,[]} ->
+ ok;
+ {[{ObjSet,LeadingAttr,Term}],ListOfOpenTypes} ->
+ ValueMatch = value_match(Gen, ValueIndex, Term),
+ _ = [begin
+ gen_dec_open_type(Gen, ValueMatch, ObjSet,
+ LeadingAttr, T),
+ emit([com,nl])
+ end || T <- ListOfOpenTypes],
+ ok
+ end
+ end.
-gen_dec_constructed_imm_2(Erule, Typename, CompList,
- ObjSetInfo, AccTerm, AccBytes) ->
- {_,_UniqueFName,ValueIndex} = ObjSetInfo,
- case {AccTerm,AccBytes} of
- {[],[]} ->
- ok;
- {_,[]} ->
- ok;
- {[{ObjSet,LeadingAttr,Term}],ListOfOpenTypes} ->
- ValueMatch = value_match(ValueIndex, Term),
- _ = [begin
- gen_dec_open_type(Erule, ValueMatch, ObjSet,
- LeadingAttr, T),
- emit([com,nl])
- end || T <- ListOfOpenTypes],
- ok
- end,
- %% we don't return named lists any more Cnames = mkcnamelist(CompList),
- demit({"Result = "}), %dbg
- %% return value as record
- RecordName = record_name(Typename),
+gen_dec_pack(Gen, Typename, CompList) ->
case Typename of
['EXTERNAL'] ->
- emit({" OldFormat={'",RecordName,
- "'"}),
- mkvlist(asn1ct_name:all(term)),
- emit({"},",nl}),
- emit([" ASN11994Format =",nl,
- " ",
- {call,ext,transform_to_EXTERNAL1994,
- ["OldFormat"]},com,nl]),
- emit(" {ASN11994Format,");
+ dec_external(Gen, Typename);
_ ->
- emit(["{{'",RecordName,"'"]),
- %% CompList is used here because we don't want
- %% ExtensionAdditionGroups to be wrapped in SEQUENCES when
- %% we are ordering the fields according to textual order
- mkvlist(textual_order(to_encoding_order(CompList),asn1ct_name:all(term))),
- emit("},")
- end,
- emit({{curr,bytes},"}"}).
+ asn1ct_name:new(res),
+ gen_dec_do_pack(Gen, Typename, CompList),
+ emit([com,nl,
+ "{",{curr,res},",",{curr,bytes},"}"])
+ end.
+
+dec_external(#gen{pack=record}=Gen, Typename) ->
+ RecordName = list_to_atom(record_name(Gen, Typename)),
+ All = [{var,Term} || Term <- asn1ct_name:all(term)],
+ Record = [{asis,RecordName}|All],
+ emit(["OldFormat={",lists:join(",", Record),"},",nl,
+ "ASN11994Format =",nl,
+ {call,ext,transform_to_EXTERNAL1994,
+ ["OldFormat"]},com,nl,
+ "{ASN11994Format,",{curr,bytes},"}"]);
+dec_external(#gen{pack=map}, _Typename) ->
+ Vars = asn1ct_name:all(term),
+ Names = ['direct-reference','indirect-reference',
+ 'data-value-descriptor',encoding],
+ Zipped = lists:zip(Names, Vars),
+ MapInit = lists:join(",", [["'",N,"'=>",{var,V}] || {N,V} <- Zipped]),
+ emit(["OldFormat = #{",MapInit,"}",com,nl,
+ "ASN11994Format =",nl,
+ {call,ext,transform_to_EXTERNAL1994_maps,
+ ["OldFormat"]},com,nl,
+ "{ASN11994Format,",{curr,bytes},"}"]).
+
+gen_dec_do_pack(#gen{pack=record}=Gen, TypeName, CompList) ->
+ Zipped0 = zip_components(CompList, asn1ct_name:all(term)),
+ Zipped = textual_order(Zipped0),
+ RecordName = ["'",record_name(Gen, TypeName),"'"],
+ L = [RecordName|[{var,Var} || {_,Var} <- Zipped]],
+ emit([{curr,res}," = {",lists:join(",", L),"}"]);
+gen_dec_do_pack(#gen{pack=map}, _, CompList0) ->
+ CompList = enc_flatten_components(CompList0),
+ Zipped0 = zip_components(CompList, asn1ct_name:all(term)),
+ Zipped = textual_order(Zipped0),
+ PF = fun({#'ComponentType'{prop='OPTIONAL'},_}) -> false;
+ ({_,_}) -> true
+ end,
+ {Mandatory,Optional} = lists:partition(PF, Zipped),
+ L = [[{asis,Name},"=>",{var,Var}] ||
+ {#'ComponentType'{name=Name},Var} <- Mandatory],
+ emit([{curr,res}," = #{",lists:join(",", L),"}"]),
+ gen_dec_map_optional(Optional),
+ gen_dec_merge_maps(asn1ct_name:all(map)).
+
+gen_dec_map_optional([{#'ComponentType'{name=Name},Var}|T]) ->
+ asn1ct_name:new(res),
+ emit([com,nl,
+ {curr,res}," = case ",{var,Var}," of",nl,
+ " asn1_NOVALUE -> ",{prev,res},";",nl,
+ " _ -> ",{prev,res},"#{",{asis,Name},"=>",{var,Var},"}",nl,
+ "end"]),
+ gen_dec_map_optional(T);
+gen_dec_map_optional([]) ->
+ ok.
+
+gen_dec_merge_maps([M|Ms]) ->
+ asn1ct_name:new(res),
+ emit([com,nl,
+ {curr,res}," = maps:merge(",{prev,res},", ",{var,M},")"]),
+ gen_dec_merge_maps(Ms);
+gen_dec_merge_maps([]) ->
+ ok.
+
+quote_atom(A) when is_atom(A) ->
+ io_lib:format("~p", [A]).
%% record_name([TypeName]) -> RecordNameString
%% Construct a record name for the constructed type, ignoring any
@@ -362,10 +514,10 @@ gen_dec_constructed_imm_2(Erule, Typename, CompList,
%% group. Such fake sequences never appear as a top type, and their
%% name always start with "ExtAddGroup".
-record_name(Typename0) ->
+record_name(Gen, Typename0) ->
[TopType|Typename1] = lists:reverse(Typename0),
Typename = filter_ext_add_groups(Typename1, [TopType]),
- lists:concat([get_record_name_prefix(),
+ lists:concat([get_record_name_prefix(Gen),
asn1ct_gen:list2rname(Typename)]).
filter_ext_add_groups([H|T], Acc) when is_atom(H) ->
@@ -379,17 +531,26 @@ filter_ext_add_groups([H|T], Acc) ->
filter_ext_add_groups(T, [H|Acc]);
filter_ext_add_groups([], Acc) -> Acc.
-textual_order([#'ComponentType'{textual_order=undefined}|_],TermList) ->
- TermList;
-textual_order(CompList,TermList) when is_list(CompList) ->
- OrderList = [Ix||#'ComponentType'{textual_order=Ix} <- CompList],
- [Term||{_,Term}<-
- lists:sort(lists:zip(OrderList,
- lists:sublist(TermList,length(OrderList))))];
- %% sublist is just because Termlist can sometimes be longer than
- %% OrderList, which it really shouldn't
-textual_order({Root,Ext},TermList) ->
- textual_order(Root ++ Ext,TermList).
+zip_components({Root,Ext}, Vars) ->
+ zip_components({Root,Ext,[]}, Vars);
+zip_components({R1,Ext0,R2}, Vars) ->
+ Ext = [mark_optional(C) || C <- Ext0],
+ zip_components(R1++R2++Ext, Vars);
+zip_components(Cs, Vars) when is_list(Cs) ->
+ zip_components_1(Cs, Vars).
+
+zip_components_1([#'ComponentType'{}=C|Cs], [V|Vs]) ->
+ [{C,V}|zip_components_1(Cs, Vs)];
+zip_components_1([_|Cs], Vs) ->
+ zip_components_1(Cs, Vs);
+zip_components_1([], []) ->
+ [].
+
+textual_order([{#'ComponentType'{textual_order=undefined},_}|_]=L) ->
+ L;
+textual_order(L0) ->
+ L = [{Ix,P} || {#'ComponentType'{textual_order=Ix},_}=P <- L0],
+ [C || {_,C} <- lists:sort(L)].
to_textual_order({Root,Ext}) ->
{to_textual_order(Root),Ext};
@@ -458,7 +619,7 @@ dec_objset_default(N, _, _, true) ->
end]).
dec_objset_1(Erule, N, {Id,Obj}, RestFields, Typename) ->
- emit([{asis,N},"(Bytes, ",{asis,Id},") ->",nl]),
+ emit([{asis,N},"(Bytes, Id) when Id =:= ",{asis,Id}," ->",nl]),
dec_objset_2(Erule, Obj, RestFields, Typename).
dec_objset_2(Erule, Obj, RestFields0, Typename) ->
@@ -595,8 +756,7 @@ do_gen_decode_sof(Erules, Typename, SeqOrSetOf, D) ->
emit([",",nl,
{asis,F},"(",Num,", ",Buf,ObjFun,", [])"]).
-is_aligned(per) -> true;
-is_aligned(uper) -> false.
+is_aligned(#gen{erule=per,aligned=Aligned}) -> Aligned.
gen_decode_length(Constraint, Erule) ->
emit(["%% Length with constraint ",{asis,Constraint},nl]),
@@ -640,22 +800,7 @@ gen_decode_sof_components(Erule, Name, Typename, SeqOrSetOf, Cont) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-% General and special help functions (not exported)
-
-mkvlist([H|T]) ->
- emit(","),
- mkvlist2([H|T]);
-mkvlist([]) ->
- true.
-mkvlist2([H,T1|T]) ->
- emit({{var,H},","}),
- mkvlist2([T1|T]);
-mkvlist2([H|T]) ->
- emit({{var,H}}),
- mkvlist2(T);
-mkvlist2([]) ->
- true.
-
+%% General and special help functions (not exported)
extensible_dec(CompList) when is_list(CompList) ->
noext;
@@ -728,28 +873,26 @@ gen_dec_optionals(Optionals) ->
{imm,Imm0,E}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% Produce a list with positions (in the Value record) where
-%% there are optional components, start with 2 because first element
-%% is the record name
-
-optionals({L1,Ext,L2}) ->
- Opt1 = optionals(L1,[],2),
- ExtComps = length([C||C = #'ComponentType'{}<-Ext]),
- Opt2 = optionals(L2,[],2+length(L1)+ExtComps),
- Opt1 ++ Opt2;
-optionals({L,_Ext}) -> optionals(L,[],2);
-optionals(L) -> optionals(L,[],2).
-optionals([#'ComponentType'{prop='OPTIONAL'}|Rest], Acc, Pos) ->
- optionals(Rest, [Pos|Acc], Pos+1);
-optionals([#'ComponentType'{typespec=T,prop={'DEFAULT',Val}}|Rest],
- Acc, Pos) ->
+optionals({Root1,Ext,Root2}) ->
+ Opt1 = optionals(Root1, 1),
+ ExtComps = length([C || C = #'ComponentType'{} <- Ext]),
+ Opt2 = optionals(Root2, 1 + length(Root1) + ExtComps),
+ Opt1 ++ Opt2;
+optionals({L,_Ext}) ->
+ optionals(L, 1);
+optionals(L) ->
+ optionals(L, 1).
+
+optionals([#'ComponentType'{prop='OPTIONAL'}|Rest], Pos) ->
+ [{Pos,[asn1_NOVALUE]}|optionals(Rest, Pos+1)];
+optionals([#'ComponentType'{typespec=T,prop={'DEFAULT',Val}}|Cs], Pos) ->
Vals = def_values(T, Val),
- optionals(Rest, [{Pos,Vals}|Acc], Pos+1);
-optionals([#'ComponentType'{}|Rest], Acc, Pos) ->
- optionals(Rest, Acc, Pos+1);
-optionals([], Acc, _) ->
- lists:reverse(Acc).
+ [{Pos,Vals}|optionals(Cs, Pos+1)];
+optionals([#'ComponentType'{}|Rest], Pos) ->
+ optionals(Rest, Pos+1);
+optionals([], _) ->
+ [].
%%%%%%%%%%%%%%%%%%%%%%
%% create_optionality_table(Cs=[#'ComponentType'{textual_order=undefined}|_]) ->
@@ -779,13 +922,6 @@ get_optionality_pos(TextPos,OptTable) ->
no_num
end.
-to_encoding_order(Cs) when is_list(Cs) ->
- Cs;
-to_encoding_order(Cs = {_Root,_Ext}) ->
- Cs;
-to_encoding_order({R1,Ext,R2}) ->
- {R1++R2,Ext}.
-
add_textual_order(Cs) when is_list(Cs) ->
{NewCs,_} = add_textual_order1(Cs,1),
NewCs;
@@ -810,69 +946,81 @@ add_textual_order1(Cs,NumIn) ->
end,
NumIn,Cs).
-gen_enc_components_call(Erule,TopType,{Root,ExtList}, DynamicEnc,Ext) ->
- gen_enc_components_call(Erule,TopType,{Root,ExtList,[]}, DynamicEnc,Ext);
-gen_enc_components_call(Erule,TopType,CL={Root,ExtList,Root2}, DynamicEnc,Ext) ->
- %% The type has extensionmarker
- {Imm0,Rpos} = gen_enc_components_call1(Erule,TopType,Root++Root2,1, DynamicEnc,noext,[]),
+gen_enc_components_call(Erule, TopType, {Root,ExtList}, DynamicEnc, Ext) ->
+ gen_enc_components_call(Erule, TopType, {Root,ExtList,[]}, DynamicEnc, Ext);
+gen_enc_components_call(Erule, TopType, {R1,ExtList0,R2}=CL, DynamicEnc, Ext) ->
+ Root = R1 ++ R2,
+ Imm0 = gen_enc_components_call1(Erule, TopType, Root, DynamicEnc, noext),
ExtImm = case Ext of
{ext,_,ExtNum} when ExtNum > 0 ->
[{var,"Extensions"}];
_ ->
[]
end,
- %handle extensions
{extgrouppos,ExtGroupPosLen} = extgroup_pos_and_length(CL),
- NewExtList = wrap_extensionAdditionGroups(ExtList,ExtGroupPosLen),
- {Imm1,_} = gen_enc_components_call1(Erule,TopType,NewExtList,Rpos,DynamicEnc,Ext,[]),
+ ExtList1 = wrap_extensionAdditionGroups(ExtList0, ExtGroupPosLen),
+ ExtList = [mark_optional(C) || C <- ExtList1],
+ Imm1 = gen_enc_components_call1(Erule, TopType, ExtList, DynamicEnc, Ext),
Imm0 ++ [ExtImm|Imm1];
-gen_enc_components_call(Erule,TopType, CompList, DynamicEnc, Ext) ->
- %% The type has no extensionmarker
- {Imm,_} = gen_enc_components_call1(Erule,TopType,CompList,1,DynamicEnc,Ext,[]),
- Imm.
+gen_enc_components_call(Erule, TopType, CompList, DynamicEnc, Ext) ->
+ %% No extension marker.
+ gen_enc_components_call1(Erule, TopType, CompList, DynamicEnc, Ext).
+
+mark_optional(#'ComponentType'{prop=Prop0}=C) ->
+ Prop = case Prop0 of
+ mandatory -> 'OPTIONAL';
+ 'OPTIONAL'=Keep -> Keep;
+ {'DEFAULT',_}=Keep -> Keep
+ end,
+ C#'ComponentType'{prop=Prop};
+mark_optional(Other) ->
+ Other.
+
+gen_enc_components_call1(Gen, TopType, [C|Rest], DynamicEnc, Ext) ->
+ #'ComponentType'{name=Cname,typespec=Type,
+ prop=Prop,textual_order=Num} = C,
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
+ CommentString = attribute_comment(InnerType, Num, Cname),
+ ImmComment = asn1ct_imm:enc_comment(CommentString),
-gen_enc_components_call1(Erule,TopType,
- [C=#'ComponentType'{name=Cname,typespec=Type,prop=Prop}|Rest],
- Tpos,
- DynamicEnc, Ext, Acc) ->
-
- TermNo =
- case C#'ComponentType'.textual_order of
- undefined ->
- Tpos;
- CanonicalNum ->
- CanonicalNum
- end,
- Val = make_var(val),
- {Imm0,Element} = asn1ct_imm:enc_element(TermNo+1, Val),
- Imm1 = gen_enc_line_imm(Erule, TopType, Cname, Type, Element, DynamicEnc, Ext),
- Category = case {Prop,Ext} of
- {'OPTIONAL',_} ->
- optional;
- {{'DEFAULT',DefVal},_} ->
- {default,DefVal};
- {_,{ext,ExtPos,_}} when Tpos >= ExtPos ->
- optional;
- {_,_} ->
- mandatory
- end,
- Imm2 = case Category of
+ {Imm0,Element} = enc_fetch_field(Gen, Num, Prop),
+ Imm1 = gen_enc_line_imm(Gen, TopType, Cname, Type,
+ Element, DynamicEnc, Ext),
+ Imm2 = case Prop of
mandatory ->
Imm1;
- optional ->
- asn1ct_imm:enc_absent(Element, [asn1_NOVALUE], Imm1);
- {default,Def} ->
+ 'OPTIONAL' ->
+ enc_absent(Gen, Element, [asn1_NOVALUE], Imm1);
+ {'DEFAULT',Def} ->
DefValues = def_values(Type, Def),
- asn1ct_imm:enc_absent(Element, DefValues, Imm1)
+ enc_absent(Gen, Element, DefValues, Imm1)
end,
Imm = case Imm2 of
[] -> [];
- _ -> Imm0 ++ Imm2
+ _ -> [ImmComment|Imm0 ++ Imm2]
end,
- gen_enc_components_call1(Erule, TopType, Rest, Tpos+1, DynamicEnc, Ext, [Imm|Acc]);
-gen_enc_components_call1(_Erule,_TopType,[],Pos,_,_, Acc) ->
- ImmList = lists:reverse(Acc),
- {ImmList,Pos}.
+ [Imm|gen_enc_components_call1(Gen, TopType, Rest, DynamicEnc, Ext)];
+gen_enc_components_call1(_Gen, _TopType, [], _, _) ->
+ [].
+
+enc_absent(Gen, Var, Absent0, Imm) ->
+ Absent = translate_missing_value(Gen, Absent0),
+ asn1ct_imm:enc_absent(Var, Absent, Imm).
+
+translate_missing_value(#gen{pack=record}, Optionals) ->
+ Optionals;
+translate_missing_value(#gen{pack=map}, Optionals) ->
+ case Optionals of
+ [asn1_NOVALUE|T] -> [?MISSING_IN_MAP|T];
+ [asn1_DEFAULT|T] -> [?MISSING_IN_MAP|T];
+ {call,_,_,_} -> Optionals
+ end.
+
+enc_fetch_field(#gen{pack=record}, Num, _Prop) ->
+ Val = make_var(val),
+ asn1ct_imm:enc_element(Num+1, Val);
+enc_fetch_field(#gen{pack=map}, Num, _) ->
+ {[],{var,lists:concat(["Input@",Num])}}.
def_values(#type{def=#'Externaltypereference'{module=Mod,type=Type}}, Def) ->
#typedef{typespec=T} = asn1_db:dbget(Mod, Type),
@@ -1115,27 +1263,31 @@ gen_dec_components_call(Erule, TopType, {Root,ExtList},
DecInfObj, Ext, NumberOfOptionals) ->
gen_dec_components_call(Erule,TopType,{Root,ExtList,[]},
DecInfObj,Ext,NumberOfOptionals);
-gen_dec_components_call(Erule,TopType,CL={Root1,ExtList,Root2},
- DecInfObj,Ext,NumberOfOptionals) ->
+gen_dec_components_call(Gen, TopType, {Root1,ExtList,Root2}=CL,
+ DecInfObj, Ext, NumberOfOptionals) ->
%% The type has extensionmarker
OptTable = create_optionality_table(Root1++Root2),
Init = {ignore,fun(_) -> {[],[]} end},
{EmitRoot,Tpos} =
- gen_dec_comp_calls(Root1++Root2, Erule, TopType, OptTable,
+ gen_dec_comp_calls(Root1++Root2, Gen, TopType, OptTable,
DecInfObj, noext, NumberOfOptionals,
1, []),
- EmitGetExt = gen_dec_get_extension(Erule),
+ EmitGetExt = gen_dec_get_extension(Gen),
{extgrouppos,ExtGroupPosLen} = extgroup_pos_and_length(CL),
NewExtList = wrap_extensionAdditionGroups(ExtList, ExtGroupPosLen),
- {EmitExts,_} = gen_dec_comp_calls(NewExtList, Erule, TopType, OptTable,
+ {EmitExts,_} = gen_dec_comp_calls(NewExtList, Gen, TopType, OptTable,
DecInfObj, Ext, NumberOfOptionals,
Tpos, []),
NumExtsToSkip = ext_length(ExtList),
Finish =
fun(St) ->
emit([{next,bytes},"= "]),
- call(Erule, skipextensions,
- [{curr,bytes},NumExtsToSkip+1,"Extensions"]),
+ Mod = case Gen of
+ #gen{erule=per,aligned=false} -> uper;
+ #gen{erule=per,aligned=true} -> per
+ end,
+ asn1ct_func:call(Mod, skipextensions,
+ [{curr,bytes},NumExtsToSkip+1,"Extensions"]),
asn1ct_name:new(bytes),
St
end,
@@ -1178,29 +1330,19 @@ gen_dec_comp_calls([C|Cs], Erule, TopType, OptTable, DecInfObj,
gen_dec_comp_calls([], _, _, _, _, _, _, Tpos, Acc) ->
{lists:append(lists:reverse(Acc)),Tpos}.
-gen_dec_comp_call(Comp, Erule, TopType, Tpos, OptTable, DecInfObj,
+gen_dec_comp_call(Comp, Gen, TopType, Tpos, OptTable, DecInfObj,
Ext, NumberOfOptionals) ->
- #'ComponentType'{typespec=Type,prop=Prop,textual_order=TextPos} = Comp,
+ #'ComponentType'{name=Cname,typespec=Type,
+ prop=Prop,textual_order=TextPos} = Comp,
Pos = case Ext of
noext -> Tpos;
{ext,Epos,_Enum} -> Tpos - Epos + 1
end,
- InnerType =
- case Type#type.def of
- #'ObjectClassFieldType'{type=InType} ->
- InType;
- Def ->
- asn1ct_gen:get_inner(Def)
- end,
+ InnerType = asn1ct_gen:get_inner(Type#type.def),
- DispType = case InnerType of
- #'Externaltypereference'{type=T} -> T;
- IT when is_tuple(IT) -> element(2,IT);
- _ -> InnerType
- end,
+ CommentString = attribute_comment(InnerType, TextPos, Cname),
Comment = fun(St) ->
- emit([nl,"%% attribute number ",TextPos,
- " with type ",DispType,nl]),
+ emit([nl,"%% ",CommentString,nl]),
St
end,
@@ -1219,15 +1361,9 @@ gen_dec_comp_call(Comp, Erule, TopType, Tpos, OptTable, DecInfObj,
_ ->
case Type of
#type{def=#'SEQUENCE'{
- extaddgroup=Number1,
- components=ExtGroupCompList1}} when is_integer(Number1)->
- fun(St) ->
- emit(["{{_,"]),
- emit_extaddgroupTerms(term,ExtGroupCompList1),
- emit(["}"]),
- emit([",",{next,bytes},"} = "]),
- St
- end;
+ extaddgroup=GroupNum,
+ components=CompList}} when is_integer(GroupNum)->
+ dec_match_extadd_fun(Gen, CompList);
_ ->
fun(St) ->
asn1ct_name:new(term),
@@ -1237,9 +1373,9 @@ gen_dec_comp_call(Comp, Erule, TopType, Tpos, OptTable, DecInfObj,
end
end
end,
- {Pre,Post} = comp_call_pre_post(Ext, Prop, Pos, Type, TextPos,
+ {Pre,Post} = comp_call_pre_post(Gen, Ext, Prop, Pos, Type, TextPos,
OptTable, NumberOfOptionals, Ext),
- Lines = gen_dec_seq_line_imm(Erule, TopType, Comp, Tpos, DecInfObj, Ext),
+ Lines = gen_dec_seq_line_imm(Gen, TopType, Comp, Tpos, DecInfObj, Ext),
AdvBuffer = {ignore,fun(St) ->
asn1ct_name:new(bytes),
St
@@ -1247,9 +1383,24 @@ gen_dec_comp_call(Comp, Erule, TopType, Tpos, OptTable, DecInfObj,
[{group,[{safe,Comment},{safe,Preamble}] ++ Pre ++
Lines ++ Post ++ [{safe,AdvBuffer}]}].
-comp_call_pre_post(noext, mandatory, _, _, _, _, _, _) ->
+dec_match_extadd_fun(#gen{pack=record}, CompList) ->
+ fun(St) ->
+ emit(["{{_,"]),
+ emit_extaddgroupTerms(term, CompList),
+ emit(["}"]),
+ emit([",",{next,bytes},"} = "]),
+ St
+ end;
+dec_match_extadd_fun(#gen{pack=map}, _CompList) ->
+ fun(St) ->
+ asn1ct_name:new(map),
+ emit(["{",{curr,map},",",{next,bytes},"} = "]),
+ St
+ end.
+
+comp_call_pre_post(_Gen, noext, mandatory, _, _, _, _, _, _) ->
{[],[]};
-comp_call_pre_post(noext, Prop, _, Type, TextPos,
+comp_call_pre_post(_Gen, noext, Prop, _, Type, TextPos,
OptTable, NumOptionals, Ext) ->
%% OPTIONAL or DEFAULT
OptPos = get_optionality_pos(TextPos, OptTable),
@@ -1273,32 +1424,53 @@ comp_call_pre_post(noext, Prop, _, Type, TextPos,
"end"]),
St
end]};
-comp_call_pre_post({ext,_,_}, Prop, Pos, Type, _, _, _, Ext) ->
+comp_call_pre_post(Gen, {ext,_,_}, Prop, Pos, Type, _, _, _, Ext) ->
%% Extension
{[fun(St) ->
emit(["case Extensions of",nl,
" <<_:",Pos-1,",1:1,_/bitstring>> ->",nl]),
St
end],
- [fun(St) ->
- emit([";",nl,
- "_ ->",nl,
- "{"]),
- case Type of
- #type{def=#'SEQUENCE'{
- extaddgroup=Number2,
- components=ExtGroupCompList2}}
- when is_integer(Number2)->
- emit("{extAddGroup,"),
- gen_dec_extaddGroup_no_val(Ext, Type, ExtGroupCompList2),
- emit("}");
- _ ->
- gen_dec_component_no_val(Ext, Type, Prop)
- end,
- emit([",",{curr,bytes},"}",nl,
- "end"]),
- St
- end]}.
+ [extadd_group_fun(Gen, Prop, Type, Ext)]}.
+
+extadd_group_fun(#gen{pack=record}, Prop, Type, Ext) ->
+ fun(St) ->
+ emit([";",nl,
+ "_ ->",nl,
+ "{"]),
+ case Type of
+ #type{def=#'SEQUENCE'{
+ extaddgroup=Number2,
+ components=ExtGroupCompList2}}
+ when is_integer(Number2)->
+ emit("{extAddGroup,"),
+ gen_dec_extaddGroup_no_val(Ext, Type, ExtGroupCompList2),
+ emit("}");
+ _ ->
+ gen_dec_component_no_val(Ext, Type, Prop)
+ end,
+ emit([",",{curr,bytes},"}",nl,
+ "end"]),
+ St
+ end;
+extadd_group_fun(#gen{pack=map}, Prop, Type, Ext) ->
+ fun(St) ->
+ emit([";",nl,
+ "_ ->",nl,
+ "{"]),
+ case Type of
+ #type{def=#'SEQUENCE'{
+ extaddgroup=Number2,
+ components=Comp}}
+ when is_integer(Number2)->
+ dec_map_extaddgroup_no_val(Ext, Type, Comp);
+ _ ->
+ gen_dec_component_no_val(Ext, Type, Prop)
+ end,
+ emit([",",{curr,bytes},"}",nl,
+ "end"]),
+ St
+ end.
is_mandatory_predef_tab_c(noext, mandatory,
{"got objfun through args","ObjFun"}) ->
@@ -1325,7 +1497,20 @@ gen_dec_component_no_val(_, _, 'OPTIONAL') ->
emit({"asn1_NOVALUE"});
gen_dec_component_no_val({ext,_,_}, _, mandatory) ->
emit({"asn1_NOVALUE"}).
-
+
+dec_map_extaddgroup_no_val(Ext, Type, Comp) ->
+ L0 = [dec_map_extaddgroup_no_val_1(N, P, Ext, Type) ||
+ #'ComponentType'{name=N,prop=P} <- Comp],
+ L = [E || E <- L0, E =/= []],
+ emit(["#{",lists:join(",", L),"}"]).
+
+dec_map_extaddgroup_no_val_1(Name, {'DEFAULT',DefVal0}, _Ext, Type) ->
+ DefVal = asn1ct_gen:conform_value(Type, DefVal0),
+ [Name,"=>",{asis,DefVal}];
+dec_map_extaddgroup_no_val_1(_Name, 'OPTIONAL', _, _) ->
+ [];
+dec_map_extaddgroup_no_val_1(_Name, mandatory, {ext,_,_}, _) ->
+ [].
gen_dec_choice_line(Erule, TopType, Comp, Pre) ->
Imm0 = gen_dec_line_imm(Erule, TopType, Comp, false, Pre),
@@ -1461,29 +1646,29 @@ gen_dec_line_special(Erule, {typefield,_}, _TopType, Comp,
Prop}],PrevSt}
end
end;
-gen_dec_line_special(Erule, Atype, TopType, Comp, DecInfObj) ->
- case gen_dec_line_other(Erule, Atype, TopType, Comp) of
+gen_dec_line_special(Gen, Atype, TopType, Comp, DecInfObj) ->
+ case gen_dec_line_other(Gen, Atype, TopType, Comp) of
Fun when is_function(Fun, 1) ->
fun({BytesVar,PrevSt}) ->
Fun(BytesVar),
- gen_dec_line_dec_inf(Comp, DecInfObj),
- gen_dec_line_dec_inf(Gen, Comp, DecInfObj),
{[],PrevSt}
end;
Imm0 ->
{imm,Imm0,
fun(Imm, {BytesVar,PrevSt}) ->
asn1ct_imm:dec_code_gen(Imm, BytesVar),
- gen_dec_line_dec_inf(Comp, DecInfObj),
+ gen_dec_line_dec_inf(Gen, Comp, DecInfObj),
{[],PrevSt}
end}
end.
-gen_dec_line_dec_inf(Comp, DecInfObj) ->
+gen_dec_line_dec_inf(Gen, Comp, DecInfObj) ->
#'ComponentType'{name=Cname} = Comp,
case DecInfObj of
{Cname,{_,_OSet,_UniqueFName,ValIndex}} ->
Term = asn1ct_gen:mk_var(asn1ct_name:curr(term)),
- ValueMatch = value_match(ValIndex,Term),
- ValueMatch = value_match(Gen, ValIndex, Term),
emit([",",nl,
"ObjFun = ",ValueMatch]);
_ ->
@@ -1705,20 +1890,17 @@ gen_dec_choice2(Erule, TopType, [H0|T], Pos, Sep0, Pre) ->
gen_dec_choice2(Erule, TopType, T, Pos+1, Sep, Pre);
gen_dec_choice2(_, _, [], _, _, _) -> ok.
-make_elements(I,Val,ExtCnames) ->
- make_elements(I,Val,ExtCnames,[]).
+get_input_vars(Val, I, N) ->
+ L = get_input_vars_1(Val, I, N),
+ lists:join(",", L).
-make_elements(I,Val,[_ExtCname],Acc)-> % the last one, no comma needed
- Element = make_element(I, Val),
- make_elements(I+1,Val,[],[Element|Acc]);
-make_elements(I,Val,[_ExtCname|Rest],Acc)->
- Element = make_element(I, Val),
- make_elements(I+1,Val,Rest,[", ",Element|Acc]);
-make_elements(_I,_,[],Acc) ->
- lists:reverse(Acc).
+get_input_vars_1(_Val, _I, 0) ->
+ [];
+get_input_vars_1(Val, I, N) ->
+ [get_input_var(Val, I)|get_input_vars_1(Val, I+1, N-1)].
-make_element(I, Val) ->
- lists:flatten(io_lib:format("element(~w, ~s)", [I,Val])).
+get_input_var(Val, I) ->
+ lists:flatten(io_lib:format("element(~w, ~s)", [I+1,Val])).
emit_extaddgroupTerms(VarSeries,[_]) ->
asn1ct_name:new(VarSeries),
@@ -1735,63 +1917,76 @@ flat_complist({Rl1,El,Rl2}) -> Rl1 ++ El ++ Rl2;
flat_complist({Rl,El}) -> Rl ++ El;
flat_complist(CompList) -> CompList.
-%%wrap_compList({Root1,Ext,Root2}) ->
-%% {Root1,wrap_extensionAdditionGroups(Ext),Root2};
-%%wrap_compList({Root1,Ext}) ->
-%% {Root1,wrap_extensionAdditionGroups(Ext)};
-%%wrap_compList(CompList) ->
-%% CompList.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% Will convert all componentTypes following 'ExtensionAdditionGroup'
+%% Convert all componentTypes following 'ExtensionAdditionGroup'
%% up to the matching 'ExtensionAdditionGroupEnd' into one componentType
-%% of type SEQUENCE with the componentTypes as components
+%% of type SEQUENCE with the componentTypes as components.
%%
-wrap_extensionAdditionGroups(ExtCompList,ExtGroupPosLen) ->
- wrap_extensionAdditionGroups(ExtCompList,ExtGroupPosLen,[],0,0).
+wrap_extensionAdditionGroups(ExtCompList, ExtGroupPosLen) ->
+ wrap_eags(ExtCompList, ExtGroupPosLen, 0, 0).
-wrap_extensionAdditionGroups([{'ExtensionAdditionGroup',_Number}|Rest],
- [{ActualPos,_,_}|ExtGroupPosLenRest],Acc,_ExtAddGroupDiff,ExtGroupNum) ->
- {ExtGroupCompList,['ExtensionAdditionGroupEnd'|Rest2]} =
+wrap_eags([{'ExtensionAdditionGroup',_Number}|T0],
+ [{ActualPos,_,_}|Gs], _ExtAddGroupDiff, ExtGroupNum) ->
+ {ExtGroupCompList,['ExtensionAdditionGroupEnd'|T]} =
lists:splitwith(fun(#'ComponentType'{}) -> true;
(_) -> false
- end,
- Rest),
- wrap_extensionAdditionGroups(Rest2,ExtGroupPosLenRest,
- [#'ComponentType'{
- name=list_to_atom("ExtAddGroup"++
- integer_to_list(ExtGroupNum+1)),
- typespec=#type{def=#'SEQUENCE'{
- extaddgroup=ExtGroupNum+1,
- components=ExtGroupCompList}},
- textual_order = ActualPos,
- prop='OPTIONAL'}|Acc],length(ExtGroupCompList)-1,
- ExtGroupNum+1);
-wrap_extensionAdditionGroups([H=#'ComponentType'{textual_order=Tord}|T],
- ExtAddGrpLenPos,Acc,ExtAddGroupDiff,ExtGroupNum) when is_integer(Tord) ->
- wrap_extensionAdditionGroups(T,ExtAddGrpLenPos,[H#'ComponentType'{
- textual_order=Tord - ExtAddGroupDiff}|Acc],ExtAddGroupDiff,ExtGroupNum);
-wrap_extensionAdditionGroups([H|T],ExtAddGrpLenPos,Acc,ExtAddGroupDiff,ExtGroupNum) ->
- wrap_extensionAdditionGroups(T,ExtAddGrpLenPos,[H|Acc],ExtAddGroupDiff,ExtGroupNum);
-wrap_extensionAdditionGroups([],_,Acc,_,_) ->
- lists:reverse(Acc).
-
-value_match(Index,Value) when is_atom(Value) ->
- value_match(Index,atom_to_list(Value));
-value_match([],Value) ->
+ end, T0),
+ Name = list_to_atom(lists:concat(["ExtAddGroup",ExtGroupNum+1])),
+ Seq = #type{def=#'SEQUENCE'{extaddgroup=ExtGroupNum+1,
+ components=ExtGroupCompList}},
+ Comp = #'ComponentType'{name=Name,
+ typespec=Seq,
+ textual_order=ActualPos,
+ prop='OPTIONAL'},
+ [Comp|wrap_eags(T, Gs, length(ExtGroupCompList)-1, ExtGroupNum+1)];
+wrap_eags([#'ComponentType'{textual_order=Tord}=H|T],
+ ExtAddGrpLenPos, ExtAddGroupDiff, ExtGroupNum)
+ when is_integer(Tord) ->
+ Comp = H#'ComponentType'{textual_order=Tord - ExtAddGroupDiff},
+ [Comp|wrap_eags(T, ExtAddGrpLenPos, ExtAddGroupDiff, ExtGroupNum)];
+wrap_eags([H|T], ExtAddGrpLenPos, ExtAddGroupDiff, ExtGroupNum) ->
+ [H|wrap_eags(T, ExtAddGrpLenPos, ExtAddGroupDiff, ExtGroupNum)];
+wrap_eags([], _, _, _) ->
+ [].
+
+value_match(#gen{pack=record}, VIs, Value) ->
+ value_match_rec(VIs, Value);
+value_match(#gen{pack=map}, VIs, Value) ->
+ value_match_map(VIs, Value).
+
+value_match_rec([], Value) ->
+ Value;
+value_match_rec([{VI,_}|VIs], Value0) ->
+ Value = value_match_rec(VIs, Value0),
+ lists:concat(["element(",VI,", ",Value,")"]).
+
+value_match_map([], Value) ->
Value;
-value_match([{VI,_}|VIs],Value) ->
- value_match1(Value,VIs,lists:concat(["element(",VI,","]),1).
-value_match1(Value,[],Acc,Depth) ->
- Acc ++ Value ++ lists:concat(lists:duplicate(Depth,")"));
-value_match1(Value,[{VI,_}|VIs],Acc,Depth) ->
- value_match1(Value,VIs,Acc++lists:concat(["element(",VI,","]),Depth+1).
-
-enc_dig_out_value([], Value) ->
+value_match_map([{_,Name}|VIs], Value0) ->
+ Value = value_match_map(VIs, Value0),
+ lists:concat(["maps:get(",Name,", ",Value,")"]).
+
+enc_dig_out_value(_Gen, [], Value) ->
{[],Value};
-enc_dig_out_value([{N,_}|T], Value) ->
- {Imm0,Dst0} = enc_dig_out_value(T, Value),
+enc_dig_out_value(#gen{pack=record}=Gen, [{N,_}|T], Value) ->
+ {Imm0,Dst0} = enc_dig_out_value(Gen, T, Value),
{Imm,Dst} = asn1ct_imm:enc_element(N, Dst0),
+ {Imm0++Imm,Dst};
+enc_dig_out_value(#gen{pack=map}, [{N,'ASN1_top'}], _Value) ->
+ {[],{var,lists:concat(["Input@",N-1])}};
+enc_dig_out_value(#gen{pack=map}=Gen, [{_,Name}|T], Value) ->
+ {Imm0,Dst0} = enc_dig_out_value(Gen, T, Value),
+ {Imm,Dst} = asn1ct_imm:enc_maps_get(Name, Dst0),
{Imm0++Imm,Dst}.
make_var(Base) ->
{var,atom_to_list(asn1ct_gen:mk_var(asn1ct_name:curr(Base)))}.
+
+attribute_comment(InnerType, TextPos, Cname) ->
+ DispType = case InnerType of
+ #'Externaltypereference'{type=T} -> T;
+ IT when is_tuple(IT) -> element(2,IT);
+ _ -> InnerType
+ end,
+ Comment = ["attribute ",Cname,"(",TextPos,") with type ",DispType],
+ lists:concat(Comment).
diff --git a/lib/asn1/src/asn1ct_eval_ext.funcs b/lib/asn1/src/asn1ct_eval_ext.funcs
index 5761901f89..01c67e7b5a 100644
--- a/lib/asn1/src/asn1ct_eval_ext.funcs
+++ b/lib/asn1/src/asn1ct_eval_ext.funcs
@@ -1 +1,2 @@
{ext,transform_to_EXTERNAL1994,1}.
+{ext,transform_to_EXTERNAL1994_maps,1}.
diff --git a/lib/asn1/src/asn1ct_gen.erl b/lib/asn1/src/asn1ct_gen.erl
index bfaffa13bf..9f628c7b04 100644
--- a/lib/asn1/src/asn1ct_gen.erl
+++ b/lib/asn1/src/asn1ct_gen.erl
@@ -34,10 +34,10 @@
insert_once/2,
ct_gen_module/1,
index2suffix/1,
- get_record_name_prefix/0,
+ get_record_name_prefix/1,
conform_value/2,
named_bitstring_value/2]).
--export([pgen/5,
+-export([pgen/3,
mk_var/1,
un_hyphen_var/1]).
-export([gen_encode_constructed/4,
@@ -45,23 +45,19 @@
-define(SUPPRESSION_FUNC, 'dialyzer-suppressions').
+
%% pgen(Outfile, Erules, Module, TypeOrVal, Options)
-%% Generate Erlang module (.erl) and (.hrl) file corresponding to an ASN.1 module
-%% .hrl file is only generated if necessary
-%% Erules = per | ber
-%% Module = atom()
-%% TypeOrVal = {TypeList,ValueList}
-%% TypeList = ValueList = [atom()]
-%% Options = [Options] from asn1ct:compile()
-
-pgen(OutFile,Erules,Module,TypeOrVal,Options) ->
- pgen_module(OutFile,Erules,Module,TypeOrVal,Options,true).
-
-
-pgen_module(OutFile,Erules,Module,
- TypeOrVal = {Types,_Values,_Ptypes,_Classes,_Objects,_ObjectSets},
- Options,Indent) ->
- N2nConvEnums = [CName|| {n2n,CName} <- get(encoding_options)],
+%% Generate Erlang module (.erl) and (.hrl) file corresponding to
+%% an ASN.1 module. The .hrl file is only generated if necessary.
+
+-spec pgen(Outfile, Gen, Code) -> 'ok' when
+ Outfile :: any(),
+ Gen :: #gen{},
+ Code :: #abst{}.
+
+pgen(OutFile, #gen{options=Options}=Gen, Code) ->
+ #abst{name=Module,types=Types} = Code,
+ N2nConvEnums = [CName|| {n2n,CName} <- Options],
case N2nConvEnums -- Types of
[] ->
ok;
@@ -69,30 +65,30 @@ pgen_module(OutFile,Erules,Module,
exit({"Non existing ENUMERATION types used in n2n option",
UnmatchedTypes})
end,
- put(outfile,OutFile),
- HrlGenerated = pgen_hrl(Erules,Module,TypeOrVal,Options,Indent),
+ put(outfile, OutFile),
+ put(currmod, Module),
+ HrlGenerated = pgen_hrl(Gen, Code),
asn1ct_name:start(),
ErlFile = lists:concat([OutFile,".erl"]),
_ = open_output_file(ErlFile),
asn1ct_func:start_link(),
- gen_head(Erules,Module,HrlGenerated),
- pgen_exports(Erules,Module,TypeOrVal),
- pgen_dispatcher(Erules,Module,TypeOrVal),
+ gen_head(Gen, Module, HrlGenerated),
+ pgen_exports(Gen, Code),
+ pgen_dispatcher(Gen, Types),
pgen_info(),
- pgen_typeorval(Erules,Module,N2nConvEnums,TypeOrVal),
- pgen_partial_incomplete_decode(Erules),
-% gen_vars(asn1_db:mod_to_vars(Module)),
-% gen_tag_table(AllTypes),
+ pgen_typeorval(Gen, N2nConvEnums, Code),
+ pgen_partial_incomplete_decode(Gen),
emit([nl,
"%%%",nl,
"%%% Run-time functions.",nl,
"%%%",nl]),
- dialyzer_suppressions(Erules),
+ dialyzer_suppressions(Gen),
Fd = get(gen_file_out),
asn1ct_func:generate(Fd),
close_output_file(),
_ = erase(outfile),
- asn1ct:verbose("--~p--~n",[{generated,ErlFile}],Options).
+ asn1ct:verbose("--~p--~n", [{generated,ErlFile}], Gen),
+ ok.
dialyzer_suppressions(Erules) ->
emit([nl,
@@ -100,20 +96,27 @@ dialyzer_suppressions(Erules) ->
Rtmod = ct_gen_module(Erules),
Rtmod:dialyzer_suppressions(Erules).
-pgen_typeorval(Erules,Module,N2nConvEnums,{Types,Values,_Ptypes,_Classes,Objects,ObjectSets}) ->
+pgen_typeorval(Erules, N2nConvEnums, Code) ->
+ #abst{name=Module,types=Types,values=Values,
+ objects=Objects,objsets=ObjectSets} = Code,
Rtmod = ct_gen_module(Erules),
pgen_types(Rtmod,Erules,N2nConvEnums,Module,Types),
- pgen_values(Erules,Module,Values),
+ pgen_values(Values, Module),
pgen_objects(Rtmod,Erules,Module,Objects),
pgen_objectsets(Rtmod,Erules,Module,ObjectSets),
pgen_partial_decode(Rtmod,Erules,Module).
-pgen_values(_,_,[]) ->
- true;
-pgen_values(Erules,Module,[H|T]) ->
- Valuedef = asn1_db:dbget(Module,H),
- gen_value(Valuedef),
- pgen_values(Erules,Module,T).
+%% Generate a function 'V'/0 for each Value V defined in the ASN.1 module.
+%% The function returns the value in an Erlang representation which can be
+%% used as input to the runtime encode functions.
+
+pgen_values([H|T], Module) ->
+ #valuedef{name=Name,value=Value} = asn1_db:dbget(Module, H),
+ emit([{asis,Name},"() ->",nl,
+ {asis,Value},".",nl,nl]),
+ pgen_values(T, Module);
+pgen_values([], _) ->
+ ok.
pgen_types(_, _, _, _, []) ->
true;
@@ -181,10 +184,10 @@ pgen_objectsets(Rtmod,Erules,Module,[H|T]) ->
Rtmod:gen_objectset_code(Erules,TypeDef),
pgen_objectsets(Rtmod,Erules,Module,T).
-pgen_partial_decode(Rtmod,Erule,Module) when Erule == ber ->
- pgen_partial_inc_dec(Rtmod,Erule,Module),
- pgen_partial_dec(Rtmod,Erule,Module);
-pgen_partial_decode(_,_,_) ->
+pgen_partial_decode(Rtmod, #gen{erule=ber}=Gen, Module) ->
+ pgen_partial_inc_dec(Rtmod, Gen, Module),
+ pgen_partial_dec(Rtmod, Gen, Module);
+pgen_partial_decode(_, _, _) ->
ok.
pgen_partial_inc_dec(Rtmod,Erules,Module) ->
@@ -225,7 +228,7 @@ pgen_partial_inc_dec1(Rtmod,Erules,Module,[P|Ps]) ->
pgen_partial_inc_dec1(_,_,_,[]) ->
ok.
-gen_partial_inc_dec_refed_funcs(Rtmod,Erule) when Erule == ber ->
+gen_partial_inc_dec_refed_funcs(Rtmod, #gen{erule=ber}=Gen) ->
case asn1ct:next_refed_func() of
[] ->
ok;
@@ -233,19 +236,17 @@ gen_partial_inc_dec_refed_funcs(Rtmod,Erule) when Erule == ber ->
TypeDef = asn1_db:dbget(M,Name),
asn1ct:update_gen_state(namelist,Pattern),
asn1ct:set_current_sindex(Sindex),
- Rtmod:gen_inc_decode(Erule,TypeDef),
- gen_dec_part_inner_constr(Rtmod,Erule,TypeDef,[Name]),
- gen_partial_inc_dec_refed_funcs(Rtmod,Erule);
+ Rtmod:gen_inc_decode(Gen, TypeDef),
+ gen_dec_part_inner_constr(Rtmod, Gen, TypeDef, [Name]),
+ gen_partial_inc_dec_refed_funcs(Rtmod, Gen);
{Name,Sindex,Pattern,Type} ->
TypeDef=#typedef{name=asn1ct_gen:list2name(Name),typespec=Type},
asn1ct:update_gen_state(namelist,Pattern),
asn1ct:set_current_sindex(Sindex),
- Rtmod:gen_inc_decode(Erule,TypeDef),
- gen_dec_part_inner_constr(Rtmod,Erule,TypeDef,Name),
- gen_partial_inc_dec_refed_funcs(Rtmod,Erule)
- end;
-gen_partial_inc_dec_refed_funcs(_,_) ->
- ok.
+ Rtmod:gen_inc_decode(Gen, TypeDef),
+ gen_dec_part_inner_constr(Rtmod, Gen, TypeDef, Name),
+ gen_partial_inc_dec_refed_funcs(Rtmod, Gen)
+ end.
pgen_partial_dec(_Rtmod,Erules,_Module) ->
Type_pattern = asn1ct:get_gen_state_field(type_pattern),
@@ -254,16 +255,16 @@ pgen_partial_dec(_Rtmod,Erules,_Module) ->
pgen_partial_types(Erules,Type_pattern),
ok.
-pgen_partial_types(Erules,Type_pattern) ->
- % until this functionality works on all back-ends
- Options = get(encoding_options),
- case lists:member(asn1config,Options) of
+pgen_partial_types(#gen{options=Options}=Gen, TypePattern) ->
+ %% until this functionality works on all back-ends
+ case lists:member(asn1config, Options) of
true ->
- pgen_partial_types1(Erules,Type_pattern);
- _ -> ok
+ pgen_partial_types1(Gen, TypePattern);
+ false ->
+ ok
end.
-
+
pgen_partial_types1(Erules,[{FuncName,[TopType|RestTypes]}|Rest]) ->
% emit([FuncName,"(Bytes) ->",nl]),
CurrMod = get(currmod),
@@ -441,7 +442,8 @@ pgen_partial_incomplete_decode(Erule) ->
_ ->
ok
end.
-pgen_partial_incomplete_decode1(ber) ->
+
+pgen_partial_incomplete_decode1(#gen{erule=ber}) ->
case asn1ct:read_config_data(partial_incomplete_decode) of
undefined ->
ok;
@@ -451,7 +453,7 @@ pgen_partial_incomplete_decode1(ber) ->
GeneratedFs= asn1ct:get_gen_state_field(gen_refed_funcs),
% io:format("GeneratedFs :~n~p~n",[GeneratedFs]),
gen_part_decode_funcs(GeneratedFs,0);
-pgen_partial_incomplete_decode1(_) -> ok.
+pgen_partial_incomplete_decode1(#gen{}) -> ok.
emit_partial_incomplete_decode({FuncName,TopType,Pattern}) ->
TypePattern = asn1ct:get_gen_state_field(inc_type_pattern),
@@ -578,18 +580,6 @@ un_hyphen_var([H|T]) ->
un_hyphen_var([]) ->
[].
-%% Generate value functions ***************
-%% ****************************************
-%% Generates a function 'V'/0 for each Value V defined in the ASN.1 module
-%% the function returns the value in an Erlang representation which can be
-%% used as input to the runtime encode functions
-
-gen_value(Value) when is_record(Value,valuedef) ->
-%% io:format(" ~w ",[Value#valuedef.name]),
- emit({"'",Value#valuedef.name,"'() ->",nl}),
- V = Value#valuedef.value,
- emit([{asis,V},".",nl,nl]).
-
gen_encode_constructed(Erules,Typename,InnerType,D) when is_record(D,type) ->
Rtmod = ct_constructed_module(Erules),
case InnerType of
@@ -654,78 +644,32 @@ gen_decode_constructed(Erules,Typename,InnerType,D) when is_record(D,typedef) ->
gen_decode_constructed(Erules,Typename,InnerType,D#typedef.typespec).
-pgen_exports(Erules,_Module,{Types,Values,_,_,Objects,ObjectSets}) ->
- emit(["-export([encoding_rule/0,bit_string_format/0,",nl,
+pgen_exports(#gen{options=Options}=Gen, Code) ->
+ #abst{types=Types,values=Values,objects=Objects,objsets=ObjectSets} = Code,
+ emit(["-export([encoding_rule/0,maps/0,bit_string_format/0,",nl,
" legacy_erlang_types/0]).",nl]),
emit(["-export([",{asis,?SUPPRESSION_FUNC},"/1]).",nl]),
- case Types of
- [] -> ok;
- _ ->
- emit({"-export([",nl}),
- case Erules of
- ber ->
- gen_exports1(Types,"enc_",2);
- _ ->
- gen_exports1(Types,"enc_",1)
- end,
- emit({"-export([",nl}),
- case Erules of
- ber ->
- gen_exports1(Types, "dec_", 2);
- _ ->
- gen_exports1(Types, "dec_", 1)
- end
- end,
- case [X || {n2n,X} <- get(encoding_options)] of
- [] -> ok;
- A2nNames ->
- emit({"-export([",nl}),
- gen_exports1(A2nNames,"name2num_",1),
- emit({"-export([",nl}),
- gen_exports1(A2nNames,"num2name_",1)
- end,
- case Values of
- [] -> ok;
- _ ->
- emit({"-export([",nl}),
- gen_exports1(Values,"",0)
+ case Gen of
+ #gen{erule=ber} ->
+ gen_exports(Types, "enc_", 2),
+ gen_exports(Types, "dec_", 2),
+ gen_exports(Objects, "enc_", 3),
+ gen_exports(Objects, "dec_", 3),
+ gen_exports(ObjectSets, "getenc_", 1),
+ gen_exports(ObjectSets, "getdec_", 1);
+ #gen{erule=per} ->
+ gen_exports(Types, "enc_", 1),
+ gen_exports(Types, "dec_", 1)
end,
- case Objects of
- [] -> ok;
- _ ->
- case erule(Erules) of
- per ->
- ok;
- ber ->
- emit({"-export([",nl}),
- gen_exports1(Objects,"enc_",3),
- emit({"-export([",nl}),
- gen_exports1(Objects,"dec_",3)
- end
- end,
- case ObjectSets of
- [] -> ok;
- _ ->
- case erule(Erules) of
- per ->
- ok;
- ber ->
- emit({"-export([",nl}),
- gen_exports1(ObjectSets, "getenc_",1),
- emit({"-export([",nl}),
- gen_exports1(ObjectSets, "getdec_",1)
- end
- end,
- emit({"-export([info/0]).",nl}),
- gen_partial_inc_decode_exports(),
- gen_selected_decode_exports(),
- emit({nl,nl}).
-gen_exports1([F1,F2|T],Prefix,Arity) ->
- emit({"'",Prefix,F1,"'/",Arity,com,nl}),
- gen_exports1([F2|T],Prefix,Arity);
-gen_exports1([Flast|_T],Prefix,Arity) ->
- emit({"'",Prefix,Flast,"'/",Arity,nl,"]).",nl,nl}).
+ A2nNames = [X || {n2n,X} <- Options],
+ gen_exports(A2nNames, "name2num_", 1),
+ gen_exports(A2nNames, "num2name_", 1),
+
+ gen_exports(Values, "", 0),
+ emit(["-export([info/0]).",nl,nl]),
+ gen_partial_inc_decode_exports(),
+ gen_selected_decode_exports().
gen_partial_inc_decode_exports() ->
case {asn1ct:read_config_data(partial_incomplete_decode),
@@ -734,66 +678,54 @@ gen_partial_inc_decode_exports() ->
ok;
{_,undefined} ->
ok;
- {Data,_} ->
- gen_partial_inc_decode_exports(Data),
- emit(["-export([decode_part/2]).",nl])
+ {Data0,_} ->
+ Data = [Name || {Name,_,_} <- Data0],
+ gen_exports(Data, "", 1),
+ emit(["-export([decode_part/2]).",nl,nl])
end.
-gen_partial_inc_decode_exports([]) ->
- ok;
-gen_partial_inc_decode_exports([{Name,_,_}|Rest]) ->
- emit(["-export([",Name,"/1"]),
- gen_partial_inc_decode_exports1(Rest);
-gen_partial_inc_decode_exports([_|Rest]) ->
- gen_partial_inc_decode_exports(Rest).
-
-gen_partial_inc_decode_exports1([]) ->
- emit(["]).",nl]);
-gen_partial_inc_decode_exports1([{Name,_,_}|Rest]) ->
- emit([", ",Name,"/1"]),
- gen_partial_inc_decode_exports1(Rest);
-gen_partial_inc_decode_exports1([_|Rest]) ->
- gen_partial_inc_decode_exports1(Rest).
gen_selected_decode_exports() ->
case asn1ct:get_gen_state_field(type_pattern) of
undefined ->
ok;
- L ->
- gen_selected_decode_exports(L)
+ Data0 ->
+ Data = [Name || {Name,_} <- Data0],
+ gen_exports(Data, "", 1)
end.
-gen_selected_decode_exports([]) ->
+gen_exports([], _Prefix, _Arity) ->
ok;
-gen_selected_decode_exports([{FuncName,_}|Rest]) ->
- emit(["-export([",FuncName,"/1"]),
- gen_selected_decode_exports1(Rest).
-gen_selected_decode_exports1([]) ->
- emit(["]).",nl,nl]);
-gen_selected_decode_exports1([{FuncName,_}|Rest]) ->
- emit([",",nl," ",FuncName,"/1"]),
- gen_selected_decode_exports1(Rest).
-
-pgen_dispatcher(Erules,_Module,{[],_Values,_,_,_Objects,_ObjectSets}) ->
+gen_exports([_|_]=L0, Prefix, Arity) ->
+ FF = fun(F0) ->
+ F = list_to_atom(lists:concat([Prefix,F0])),
+ [{asis,F},"/",Arity]
+ end,
+ L = lists:join(",\n", [FF(F) || F <- L0]),
+ emit(["-export([",nl,
+ L,nl,
+ "]).",nl,nl]).
+
+pgen_dispatcher(Erules, []) ->
gen_info_functions(Erules);
-pgen_dispatcher(Erules,_Module,{Types,_Values,_,_,_Objects,_ObjectSets}) ->
+pgen_dispatcher(Gen, Types) ->
emit(["-export([encode/2,decode/2]).",nl,nl]),
- gen_info_functions(Erules),
+ gen_info_functions(Gen),
- Options = get(encoding_options),
+ Options = Gen#gen.options,
NoFinalPadding = lists:member(no_final_padding, Options),
NoOkWrapper = proplists:get_bool(no_ok_wrapper, Options),
- Call = case Erules of
- per ->
- asn1ct_func:need({Erules,complete,1}),
+ Call = case Gen of
+ #gen{erule=per,aligned=true} ->
+ asn1ct_func:need({per,complete,1}),
"complete(encode_disp(Type, Data))";
- ber ->
+ #gen{erule=ber} ->
"iolist_to_binary(element(1, encode_disp(Type, Data)))";
- uper when NoFinalPadding == true ->
- asn1ct_func:need({Erules,complete_NFP,1}),
+ #gen{erule=per,aligned=false} when NoFinalPadding ->
+ asn1ct_func:need({uper,complete_NFP,1}),
"complete_NFP(encode_disp(Type, Data))";
- uper ->
- asn1ct_func:need({Erules,complete,1}),
+ #gen{erule=per,aligned=false} ->
+ asn1ct_func:need({uper,complete,1}),
"complete(encode_disp(Type, Data))"
end,
@@ -809,36 +741,36 @@ pgen_dispatcher(Erules,_Module,{Types,_Values,_,_,_Objects,_ObjectSets}) ->
end,
emit([nl,nl]),
- Return_rest = proplists:get_bool(undec_rest, Options),
- Data = case {Erules,Return_rest} of
- {ber,true} -> "Data0";
- _ -> "Data"
+ ReturnRest = proplists:get_bool(undec_rest, Gen#gen.options),
+ Data = case Gen#gen.erule =:= ber andalso ReturnRest of
+ true -> "Data0";
+ false -> "Data"
end,
- emit(["decode(Type,",Data,") ->",nl]),
+ emit(["decode(Type, ",Data,") ->",nl]),
DecWrap =
- case {Erules,Return_rest} of
- {ber,false} ->
+ case {Gen,ReturnRest} of
+ {#gen{erule=ber},false} ->
asn1ct_func:need({ber,ber_decode_nif,1}),
"element(1, ber_decode_nif(Data))";
- {ber,true} ->
+ {#gen{erule=ber},true} ->
asn1ct_func:need({ber,ber_decode_nif,1}),
emit(["{Data,Rest} = ber_decode_nif(Data0),",nl]),
"Data";
- _ ->
+ {_,_} ->
"Data"
end,
emit([case NoOkWrapper of
false -> "try";
true -> "case"
end, " decode_disp(Type, ",DecWrap,") of",nl]),
- case erule(Erules) of
- ber ->
+ case Gen of
+ #gen{erule=ber} ->
emit([" Result ->",nl]);
- per ->
+ #gen{erule=per} ->
emit([" {Result,Rest} ->",nl])
end,
- case Return_rest of
+ case ReturnRest of
false -> result_line(NoOkWrapper, ["Result"]);
true -> result_line(NoOkWrapper, ["Result","Rest"])
end,
@@ -849,18 +781,11 @@ pgen_dispatcher(Erules,_Module,{Types,_Values,_,_,_Objects,_ObjectSets}) ->
emit([nl,"end.",nl,nl])
end,
- gen_decode_partial_incomplete(Erules),
+ gen_decode_partial_incomplete(Gen),
+ gen_partial_inc_dispatcher(Gen),
- case Erules of
- ber ->
- gen_dispatcher(Types,"encode_disp","enc_",""),
- gen_dispatcher(Types,"decode_disp","dec_",""),
- gen_partial_inc_dispatcher();
- _PerOrPer_bin ->
- gen_dispatcher(Types,"encode_disp","enc_",""),
- gen_dispatcher(Types,"decode_disp","dec_","")
- end,
- emit([nl,nl]).
+ gen_dispatcher(Types, "encode_disp", "enc_"),
+ gen_dispatcher(Types, "decode_disp", "dec_").
result_line(NoOkWrapper, Items) ->
S = [" "|case NoOkWrapper of
@@ -877,23 +802,35 @@ result_line_1(Items) ->
try_catch() ->
[" catch",nl,
" Class:Exception when Class =:= error; Class =:= exit ->",nl,
+ " Stk = erlang:get_stacktrace(),",nl,
" case Exception of",nl,
- " {error,Reason}=Error ->",nl,
- " Error;",nl,
+ " {error,{asn1,Reason}} ->",nl,
+ " {error,{asn1,{Reason,Stk}}};",nl,
" Reason ->",nl,
- " {error,{asn1,Reason}}",nl,
+ " {error,{asn1,{Reason,Stk}}}",nl,
" end",nl,
"end."].
-gen_info_functions(Erules) ->
+gen_info_functions(Gen) ->
+ Erule = case Gen of
+ #gen{erule=ber} -> ber;
+ #gen{erule=per,aligned=false} -> uper;
+ #gen{erule=per,aligned=true} -> per
+ end,
+ Maps = case Gen of
+ #gen{pack=record} -> false;
+ #gen{pack=map} -> true
+ end,
emit(["encoding_rule() -> ",
- {asis,Erules},".",nl,nl,
+ {asis,Erule},".",nl,nl,
+ "maps() -> ",
+ {asis,Maps},".",nl,nl,
"bit_string_format() -> ",
{asis,asn1ct:get_bit_string_format()},".",nl,nl,
"legacy_erlang_types() -> ",
{asis,asn1ct:use_legacy_types()},".",nl,nl]).
-gen_decode_partial_incomplete(ber) ->
+gen_decode_partial_incomplete(#gen{erule=ber}) ->
case {asn1ct:read_config_data(partial_incomplete_decode),
asn1ct:get_gen_state_field(inc_type_pattern)} of
{undefined,_} ->
@@ -931,10 +868,10 @@ gen_decode_partial_incomplete(ber) ->
EmitCaseClauses(),
emit([".",nl,nl])
end;
-gen_decode_partial_incomplete(_Erule) ->
+gen_decode_partial_incomplete(#gen{}) ->
ok.
-gen_partial_inc_dispatcher() ->
+gen_partial_inc_dispatcher(#gen{erule=ber}) ->
case {asn1ct:read_config_data(partial_incomplete_decode),
asn1ct:get_gen_state_field(inc_type_pattern)} of
{undefined,_} ->
@@ -944,7 +881,9 @@ gen_partial_inc_dispatcher() ->
{Data1,Data2} ->
% io:format("partial_incomplete_decode: ~p~ninc_type_pattern: ~p~n",[Data,Data2]),
gen_partial_inc_dispatcher(Data1, Data2, "")
- end.
+ end;
+gen_partial_inc_dispatcher(#gen{}) ->
+ ok.
gen_partial_inc_dispatcher([{FuncName,TopType,_Pattern}|Rest], TypePattern, Sep) ->
TPattern =
@@ -968,12 +907,18 @@ gen_partial_inc_dispatcher([{FuncName,TopType,_Pattern}|Rest], TypePattern, Sep)
gen_partial_inc_dispatcher([], _, _) ->
emit([".",nl]).
-gen_dispatcher([F1,F2|T],FuncName,Prefix,ExtraArg) ->
- emit([FuncName,"('",F1,"',Data) -> '",Prefix,F1,"'(Data",ExtraArg,")",";",nl]),
- gen_dispatcher([F2|T],FuncName,Prefix,ExtraArg);
-gen_dispatcher([Flast|_T],FuncName,Prefix,ExtraArg) ->
- emit([FuncName,"('",Flast,"',Data) -> '",Prefix,Flast,"'(Data",ExtraArg,")",";",nl]),
- emit([FuncName,"(","Type",",_Data) -> exit({error,{asn1,{undefined_type,Type}}}).",nl,nl,nl]).
+gen_dispatcher(L, DispFunc, Prefix) ->
+ gen_dispatcher_1(L, DispFunc, Prefix),
+ emit([DispFunc,"(","Type",", _Data) ->"
+ " exit({error,{asn1,{undefined_type,Type}}}).",nl,nl]).
+
+gen_dispatcher_1([F|T], FuncName, Prefix) ->
+ Func = list_to_atom(lists:concat([Prefix,F])),
+ emit([FuncName,"(",{asis,F},", Data) -> ",
+ {asis,Func},"(Data)",";",nl]),
+ gen_dispatcher_1(T, FuncName, Prefix);
+gen_dispatcher_1([], _, _) ->
+ ok.
pgen_info() ->
emit(["info() ->",nl,
@@ -1092,22 +1037,21 @@ open_output_file(F) ->
close_output_file() ->
ok = file:close(erase(gen_file_out)).
-pgen_hrl(Erules,Module,TypeOrVal,Options,_Indent) ->
- put(currmod,Module),
- {Types,Values,Ptypes,_,_,_} = TypeOrVal,
+pgen_hrl(#gen{pack=record}=Gen, Code) ->
+ #abst{name=Module,types=Types,values=Values,ptypes=Ptypes} = Code,
Ret =
- case pgen_hrltypes(Erules,Module,Ptypes++Types,0) of
+ case pgen_hrltypes(Gen, Module, Ptypes++Types, 0) of
0 ->
case Values of
[] ->
0;
_ ->
- open_hrl(get(outfile),get(currmod)),
- pgen_macros(Erules,Module,Values),
+ open_hrl(get(outfile), Module),
+ pgen_macros(Gen, Module, Values),
1
end;
X ->
- pgen_macros(Erules,Module,Values),
+ pgen_macros(Gen, Module, Values),
X
end,
case Ret of
@@ -1119,62 +1063,61 @@ pgen_hrl(Erules,Module,TypeOrVal,Options,_Indent) ->
close_output_file(),
asn1ct:verbose("--~p--~n",
[{generated,lists:concat([get(outfile),".hrl"])}],
- Options),
+ Gen),
Y
- end.
+ end;
+pgen_hrl(#gen{pack=map}, _) ->
+ 0.
pgen_macros(_,_,[]) ->
true;
-pgen_macros(Erules,Module,[H|T]) ->
- Valuedef = asn1_db:dbget(Module,H),
- gen_macro(Valuedef),
- pgen_macros(Erules,Module,T).
+pgen_macros(Gen, Module, [H|T]) ->
+ Valuedef = asn1_db:dbget(Module, H),
+ gen_macro(Gen, Valuedef),
+ pgen_macros(Gen, Module, T).
pgen_hrltypes(_,_,[],NumRecords) ->
NumRecords;
-pgen_hrltypes(Erules,Module,[H|T],NumRecords) ->
-% io:format("records = ~p~n",NumRecords),
- Typedef = asn1_db:dbget(Module,H),
- AddNumRecords = gen_record(Typedef,NumRecords),
- pgen_hrltypes(Erules,Module,T,NumRecords+AddNumRecords).
+pgen_hrltypes(Gen, Module, [H|T], NumRecords) ->
+ Typedef = asn1_db:dbget(Module, H),
+ AddNumRecords = gen_record(Gen, Typedef, NumRecords),
+ pgen_hrltypes(Gen, Module, T, NumRecords+AddNumRecords).
%% Generates a macro for value Value defined in the ASN.1 module
-gen_macro(Value) when is_record(Value,valuedef) ->
- Prefix = get_macro_name_prefix(),
- emit({"-define('",Prefix,Value#valuedef.name,"', ",
- {asis,Value#valuedef.value},").",nl}).
+gen_macro(Gen, #valuedef{name=Name,value=Value}) ->
+ Prefix = get_macro_name_prefix(Gen),
+ emit(["-define('",Prefix,Name,"', ",{asis,Value},").",nl]).
%% Generate record functions **************
%% Generates an Erlang record for each named and unnamed SEQUENCE and SET in the ASN.1
%% module. If no SEQUENCE or SET is found there is no .hrl file generated
-gen_record(Tdef,NumRecords) when is_record(Tdef,typedef) ->
+gen_record(Gen, #typedef{}=Tdef, NumRecords) ->
Name = [Tdef#typedef.name],
Type = Tdef#typedef.typespec,
- gen_record(type,Name,Type,NumRecords);
-
-gen_record(Tdef,NumRecords) when is_record(Tdef,ptypedef) ->
+ gen_record(Gen, type, Name, Type, NumRecords);
+gen_record(Gen, #ptypedef{}=Tdef, NumRecords) ->
Name = [Tdef#ptypedef.name],
Type = Tdef#ptypedef.typespec,
- gen_record(ptype,Name,Type,NumRecords).
-
-gen_record(TorPtype,Name,[#'ComponentType'{name=Cname,typespec=Type}|T],Num) ->
- Num2 = gen_record(TorPtype,[Cname|Name],Type,Num),
- gen_record(TorPtype,Name,T,Num2);
-gen_record(TorPtype,Name,{Clist1,Clist2},Num)
+ gen_record(Gen, ptype, Name, Type, NumRecords).
+
+gen_record(Gen, TorPtype, Name,
+ [#'ComponentType'{name=Cname,typespec=Type}|T], Num) ->
+ Num2 = gen_record(Gen, TorPtype, [Cname|Name], Type, Num),
+ gen_record(Gen, TorPtype, Name, T, Num2);
+gen_record(Gen, TorPtype, Name, {Clist1,Clist2}, Num)
when is_list(Clist1), is_list(Clist2) ->
- gen_record(TorPtype,Name,Clist1++Clist2,Num);
-gen_record(TorPtype,Name,{Clist1,EClist,Clist2},Num)
+ gen_record(Gen, TorPtype, Name, Clist1++Clist2, Num);
+gen_record(Gen, TorPtype, Name, {Clist1,EClist,Clist2}, Num)
when is_list(Clist1), is_list(EClist), is_list(Clist2) ->
- gen_record(TorPtype,Name,Clist1++EClist++Clist2,Num);
-gen_record(TorPtype,Name,[_|T],Num) -> % skip EXTENSIONMARK
- gen_record(TorPtype,Name,T,Num);
-gen_record(_TorPtype,_Name,[],Num) ->
+ gen_record(Gen, TorPtype, Name, Clist1++EClist++Clist2, Num);
+gen_record(Gen, TorPtype, Name, [_|T], Num) -> % skip EXTENSIONMARK
+ gen_record(Gen, TorPtype, Name, T, Num);
+gen_record(_Gen, _TorPtype, _Name, [], Num) ->
Num;
-
-gen_record(TorPtype,Name,Type,Num) when is_record(Type,type) ->
+gen_record(Gen, TorPtype, Name, #type{}=Type, Num) ->
Def = Type#type.def,
Rec = case Def of
Seq when is_record(Seq,'SEQUENCE') ->
@@ -1209,127 +1152,103 @@ gen_record(TorPtype,Name,Type,Num) when is_record(Type,type) ->
0 -> open_hrl(get(outfile),get(currmod));
_ -> true
end,
- Prefix = get_record_name_prefix(),
- emit({"-record('",Prefix,list2name(Name),"',{",nl}),
- RootList = case CompList of
- _ when is_list(CompList) ->
- CompList;
- {Rl,_} -> Rl;
- {Rl1,_Ext,_Rl2} -> Rl1
- end,
- gen_record2(Name,'SEQUENCE',RootList),
- NewCompList =
+ do_gen_record(Gen, Name, CompList),
+ NewCompList =
case CompList of
{CompList1,[]} ->
- emit({"}). % with extension mark",nl,nl}),
CompList1;
{Tr,ExtensionList2} ->
- case Tr of
- [] -> true;
- _ -> emit({",",nl})
- end,
- emit({"%% with extensions",nl}),
- gen_record2(Name, 'SEQUENCE', ExtensionList2,
- "", ext),
- emit({"}).",nl,nl}),
Tr ++ ExtensionList2;
{Rootl1,Extl,Rootl2} ->
- case Rootl1 =/= [] andalso Extl++Rootl2 =/= [] of
- true -> emit([com]);
- false -> ok
- end,
- case Rootl1 of
- [_|_] -> emit([nl]);
- [] -> ok
- end,
- emit(["%% with extensions",nl]),
- gen_record2(Name,'SEQUENCE',Extl,"",ext),
- case Extl =/= [] andalso Rootl2 =/= [] of
- true -> emit([com]);
- false -> ok
- end,
- case Extl of
- [_|_] -> emit([nl]);
- [] -> ok
- end,
- emit(["%% end of extensions",nl]),
- gen_record2(Name,'SEQUENCE',Rootl2,"",noext),
- emit(["}).",nl,nl]),
Rootl1++Extl++Rootl2;
- _ ->
- emit({"}).",nl,nl}),
+ _ ->
CompList
end,
- gen_record(TorPtype,Name,NewCompList,Num+1);
+ gen_record(Gen, TorPtype, Name, NewCompList, Num+1);
{inner,{'CHOICE', CompList}} ->
- gen_record(TorPtype,Name,CompList,Num);
+ gen_record(Gen, TorPtype, Name, CompList, Num);
{NewName,{_, CompList}} ->
- gen_record(TorPtype,NewName,CompList,Num)
+ gen_record(Gen, TorPtype, NewName, CompList, Num)
end;
-gen_record(_,_,_,NumRecords) -> % skip CLASS etc for now.
+gen_record(_, _, _, _, NumRecords) -> % skip CLASS etc for now.
NumRecords.
-
-gen_head(Erules,Mod,Hrl) ->
- Options = get(encoding_options),
- case Erules of
- per ->
- emit(["%% Generated by the Erlang ASN.1 PER-"
- "compiler version, utilizing bit-syntax:",
- asn1ct:vsn(),nl]);
- ber ->
- emit(["%% Generated by the Erlang ASN.1 BER_V2-"
- "compiler version, utilizing bit-syntax:",
- asn1ct:vsn(),nl]);
- uper ->
- emit(["%% Generated by the Erlang ASN.1 UNALIGNED"
- " PER-compiler version, utilizing bit-syntax:",
- asn1ct:vsn(),nl])
+
+do_gen_record(Gen, Name, CL0) ->
+ CL = case CL0 of
+ {Root,[]} ->
+ Root ++ [{comment,"with extension mark"}];
+ {Root,Ext} ->
+ Root ++ [{comment,"with exensions"}] ++
+ only_components(Ext);
+ {Root1,Ext,Root2} ->
+ Root1 ++ [{comment,"with exensions"}] ++
+ only_components(Ext) ++
+ [{comment,"end of extensions"}] ++ Root2;
+ _ when is_list(CL0) ->
+ CL0
+ end,
+ Prefix = get_record_name_prefix(Gen),
+ emit(["-record('",Prefix,list2name(Name),"', {"] ++
+ do_gen_record_1(CL) ++
+ [nl,"}).",nl,nl]).
+
+only_components(CL) ->
+ [C || #'ComponentType'{}=C <- CL].
+
+do_gen_record_1([#'ComponentType'{name=Name,prop=Prop}|T]) ->
+ Val = case Prop of
+ 'OPTIONAL' ->
+ " = asn1_NOVALUE";
+ {'DEFAULT',_} ->
+ " = asn1_DEFAULT";
+ _ ->
+ []
+ end,
+ Com = case needs_trailing_comma(T) of
+ true -> [com];
+ false -> []
end,
- emit({"%% Purpose: encoder and decoder to the types in mod ",Mod,nl,nl}),
- emit({"-module('",Mod,"').",nl}),
- put(currmod,Mod),
- emit({"-compile(nowarn_unused_vars).",nl}),
- emit({"-dialyzer(no_improper_lists).",nl}),
+ [nl," ",{asis,Name},Val,Com|do_gen_record_1(T)];
+do_gen_record_1([{comment,Text}|T]) ->
+ [nl," %% ",Text|do_gen_record_1(T)];
+do_gen_record_1([]) ->
+ [].
+
+needs_trailing_comma([#'ComponentType'{}|_]) -> true;
+needs_trailing_comma([_|T]) -> needs_trailing_comma(T);
+needs_trailing_comma([]) -> false.
+
+gen_head(#gen{options=Options}=Gen, Mod, Hrl) ->
+ Name = case Gen of
+ #gen{erule=per,aligned=false} ->
+ "PER (unaligned)";
+ #gen{erule=per,aligned=true} ->
+ "PER (aligned)";
+ #gen{erule=ber} ->
+ "BER"
+ end,
+ emit(["%% Generated by the Erlang ASN.1 ",Name,
+ " compiler. Version: ",asn1ct:vsn(),nl,
+ "%% Purpose: Encoding and decoding of the types in ",
+ Mod,".",nl,nl,
+ "-module('",Mod,"').",nl,
+ "-compile(nowarn_unused_vars).",nl,
+ "-dialyzer(no_improper_lists).",nl]),
case Hrl of
0 -> ok;
- _ -> emit({"-include(\"",Mod,".hrl\").",nl})
+ _ -> emit(["-include(\"",Mod,".hrl\").",nl])
end,
emit(["-asn1_info([{vsn,'",asn1ct:vsn(),"'},",nl,
" {module,'",Mod,"'},",nl,
" {options,",io_lib:format("~p",[Options]),"}]).",nl,nl]).
-
-
-gen_hrlhead(Mod) ->
- emit({"%% Generated by the Erlang ASN.1 compiler version:",asn1ct:vsn(),nl}),
- emit({"%% Purpose: Erlang record definitions for each named and unnamed",nl}),
- emit({"%% SEQUENCE and SET, and macro definitions for each value",nl}),
- emit({"%% definition,in module ",Mod,nl,nl}),
- emit({nl,nl}).
-gen_record2(Name,SeqOrSet,Comps) ->
- gen_record2(Name,SeqOrSet,Comps,"",noext).
-gen_record2(_Name,_SeqOrSet,[],_Com,_Extension) ->
- true;
-gen_record2(_Name,_SeqOrSet,[H = #'ComponentType'{name=Cname}],Com,Extension) ->
- emit(Com),
- emit({asis,Cname}),
- gen_record_default(H, Extension);
-gen_record2(Name,SeqOrSet,[H = #'ComponentType'{name=Cname}|T],Com, Extension) ->
- emit(Com),
- emit({asis,Cname}),
- gen_record_default(H, Extension),
- gen_record2(Name,SeqOrSet,T,", ", Extension);
-gen_record2(Name,SeqOrSet,[_|T],Com,Extension) ->
- %% skip EXTENSIONMARK, ExtensionAdditionGroup and other markers
- gen_record2(Name,SeqOrSet,T,Com,Extension).
-
-gen_record_default(#'ComponentType'{prop='OPTIONAL'}, _)->
- emit(" = asn1_NOVALUE");
-gen_record_default(#'ComponentType'{prop={'DEFAULT',_}}, _)->
- emit(" = asn1_DEFAULT");
-gen_record_default(_, _) ->
- true.
+gen_hrlhead(Mod) ->
+ emit(["%% Generated by the Erlang ASN.1 compiler. Version: ",
+ asn1ct:vsn(),nl,
+ "%% Purpose: Erlang record definitions for each named and unnamed",nl,
+ "%% SEQUENCE and SET, and macro definitions for each value",nl,
+ "%% definition in module ",Mod,".",nl,nl]).
%% May only be a list or a two-tuple.
to_textual_order({Root,Ext}) ->
@@ -1585,27 +1504,19 @@ constructed_suffix('SEQUENCE OF',_) ->
constructed_suffix('SET OF',_) ->
'SETOF'.
-erule(ber) -> ber;
-erule(per) -> per;
-erule(uper) -> per.
-
index2suffix(0) ->
"";
index2suffix(N) ->
lists:concat(["_",N]).
-ct_gen_module(ber) ->
+ct_gen_module(#gen{erule=ber}) ->
asn1ct_gen_ber_bin_v2;
-ct_gen_module(per) ->
- asn1ct_gen_per;
-ct_gen_module(uper) ->
+ct_gen_module(#gen{erule=per}) ->
asn1ct_gen_per.
-ct_constructed_module(ber) ->
+ct_constructed_module(#gen{erule=ber}) ->
asn1ct_constructed_ber_bin_v2;
-ct_constructed_module(per) ->
- asn1ct_constructed_per;
-ct_constructed_module(uper) ->
+ct_constructed_module(#gen{erule=per}) ->
asn1ct_constructed_per.
get_constraint(C,Key) ->
@@ -1617,19 +1528,9 @@ get_constraint(C,Key) ->
{value,Cnstr} ->
Cnstr
end.
-
-get_record_name_prefix() ->
- case lists:keysearch(record_name_prefix,1,get(encoding_options)) of
- false ->
- "";
- {value,{_,Prefix}} ->
- Prefix
- end.
-get_macro_name_prefix() ->
- case lists:keysearch(macro_name_prefix,1,get(encoding_options)) of
- false ->
- "";
- {value,{_,Prefix}} ->
- Prefix
- end.
+get_record_name_prefix(#gen{rec_prefix=Prefix}) ->
+ Prefix.
+
+get_macro_name_prefix(#gen{macro_prefix=Prefix}) ->
+ Prefix.
diff --git a/lib/asn1/src/asn1ct_gen_ber_bin_v2.erl b/lib/asn1/src/asn1ct_gen_ber_bin_v2.erl
index b884d14b0d..6c6d4193f3 100644
--- a/lib/asn1/src/asn1ct_gen_ber_bin_v2.erl
+++ b/lib/asn1/src/asn1ct_gen_ber_bin_v2.erl
@@ -1200,11 +1200,13 @@ gen_objset_enc(Erules, ObjSetName, UniqueName,
{no_mod,no_name} ->
gen_inlined_enc_funs(Fields, ClFields, ObjSetName, Val, NthObj);
{CurrMod,Name} ->
- emit(["'getenc_",ObjSetName,"'(",{asis,Val},") ->",nl,
+ emit(["'getenc_",ObjSetName,"'(Id) when Id =:= ",
+ {asis,Val}," ->",nl,
" fun 'enc_",Name,"'/3;",nl]),
{[],NthObj};
{ModuleName,Name} ->
- emit(["'getenc_",ObjSetName,"'(",{asis,Val},") ->",nl]),
+ emit(["'getenc_",ObjSetName,"'(Id) when Id =:= ",
+ {asis,Val}," ->",nl]),
emit_ext_fun(enc,ModuleName,Name),
emit([";",nl]),
{[],NthObj};
@@ -1382,11 +1384,13 @@ gen_objset_dec(Erules, ObjSName, UniqueName, [{ObjName,Val,Fields}|T],
{no_mod,no_name} ->
gen_inlined_dec_funs(Fields,ClFields,ObjSName,Val,NthObj);
{CurrMod,Name} ->
- emit(["'getdec_",ObjSName,"'(",{asis,Val},") ->",nl,
+ emit(["'getdec_",ObjSName,"'(Id) when Id =:= ",
+ {asis,Val}," ->",nl,
" fun 'dec_",Name,"'/3;", nl]),
NthObj;
{ModuleName,Name} ->
- emit(["'getdec_",ObjSName,"'(",{asis,Val},") ->",nl]),
+ emit(["'getdec_",ObjSName,"'(Id) when Id =:= ",
+ {asis,Val}," ->",nl]),
emit_ext_fun(dec,ModuleName,Name),
emit([";",nl]),
NthObj;
diff --git a/lib/asn1/src/asn1ct_gen_check.erl b/lib/asn1/src/asn1ct_gen_check.erl
index abe77dd0cb..ccc62a3ce3 100644
--- a/lib/asn1/src/asn1ct_gen_check.erl
+++ b/lib/asn1/src/asn1ct_gen_check.erl
@@ -21,45 +21,51 @@
%%
-module(asn1ct_gen_check).
--export([emit/3]).
+-export([emit/4]).
-import(asn1ct_gen, [emit/1]).
-include("asn1_records.hrl").
-emit(Type, Default, Value) ->
+emit(Gen, Type, Default, Value) ->
Key = {Type,Default},
- Gen = fun(Fd, Name) ->
- file:write(Fd, gen(Name, Type, Default))
- end,
+ DoGen = fun(Fd, Name) ->
+ file:write(Fd, gen(Gen, Name, Type, Default))
+ end,
emit(" case "),
- asn1ct_func:call_gen("is_default_", Key, Gen, [Value]),
+ asn1ct_func:call_gen("is_default_", Key, DoGen, [Value]),
emit([" of",nl,
"true -> {[],0};",nl,
"false ->",nl]).
-gen(Name, #type{def=T}, Default) ->
+gen(#gen{pack=Pack}=Gen, Name, #type{def=T}, Default) ->
+ DefMarker = case Pack of
+ record -> "asn1_DEFAULT";
+ map -> atom_to_list(?MISSING_IN_MAP)
+ end,
NameStr = atom_to_list(Name),
- [NameStr,"(asn1_DEFAULT) ->\n",
- "true;\n"|case do_gen(T, Default) of
- {literal,Literal} ->
- [NameStr,"(",term2str(Literal),") ->\n","true;\n",
- NameStr,"(_) ->\n","false.\n\n"];
- {exception,Func,Args} ->
- [NameStr,"(Value) ->\n",
- "try ",Func,"(Value",arg2str(Args),") of\n",
- "_ -> true\n"
- "catch throw:false -> false\n"
- "end.\n\n"]
- end].
+ [NameStr,"(",DefMarker,") ->\n",
+ "true;\n"|
+ case do_gen(Gen, T, Default) of
+ {literal,Literal} ->
+ [NameStr,"(Def) when Def =:= ",term2str(Literal)," ->\n",
+ "true;\n",
+ NameStr,"(_) ->\n","false.\n\n"];
+ {exception,Func,Args} ->
+ [NameStr,"(Value) ->\n",
+ "try ",Func,"(Value",arg2str(Args),") of\n",
+ "_ -> true\n"
+ "catch throw:false -> false\n"
+ "end.\n\n"]
+ end].
-do_gen(_, asn1_NOVALUE) ->
+do_gen(_Gen, _, asn1_NOVALUE) ->
{literal,asn1_NOVALUE};
-do_gen(#'Externaltypereference'{module=M,type=T}, Default) ->
+do_gen(Gen, #'Externaltypereference'{module=M,type=T}, Default) ->
#typedef{typespec=#type{def=Td}} = asn1_db:dbget(M, T),
- do_gen(Td, Default);
-do_gen('BOOLEAN', Default) ->
+ do_gen(Gen, Td, Default);
+do_gen(_Gen, 'BOOLEAN', Default) ->
{literal,Default};
-do_gen({'BIT STRING',[]}, Default) ->
+do_gen(_Gen, {'BIT STRING',[]}, Default) ->
true = is_bitstring(Default), %Assertion.
case asn1ct:use_legacy_types() of
false ->
@@ -67,17 +73,17 @@ do_gen({'BIT STRING',[]}, Default) ->
true ->
{exception,need(check_legacy_bitstring, 2),[Default]}
end;
-do_gen({'BIT STRING',[_|_]=NBL}, Default) ->
+do_gen(_Gen, {'BIT STRING',[_|_]=NBL}, Default) ->
do_named_bitstring(NBL, Default);
-do_gen({'ENUMERATED',_}, Default) ->
+do_gen(_Gen, {'ENUMERATED',_}, Default) ->
{literal,Default};
-do_gen('INTEGER', Default) ->
+do_gen(_Gen, 'INTEGER', Default) ->
{literal,Default};
-do_gen({'INTEGER',NNL}, Default) ->
+do_gen(_Gen, {'INTEGER',NNL}, Default) ->
{exception,need(check_int, 3),[Default,NNL]};
-do_gen('NULL', Default) ->
+do_gen(_Gen, 'NULL', Default) ->
{literal,Default};
-do_gen('OCTET STRING', Default) ->
+do_gen(_Gen, 'OCTET STRING', Default) ->
true = is_binary(Default), %Assertion.
case asn1ct:use_legacy_types() of
false ->
@@ -85,34 +91,34 @@ do_gen('OCTET STRING', Default) ->
true ->
{exception,need(check_octetstring, 2),[Default]}
end;
-do_gen('OBJECT IDENTIFIER', Default0) ->
+do_gen(_Gen, 'OBJECT IDENTIFIER', Default0) ->
Default = pre_process_oid(Default0),
{exception,need(check_objectidentifier, 2),[Default]};
-do_gen({'CHOICE',Cs}, Default) ->
+do_gen(Gen, {'CHOICE',Cs}, Default) ->
{Tag,Value} = Default,
[Type] = [Type || #'ComponentType'{name=T,typespec=Type} <- Cs,
T =:= Tag],
- case do_gen(Type#type.def, Value) of
+ case do_gen(Gen, Type#type.def, Value) of
{literal,Lit} ->
{literal,{Tag,Lit}};
{exception,Func0,Args} ->
Key = {Tag,Func0,Args},
- Gen = fun(Fd, Name) ->
- S = gen_choice(Name, Tag, Func0, Args),
- ok = file:write(Fd, S)
+ DoGen = fun(Fd, Name) ->
+ S = gen_choice(Name, Tag, Func0, Args),
+ ok = file:write(Fd, S)
end,
- Func = asn1ct_func:call_gen("is_default_choice", Key, Gen),
+ Func = asn1ct_func:call_gen("is_default_choice", Key, DoGen),
{exception,atom_to_list(Func),[]}
end;
-do_gen(#'SEQUENCE'{components=Cs}, Default) ->
- do_seq_set(Cs, Default);
-do_gen({'SEQUENCE OF',Type}, Default) ->
- do_sof(Type, Default);
-do_gen(#'SET'{components=Cs}, Default) ->
- do_seq_set(Cs, Default);
-do_gen({'SET OF',Type}, Default) ->
- do_sof(Type, Default);
-do_gen(Type, Default) ->
+do_gen(Gen, #'SEQUENCE'{components=Cs}, Default) ->
+ do_seq_set(Gen, Cs, Default);
+do_gen(Gen, {'SEQUENCE OF',Type}, Default) ->
+ do_sof(Gen, Type, Default);
+do_gen(Gen, #'SET'{components=Cs}, Default) ->
+ do_seq_set(Gen, Cs, Default);
+do_gen(Gen, {'SET OF',Type}, Default) ->
+ do_sof(Gen, Type, Default);
+do_gen(_Gen, Type, Default) ->
case asn1ct_gen:unify_if_string(Type) of
restrictedstring ->
{exception,need(check_restrictedstring, 2),[Default]};
@@ -136,39 +142,58 @@ do_named_bitstring(_, Default) when is_bitstring(Default) ->
end,
{exception,need(Func, 3),[Default,bit_size(Default)]}.
-do_seq_set(Cs0, Default) ->
+do_seq_set(#gen{pack=record}=Gen, Cs0, Default) ->
Tag = element(1, Default),
Cs1 = [T || #'ComponentType'{typespec=T} <- Cs0],
- Cs = components(Cs1, tl(tuple_to_list(Default))),
+ Cs = components(Gen, Cs1, tl(tuple_to_list(Default))),
case are_all_literals(Cs) of
true ->
Literal = list_to_tuple([Tag|[L || {literal,L} <- Cs]]),
{literal,Literal};
false ->
Key = {Cs,Default},
- Gen = fun(Fd, Name) ->
- S = gen_components(Name, Tag, Cs),
- ok = file:write(Fd, S)
- end,
- Func = asn1ct_func:call_gen("is_default_cs_", Key, Gen),
+ DoGen = fun(Fd, Name) ->
+ S = gen_components(Name, Tag, Cs),
+ ok = file:write(Fd, S)
+ end,
+ Func = asn1ct_func:call_gen("is_default_cs_", Key, DoGen),
+ {exception,atom_to_list(Func),[]}
+ end;
+do_seq_set(#gen{pack=map}=Gen, Cs0, Default) ->
+ Cs1 = [{N,T} || #'ComponentType'{name=N,typespec=T} <- Cs0],
+ Cs = map_components(Gen, Cs1, Default),
+ AllLiterals = lists:all(fun({_,{literal,_}}) -> true;
+ ({_,_}) -> false
+ end, Cs),
+ case AllLiterals of
+ true ->
+ L = [{Name,Lit} || {Name,{literal,Lit}} <- Cs],
+ {literal,maps:from_list(L)};
+ false ->
+ Key = {Cs,Default},
+ DoGen = fun(Fd, Name) ->
+ S = gen_map_components(Name, Cs),
+ ok = file:write(Fd, S)
+ end,
+ Func = asn1ct_func:call_gen("is_default_cs_", Key, DoGen),
{exception,atom_to_list(Func),[]}
end.
-do_sof(Type, Default0) ->
+do_sof(Gen, Type, Default0) ->
Default = lists:sort(Default0),
Cs0 = lists:duplicate(length(Default), Type),
- Cs = components(Cs0, Default),
+ Cs = components(Gen, Cs0, Default),
case are_all_literals(Cs) of
true ->
Literal = [Lit || {literal,Lit} <- Cs],
{exception,need(check_literal_sof, 2),[Literal]};
false ->
Key = Cs,
- Gen = fun(Fd, Name) ->
- S = gen_sof(Name, Cs),
- ok = file:write(Fd, S)
+ DoGen = fun(Fd, Name) ->
+ S = gen_sof(Name, Cs),
+ ok = file:write(Fd, S)
end,
- Func = asn1ct_func:call_gen("is_default_sof", Key, Gen),
+ Func = asn1ct_func:call_gen("is_default_sof", Key, DoGen),
{exception,atom_to_list(Func),[]}
end.
@@ -199,6 +224,39 @@ gen_cs_2([], _) ->
"throw(false)\n"
"end.\n"].
+gen_map_components(Name, Cs) ->
+ [atom_to_list(Name),"(Value) ->\n",
+ "case Value of\n",
+ "#{"|gen_map_cs_1(Cs, 1, "", [])].
+
+gen_map_cs_1([{Name,{literal,Lit}}|T], I, Sep, Acc) ->
+ Var = "E"++integer_to_list(I),
+ G = Var ++ " =:= " ++ term2str(Lit),
+ [Sep,term2str(Name),":=",Var|
+ gen_map_cs_1(T, I+1, ",\n", [{guard,G}|Acc])];
+gen_map_cs_1([{Name,Exc}|T], I, Sep, Acc) ->
+ Var = "E"++integer_to_list(I),
+ [Sep,term2str(Name),":=",Var|
+ gen_map_cs_1(T, I+1, ",\n", [{exc,{Var,Exc}}|Acc])];
+gen_map_cs_1([], _, _, Acc) ->
+ G = lists:join(", ", [S || {guard,S} <- Acc]),
+ Exc = [E || {exc,E} <- Acc],
+ Body = gen_map_cs_2(Exc, ""),
+ case G of
+ [] ->
+ ["} ->\n"|Body];
+ [_|_] ->
+ ["} when ",G," ->\n"|Body]
+ end.
+
+gen_map_cs_2([{Var,{exception,Func,Args}}|T], Sep) ->
+ [Sep,Func,"(",Var,arg2str(Args),")"|gen_map_cs_2(T, ",\n")];
+gen_map_cs_2([], _) ->
+ [";\n",
+ "_ ->\n"
+ "throw(false)\n"
+ "end.\n"].
+
gen_sof(Name, Cs) ->
[atom_to_list(Name),"(Value) ->\n",
"case length(Value) of\n",
@@ -221,9 +279,18 @@ gen_sof_1([{exception,Func,Args}|Cs], I) ->
gen_sof_1([], _) ->
".\n".
-components([#type{def=Def}|Ts], [V|Vs]) ->
- [do_gen(Def, V)|components(Ts, Vs)];
-components([], []) -> [].
+components(Gen, [#type{def=Def}|Ts], [V|Vs]) ->
+ [do_gen(Gen, Def, V)|components(Gen, Ts, Vs)];
+components(_Gen, [], []) -> [].
+
+map_components(Gen, [{Name,#type{def=Def}}|Ts], Value) ->
+ case maps:find(Name, Value) of
+ {ok,V} ->
+ [{Name,do_gen(Gen, Def, V)}|map_components(Gen, Ts, Value)];
+ error ->
+ map_components(Gen, Ts, Value)
+ end;
+map_components(_Gen, [], _Value) -> [].
gen_choice(Name, Tag, Func, Args) ->
NameStr = atom_to_list(Name),
diff --git a/lib/asn1/src/asn1ct_gen_per.erl b/lib/asn1/src/asn1ct_gen_per.erl
index aa7223904e..9671a566bf 100644
--- a/lib/asn1/src/asn1ct_gen_per.erl
+++ b/lib/asn1/src/asn1ct_gen_per.erl
@@ -113,11 +113,7 @@ gen_encode_prim(Erules, D) ->
Value = {var,atom_to_list(asn1ct_gen:mk_var(asn1ct_name:curr(val)))},
gen_encode_prim(Erules, D, Value).
-gen_encode_prim(Erules, #type{}=D, Value) ->
- Aligned = case Erules of
- uper -> false;
- per -> true
- end,
+gen_encode_prim(#gen{erule=per,aligned=Aligned}, #type{}=D, Value) ->
Imm = gen_encode_prim_imm(Value, D, Aligned),
asn1ct_imm:enc_cg(Imm, Aligned).
@@ -284,11 +280,7 @@ gen_dec_external(Ext, BytesVar) ->
_ -> [{asis,Mod},":"]
end,{asis,dec_func(Type)},"(",BytesVar,")"]).
-gen_dec_imm(Erule, #type{def=Name,constraint=C}) ->
- Aligned = case Erule of
- uper -> false;
- per -> true
- end,
+gen_dec_imm(#gen{erule=per,aligned=Aligned}, #type{def=Name,constraint=C}) ->
gen_dec_imm_1(Name, C, Aligned).
gen_dec_imm_1('ASN1_OPEN_TYPE', Constraint, Aligned) ->
diff --git a/lib/asn1/src/asn1ct_imm.erl b/lib/asn1/src/asn1ct_imm.erl
index 8b96242c56..130f68c21d 100644
--- a/lib/asn1/src/asn1ct_imm.erl
+++ b/lib/asn1/src/asn1ct_imm.erl
@@ -37,9 +37,12 @@
per_enc_open_type/2,
per_enc_restricted_string/3,
per_enc_small_number/2]).
--export([per_enc_extension_bit/2,per_enc_extensions/4,per_enc_optional/3]).
+-export([per_enc_extension_bit/2,per_enc_extensions/4,
+ per_enc_extensions_map/4,
+ per_enc_optional/2]).
-export([per_enc_sof/5]).
--export([enc_absent/3,enc_append/1,enc_element/2]).
+-export([enc_absent/3,enc_append/1,enc_element/2,enc_maps_get/2,
+ enc_comment/1]).
-export([enc_cg/2]).
-export([optimize_alignment/1,optimize_alignment/2,
dec_slim_cg/2,dec_code_gen/2]).
@@ -214,7 +217,8 @@ per_enc_legacy_bit_string(Val0, NNL0, Constraint0, Aligned) ->
per_enc_boolean(Val0, _Aligned) ->
{B,[Val]} = mk_vars(Val0, []),
B++build_cond([[{eq,Val,false},{put_bits,0,1,[1]}],
- [{eq,Val,true},{put_bits,1,1,[1]}]]).
+ [{eq,Val,true},{put_bits,1,1,[1]}],
+ ['_',{error,{illegal_boolean,Val}}]]).
per_enc_choice(Val0, Cs0, _Aligned) ->
{B,[Val]} = mk_vars(Val0, []),
@@ -235,7 +239,7 @@ per_enc_enumerated(Val0, Root, Aligned) ->
B++[{'cond',Cs++enumerated_error(Val)}].
enumerated_error(Val) ->
- [['_',{error,Val}]].
+ [['_',{error,{illegal_enumerated,Val}}]].
per_enc_integer(Val0, Constraint0, Aligned) ->
{B,[Val]} = mk_vars(Val0, []),
@@ -349,27 +353,32 @@ per_enc_extensions(Val0, Pos0, NumBits, Aligned) when NumBits > 0 ->
['_'|Length ++ PutBits]]}],
{var,"Extensions"}}].
-per_enc_optional(Val0, {Pos,DefVals}, _Aligned) when is_integer(Pos),
- is_list(DefVals) ->
- {B,Val} = enc_element(Pos, Val0),
+per_enc_extensions_map(Val0, Vars, Undefined, Aligned) ->
+ NumBits = length(Vars),
+ {B,[_Val,Bitmap]} = mk_vars(Val0, [bitmap]),
+ Length = per_enc_small_length(NumBits, Aligned),
+ PutBits = case NumBits of
+ 1 -> [{put_bits,1,1,[1]}];
+ _ -> [{put_bits,Bitmap,NumBits,[1]}]
+ end,
+ BitmapExpr = extensions_bitmap(Vars, Undefined),
+ B++[{assign,Bitmap,BitmapExpr},
+ {list,[{'cond',[[{eq,Bitmap,0}],
+ ['_'|Length ++ PutBits]]}],
+ {var,"Extensions"}}].
+
+per_enc_optional(Val, DefVals) when is_list(DefVals) ->
Zero = {put_bits,0,1,[1]},
One = {put_bits,1,1,[1]},
- B++[{'cond',
- [[{eq,Val,DefVal},Zero] || DefVal <- DefVals] ++ [['_',One]]}];
-per_enc_optional(Val0, {Pos,{call,M,F,A}}, _Aligned) when is_integer(Pos) ->
- {B,Val} = enc_element(Pos, Val0),
+ [{'cond',
+ [[{eq,Val,DefVal},Zero] || DefVal <- DefVals] ++ [['_',One]]}];
+per_enc_optional(Val, {call,M,F,A}) ->
{[],[[],Tmp]} = mk_vars([], [tmp]),
Zero = {put_bits,0,1,[1]},
One = {put_bits,1,1,[1]},
- B++[{call,M,F,[Val|A],Tmp},
- {'cond',
- [[{eq,Tmp,true},Zero],['_',One]]}];
-per_enc_optional(Val0, Pos, _Aligned) when is_integer(Pos) ->
- {B,Val} = enc_element(Pos, Val0),
- Zero = {put_bits,0,1,[1]},
- One = {put_bits,1,1,[1]},
- B++[{'cond',[[{eq,Val,asn1_NOVALUE},Zero],
- ['_',One]]}].
+ [{call,M,F,[Val|A],Tmp},
+ {'cond',
+ [[{eq,Tmp,true},Zero],['_',One]]}].
per_enc_sof(Val0, Constraint, ElementVar, ElementImm, Aligned) ->
{B,[Val,Len]} = mk_vars(Val0, [len]),
@@ -423,6 +432,16 @@ enc_element(N, Val0) ->
{[],[Val,Dst]} = mk_vars(Val0, [element]),
{[{call,erlang,element,[N,Val],Dst}],Dst}.
+enc_maps_get(N, Val0) ->
+ {[],[Val,Dst0]} = mk_vars(Val0, [element]),
+ {var,Dst} = Dst0,
+ DstExpr = {expr,lists:concat(["#{",N,":=",Dst,"}"])},
+ {var,SrcVar} = Val,
+ {[{assign,DstExpr,SrcVar}],Dst0}.
+
+enc_comment(Comment) ->
+ {comment,Comment}.
+
enc_cg(Imm0, false) ->
Imm1 = enc_cse(Imm0),
Imm2 = enc_pre_cg(Imm1),
@@ -860,10 +879,8 @@ flatten_map_cs_1([integer_default], {Int,_}) ->
[{'_',Int}];
flatten_map_cs_1([enum_default], {Int,_}) ->
[{'_',["{asn1_enum,",Int,"}"]}];
-flatten_map_cs_1([enum_error], {Var,Cs}) ->
- Vs = [V || {_,V} <- Cs],
- [{'_',["exit({error,{asn1,{decode_enumerated,{",Var,",",
- {asis,Vs},"}}}})"]}];
+flatten_map_cs_1([enum_error], {Var,_}) ->
+ [{'_',["exit({error,{asn1,{decode_enumerated,",Var,"}}})"]}];
flatten_map_cs_1([], _) -> [].
flatten_hoist_align([[{align_bits,_,_}=Ab|T]|Cs]) ->
@@ -1037,6 +1054,7 @@ split_off_nonbuilding(Imm) ->
is_nonbuilding({assign,_,_}) -> true;
is_nonbuilding({call,_,_,_,_}) -> true;
+is_nonbuilding({comment,_}) -> true;
is_nonbuilding({lc,_,_,_,_}) -> true;
is_nonbuilding({set,_,_}) -> true;
is_nonbuilding({list,_,_}) -> true;
@@ -1093,7 +1111,7 @@ per_enc_integer_1(Val0, [{{_,_}=Constr,[]}], Aligned) ->
per_enc_integer_1(Val0, [Constr], Aligned) ->
{Prefix,Check,Action} = per_enc_integer_2(Val0, Constr, Aligned),
Prefix++build_cond([[Check|Action],
- ['_',{error,Val0}]]).
+ ['_',{error,{illegal_integer,Val0}}]]).
per_enc_integer_2(Val, {'SingleValue',Sv}, Aligned) when is_integer(Sv) ->
per_enc_constrained(Val, Sv, Sv, Aligned);
@@ -1240,6 +1258,20 @@ enc_length(Len, {Lb,Ub}, Aligned) when is_integer(Lb) ->
enc_length(Len, Sv, _Aligned) when is_integer(Sv) ->
[{'cond',[[{eq,Len,Sv}]]}].
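+%% Note: descriptive comment (not in the original patch). extensions_bitmap(Vars,
+%% Undefined) builds an Erlang expression that ORs together one bit per extension
+%% component: a component equal to Undefined contributes 0, any other value
+%% contributes its positional bit, with the first component as the most
+%% significant bit.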
+extensions_bitmap(Vs, Undefined) ->
+ Highest = 1 bsl (length(Vs)-1),
+ Cs = extensions_bitmap_1(Vs, Undefined, Highest),
+ lists:flatten(lists:join(" bor ", Cs)).
+
+extensions_bitmap_1([{var,V}|Vs], Undefined, Power) ->
+ S = ["case ",V," of\n",
+ " ",Undefined," -> 0;\n"
+ " _ -> ",integer_to_list(Power),"\n"
+ "end"],
+ [S|extensions_bitmap_1(Vs, Undefined, Power bsr 1)];
+extensions_bitmap_1([], _, _) ->
+ [].
+
put_bits_binary(Bin, _Unit, Aligned) when is_binary(Bin) ->
Sz = byte_size(Bin),
<<Int:Sz/unit:8>> = Bin,
@@ -1903,6 +1935,8 @@ enc_opt({'cond',Cs0}, St0) ->
{Cs,Type} = enc_opt_cond_1(Cs1, Type0, [{Cond,Imm}]),
{{'cond',Cs},St0#ost{t=Type}}
end;
+enc_opt({comment,_}=Imm, St) ->
+ {Imm,St#ost{t=undefined}};
enc_opt({cons,H0,T0}, St0) ->
{H,#ost{t=TypeH}=St1} = enc_opt(H0, St0),
{T,#ost{t=TypeT}=St} = enc_opt(T0, St1),
@@ -2292,6 +2326,9 @@ enc_cg({block,Imm}) ->
enc_cg(Imm),
emit([nl,
"end"]);
+enc_cg({seq,{comment,Comment},Then}) ->
+ emit(["%% ",Comment,nl]),
+ enc_cg(Then);
enc_cg({seq,First,Then}) ->
enc_cg(First),
emit([com,nl]),
@@ -2325,9 +2362,9 @@ enc_cg({'cond',Cs}) ->
enc_cg_cond(Cs);
enc_cg({error,Error}) when is_function(Error, 0) ->
Error();
-enc_cg({error,Var0}) ->
+enc_cg({error,{Tag,Var0}}) ->
Var = mk_val(Var0),
- emit(["exit({error,{asn1,{illegal_value,",Var,"}}})"]);
+ emit(["exit({error,{asn1,{",Tag,",",Var,"}}})"]);
enc_cg({integer,Int}) ->
emit(mk_val(Int));
enc_cg({lc,Body,Var,List}) ->
@@ -2590,6 +2627,8 @@ enc_opt_al({call,per_common,encode_unconstrained_number,[_]}=Call, _) ->
{[Call],0};
enc_opt_al({call,_,_,_,_}=Call, Al) ->
{[Call],Al};
+enc_opt_al({comment,_}=Imm, Al) ->
+ {[Imm],Al};
enc_opt_al({'cond',Cs0}, Al0) ->
{Cs,Al} = enc_opt_al_cond(Cs0, Al0),
{[{'cond',Cs}],Al};
@@ -2686,6 +2725,8 @@ per_fixup([{block,Block}|T]) ->
[{block,per_fixup(Block)}|per_fixup(T)];
per_fixup([{'assign',_,_}=H|T]) ->
[H|per_fixup(T)];
+per_fixup([{comment,_}=H|T]) ->
+ [H|per_fixup(T)];
per_fixup([{'cond',Cs0}|T]) ->
Cs = [[C|per_fixup(Act)] || [C|Act] <- Cs0],
[{'cond',Cs}|per_fixup(T)];
diff --git a/lib/asn1/src/asn1ct_value.erl b/lib/asn1/src/asn1ct_value.erl
index 57cd3f8af6..8bd99d995b 100644
--- a/lib/asn1/src/asn1ct_value.erl
+++ b/lib/asn1/src/asn1ct_value.erl
@@ -19,7 +19,6 @@
%%
%%
-module(asn1ct_value).
--compile([{nowarn_deprecated_function,{asn1rt,utf8_list_to_binary,1}}]).
%% Generate Erlang values for ASN.1 types.
%% The value is randomized within it's constraints
@@ -65,7 +64,11 @@ from_type(M,Typename,Type) when is_record(Type,type) ->
end;
{constructed,bif} when Typename == ['EXTERNAL'] ->
Val=from_type_constructed(M,Typename,InnerType,Type),
- asn1ct_eval_ext:transform_to_EXTERNAL1994(Val);
+ T = case M:maps() of
+ false -> transform_to_EXTERNAL1994;
+ true -> transform_to_EXTERNAL1994_maps
+ end,
+ asn1ct_eval_ext:T(Val);
{constructed,bif} ->
from_type_constructed(M,Typename,InnerType,Type)
end;
@@ -119,11 +122,13 @@ get_sequence(M,Typename,Type) ->
#'SEQUENCE'{components=Cl} -> {'SEQUENCE',Cl};
#'SET'{components=Cl} -> {'SET',to_textual_order(Cl)}
end,
- case get_components(M,Typename,CompList) of
- [] ->
- {list_to_atom(asn1ct_gen:list2rname(Typename))};
- C ->
- list_to_tuple([list_to_atom(asn1ct_gen:list2rname(Typename))|C])
+ Cs = get_components(M, Typename, CompList),
+ case M:maps() of
+ false ->
+ RecordTag = list_to_atom(asn1ct_gen:list2rname(Typename)),
+ list_to_tuple([RecordTag|[Val || {_,Val} <- Cs]]);
+ true ->
+ maps:from_list(Cs)
end.
get_components(M,Typename,{Root,Ext}) ->
@@ -131,9 +136,9 @@ get_components(M,Typename,{Root,Ext}) ->
%% Should enhance this *** HERE *** with proper handling of extensions
-get_components(M,Typename,[H|T]) ->
- [from_type(M,Typename,H)|
- get_components(M,Typename,T)];
+get_components(M, Typename, [H|T]) ->
+ #'ComponentType'{name=Name} = H,
+ [{Name,from_type(M, Typename, H)}|get_components(M, Typename, T)];
get_components(_,_,[]) ->
[].
@@ -292,8 +297,10 @@ from_type_prim(M, D) ->
'BMPString' ->
adjust_list(size_random(C),c_string(C,"BMPString"));
'UTF8String' ->
- {ok,Res}=asn1rt:utf8_list_to_binary(adjust_list(random(50),[$U,$T,$F,$8,$S,$t,$r,$i,$n,$g,16#ffff,16#fffffff,16#ffffff,16#fffff,16#fff])),
- Res;
+ L = adjust_list(random(50),
+ [$U,$T,$F,$8,$S,$t,$r,$i,$n,$g,
+ 16#ffff,16#ffee,16#10ffff,16#ffff,16#fff]),
+ unicode:characters_to_binary(L);
'UniversalString' ->
adjust_list(size_random(C),c_string(C,"UniversalString"));
XX ->
diff --git a/lib/asn1/src/asn1rt.erl b/lib/asn1/src/asn1rt.erl
deleted file mode 100644
index 3e09ce2252..0000000000
--- a/lib/asn1/src/asn1rt.erl
+++ /dev/null
@@ -1,184 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1997-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
-%%
--module(asn1rt).
--deprecated(module).
-
-%% Runtime functions for ASN.1 (i.e encode, decode)
-
--export([encode/2,encode/3,decode/3,load_driver/0,unload_driver/0,info/1]).
-
--export([utf8_binary_to_list/1,utf8_list_to_binary/1]).
-
-encode(Module,{Type,Term}) ->
- encode(Module,Type,Term).
-
-encode(Module,Type,Term) ->
- case catch apply(Module,encode,[Type,Term]) of
- {'EXIT',undef} ->
- {error,{asn1,{undef,Module,Type}}};
- Result ->
- Result
- end.
-
-decode(Module,Type,Bytes) ->
- case catch apply(Module,decode,[Type,Bytes]) of
- {'EXIT',undef} ->
- {error,{asn1,{undef,Module,Type}}};
- Result ->
- Result
- end.
-
-%% Remove in R16A
-load_driver() ->
- ok.
-
-unload_driver() ->
- ok.
-
-info(Module) ->
- case catch apply(Module,info,[]) of
- {'EXIT',{undef,_Reason}} ->
- {error,{asn1,{undef,Module,info}}};
- Result ->
- {ok,Result}
- end.
-
-%% utf8_binary_to_list/1 transforms a utf8 encoded binary to a list of
-%% unicode elements, where each element is the unicode integer value
-%% of a utf8 character.
-%% Bin is a utf8 encoded value. The return value is either {ok,Val} or
-%% {error,Reason}. Val is a list of integers, where each integer is a
-%% unicode character value.
-utf8_binary_to_list(Bin) when is_binary(Bin) ->
- utf8_binary_to_list(Bin,[]).
-
-utf8_binary_to_list(<<>>,Acc) ->
- {ok,lists:reverse(Acc)};
-utf8_binary_to_list(Bin,Acc) ->
- Len = utf8_binary_len(Bin),
- case catch split_binary(Bin,Len) of
- {CharBin,RestBin} ->
- case utf8_binary_char(CharBin) of
- C when is_integer(C) ->
- utf8_binary_to_list(RestBin,[C|Acc]);
- Err -> Err
- end;
- Err -> {error,{asn1,{bad_encoded_utf8string,Err}}}
- end.
-
-utf8_binary_len(<<0:1,_:7,_/binary>>) ->
- 1;
-utf8_binary_len(<<1:1,1:1,0:1,_:5,_/binary>>) ->
- 2;
-utf8_binary_len(<<1:1,1:1,1:1,0:1,_:4,_/binary>>) ->
- 3;
-utf8_binary_len(<<1:1,1:1,1:1,1:1,0:1,_:3,_/binary>>) ->
- 4;
-utf8_binary_len(<<1:1,1:1,1:1,1:1,1:1,0:1,_:2,_/binary>>) ->
- 5;
-utf8_binary_len(<<1:1,1:1,1:1,1:1,1:1,1:1,0:1,_:1,_/binary>>) ->
- 6;
-utf8_binary_len(Bin) ->
- {error,{asn1,{bad_utf8_length,Bin}}}.
-
-utf8_binary_char(<<0:1,Int:7>>) ->
- Int;
-utf8_binary_char(<<_:2,0:1,Int1:5,1:1,0:1,Int2:6>>) ->
- (Int1 bsl 6) bor Int2;
-utf8_binary_char(<<_:3,0:1,Int1:4,1:1,0:1,Int2:6,1:1,0:1,Int3:6>>) ->
- <<Res:16>> = <<Int1:4,Int2:6,Int3:6>>,
- Res;
-utf8_binary_char(<<_:4,0:1,Int1:3,Rest/binary>>) ->
- <<1:1,0:1,Int2:6,1:1,0:1,Int3:6,1:1,0:1,Int4:6>> = Rest,
- <<Res:24>> = <<0:3,Int1:3,Int2:6,Int3:6,Int4:6>>,
- Res;
-utf8_binary_char(<<_:5,0:1,Int1:2,Rest/binary>>) ->
- <<1:1,0:1,Int2:6,1:1,0:1,Int3:6,1:1,0:1,Int4:6,1:1,0:1,Int5:6>> = Rest,
- <<Res:32>> = <<0:6,Int1:2,Int2:6,Int3:6,Int4:6,Int5:6>>,
- Res;
-utf8_binary_char(<<_:6,0:1,I:1,Rest/binary>>) ->
- <<1:1,0:1,Int2:6,1:1,0:1,Int3:6,1:1,0:1,Int4:6,1:1,0:1,
- Int5:6,1:1,0:1,Int6:6>> = Rest,
- <<Res:32>> = <<0:1,I:1,Int2:6,Int3:6,Int4:6,Int5:6,Int6:6>>,
- Res;
-utf8_binary_char(Err) ->
- {error,{asn1,{bad_utf8_character_encoding,Err}}}.
-
-
-%% macros used for utf8 encoding
--define(bit1to6_into_utf8byte(I),16#80 bor (I band 16#3f)).
--define(bit7to12_into_utf8byte(I),16#80 bor ((I band 16#fc0) bsr 6)).
--define(bit13to18_into_utf8byte(I),16#80 bor ((I band 16#3f000) bsr 12)).
--define(bit19to24_into_utf8byte(I),16#80 bor ((Int band 16#fc0000) bsr 18)).
--define(bit25to30_into_utf8byte(I),16#80 bor ((Int band 16#3f000000) bsr 24)).
-
-%% utf8_list_to_binary/1 transforms a list of integers to a
-%% binary. Each element in the input list has the unicode (integer)
-%% value of an utf8 character.
-%% The return value is either {ok,Bin} or {error,Reason}. The
-%% resulting binary is utf8 encoded.
-utf8_list_to_binary(List) ->
- utf8_list_to_binary(List,[]).
-
-utf8_list_to_binary([],Acc) when is_list(Acc) ->
- {ok,list_to_binary(lists:reverse(Acc))};
-utf8_list_to_binary([],Acc) ->
- {error,{asn1,Acc}};
-utf8_list_to_binary([H|T],Acc) ->
- case catch utf8_encode(H,Acc) of
- NewAcc when is_list(NewAcc) ->
- utf8_list_to_binary(T,NewAcc);
- Err -> Err
- end.
-
-
-utf8_encode(Int,Acc) when Int < 128 ->
- %% range 16#00000000 - 16#0000007f
- %% utf8 encoding: 0xxxxxxx
- [Int|Acc];
-utf8_encode(Int,Acc) when Int < 16#800 ->
- %% range 16#00000080 - 16#000007ff
- %% utf8 encoding: 110xxxxx 10xxxxxx
- [?bit1to6_into_utf8byte(Int),16#c0 bor (Int bsr 6)|Acc];
-utf8_encode(Int,Acc) when Int < 16#10000 ->
- %% range 16#00000800 - 16#0000ffff
- %% utf8 encoding: 1110xxxx 10xxxxxx 10xxxxxx
- [?bit1to6_into_utf8byte(Int),?bit7to12_into_utf8byte(Int),
- 16#e0 bor ((Int band 16#f000) bsr 12)|Acc];
-utf8_encode(Int,Acc) when Int < 16#200000 ->
- %% range 16#00010000 - 16#001fffff
- %% utf8 encoding: 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
- [?bit1to6_into_utf8byte(Int),?bit7to12_into_utf8byte(Int),
- ?bit13to18_into_utf8byte(Int),
- 16#f0 bor ((Int band 16#1c0000) bsr 18)|Acc];
-utf8_encode(Int,Acc) when Int < 16#4000000 ->
- %% range 16#00200000 - 16#03ffffff
- %% utf8 encoding: 111110xx 10xxxxxx 10xxxxxx 10xxxxxx 10xxxxxx
- [?bit1to6_into_utf8byte(Int),?bit7to12_into_utf8byte(Int),
- ?bit13to18_into_utf8byte(Int),?bit19to24_into_utf8byte(Int),
- 16#f8 bor ((Int band 16#3000000) bsr 24)|Acc];
-utf8_encode(Int,Acc) ->
- %% range 16#04000000 - 16#7fffffff
- %% utf8 encoding: 1111110x 10xxxxxx ...(total 6 bytes) 10xxxxxx
- [?bit1to6_into_utf8byte(Int),?bit7to12_into_utf8byte(Int),
- ?bit13to18_into_utf8byte(Int),?bit19to24_into_utf8byte(Int),
- ?bit25to30_into_utf8byte(Int),
- 16#fc bor ((Int band 16#40000000) bsr 30)|Acc].
diff --git a/lib/asn1/src/asn1rtt_ext.erl b/lib/asn1/src/asn1rtt_ext.erl
index 3bf01823db..161b2db691 100644
--- a/lib/asn1/src/asn1rtt_ext.erl
+++ b/lib/asn1/src/asn1rtt_ext.erl
@@ -19,7 +19,8 @@
%%
-module(asn1rtt_ext).
--export([transform_to_EXTERNAL1990/1,transform_to_EXTERNAL1994/1]).
+-export([transform_to_EXTERNAL1990/1,transform_to_EXTERNAL1990_maps/1,
+ transform_to_EXTERNAL1994/1,transform_to_EXTERNAL1994_maps/1]).
transform_to_EXTERNAL1990({_,_,_,_}=Val) ->
transform_to_EXTERNAL1990(tuple_to_list(Val), []);
@@ -51,6 +52,30 @@ transform_to_EXTERNAL1990([Data_value], Acc)
list_to_tuple(lists:reverse([{'octet-aligned',Data_value}|Acc])).
+transform_to_EXTERNAL1990_maps(#{identification:=Id,'data-value':=Value}=V) ->
+ M0 = case Id of
+ {syntax,DRef} ->
+ #{'direct-reference'=>DRef};
+ {'presentation-context-id',IndRef} ->
+ #{'indirect-reference'=>IndRef};
+ {'context-negotiation',
+ #{'presentation-context-id':=IndRef,
+ 'transfer-syntax':=DRef}} ->
+ #{'direct-reference'=>DRef,
+ 'indirect-reference'=>IndRef}
+ end,
+ M = case V of
+ #{'data-value-descriptor':=Dvd} ->
+ M0#{'data-value-descriptor'=>Dvd};
+ #{} ->
+ M0
+ end,
+ M#{encoding=>{'octet-aligned',Value}};
+transform_to_EXTERNAL1990_maps(#{encoding:=_}=V) ->
+ %% Already in the EXTERNAL 1990 format.
+ V.
+
+
transform_to_EXTERNAL1994({'EXTERNAL',DRef,IndRef,Data_v_desc,Encoding}=V) ->
Identification =
case {DRef,IndRef} of
@@ -71,3 +96,38 @@ transform_to_EXTERNAL1994({'EXTERNAL',DRef,IndRef,Data_v_desc,Encoding}=V) ->
%% information.
V
end.
+
+transform_to_EXTERNAL1994_maps(V0) ->
+ Identification =
+ case V0 of
+ #{'direct-reference':=DRef,
+ 'indirect-reference':=asn1_NOVALUE} ->
+ {syntax,DRef};
+ #{'direct-reference':=asn1_NOVALUE,
+ 'indirect-reference':=IndRef} ->
+ {'presentation-context-id',IndRef};
+ #{'direct-reference':=DRef,
+ 'indirect-reference':=IndRef} ->
+ {'context-negotiation',
+ #{'transfer-syntax'=>DRef,
+ 'presentation-context-id'=>IndRef}}
+ end,
+ case V0 of
+ #{encoding:={'octet-aligned',Val}}
+ when is_list(Val); is_binary(Val) ->
+ %% Transform to the EXTERNAL 1994 definition.
+ V = #{identification=>Identification,
+ 'data-value'=>Val},
+ case V0 of
+ #{'data-value-descriptor':=asn1_NOVALUE} ->
+ V;
+ #{'data-value-descriptor':=Dvd} ->
+ V#{'data-value-descriptor'=>Dvd}
+ end;
+ _ ->
+ %% Keep the EXTERNAL 1990 definition to avoid losing
+ %% information.
+ V = [{K,V} || {K,V} <- maps:to_list(V0),
+ V =/= asn1_NOVALUE],
+ maps:from_list(V)
+ end.
diff --git a/lib/asn1/src/asn1rtt_per_common.erl b/lib/asn1/src/asn1rtt_per_common.erl
index 3896cb7fa5..e7edfb1ee0 100644
--- a/lib/asn1/src/asn1rtt_per_common.erl
+++ b/lib/asn1/src/asn1rtt_per_common.erl
@@ -140,6 +140,8 @@ encode_relative_oid(Val) when is_tuple(Val) ->
encode_relative_oid(Val) when is_list(Val) ->
list_to_binary([e_object_element(X)||X <- Val]).
+encode_unconstrained_number(Val) when not is_integer(Val) ->
+ exit({error,{asn1,{illegal_integer,Val}}});
encode_unconstrained_number(Val) when Val >= 0 ->
if
Val < 16#80 ->
diff --git a/lib/asn1/test/Makefile b/lib/asn1/test/Makefile
index 40575e8a2f..afd063aa8e 100644
--- a/lib/asn1/test/Makefile
+++ b/lib/asn1/test/Makefile
@@ -82,6 +82,7 @@ MODULES= \
testInfObjExtract \
testParameterizedInfObj \
testFragmented \
+ testMaps \
testMergeCompile \
testMultipleLevels \
testDeepTConstr \
@@ -114,8 +115,7 @@ MODULES= \
testImporting \
testExtensibilityImplied \
asn1_test_lib \
- asn1_app_test \
- asn1_appup_test \
+ asn1_app_SUITE \
asn1_SUITE \
error_SUITE \
syntax_SUITE
diff --git a/lib/asn1/test/asn1_SUITE.erl b/lib/asn1/test/asn1_SUITE.erl
index b6430134ab..580c919b9d 100644
--- a/lib/asn1/test/asn1_SUITE.erl
+++ b/lib/asn1/test/asn1_SUITE.erl
@@ -21,6 +21,9 @@
-module(asn1_SUITE).
+%% Suppress compilation of an additional module compiled for maps.
+-define(NO_MAPS_MODULE, asn1_test_lib_no_maps).
+
-define(only_ber(Func),
if Rule =:= ber -> Func;
true -> ok
@@ -39,10 +42,11 @@ suite() ->
{timetrap,{minutes,60}}].
all() ->
- [{group, compile},
+ [xref,
+ xref_export_all,
+
+ {group, compile},
{group, parallel},
- {group, app_test},
- {group, appup_test},
% TODO: Investigate parallel running of these:
testComment,
@@ -64,13 +68,8 @@ groups() ->
ber_optional,
tagdefault_automatic]},
- {app_test, [], [{asn1_app_test, all}]},
-
- {appup_test, [], [{asn1_appup_test, all}]},
-
{parallel, Parallel,
[cover,
- xref,
{group, ber},
% Uses 'P-Record', 'Constraints', 'MEDIA-GATEWAY-CONTROL'...
{group, [], [parse,
@@ -102,6 +101,7 @@ groups() ->
testMultipleLevels,
testOpt,
testSeqDefault,
+ testMaps,
% Uses 'External'
{group, [], [testExternal,
testSeqExtension]},
@@ -176,8 +176,11 @@ groups() ->
{performance, [],
[testTimer_ber,
+ testTimer_ber_maps,
testTimer_per,
- testTimer_uper]}].
+ testTimer_per_maps,
+ testTimer_uper,
+ testTimer_uper_maps]}].
%%------------------------------------------------------------------------------
%% Init/end
@@ -441,6 +444,16 @@ testDEFAULT(Config, Rule, Opts) ->
testDef:main(Rule),
testSeqSetDefaultVal:main(Rule, Opts).
+testMaps(Config) ->
+ test(Config, fun testMaps/3,
+ [{ber,[maps,no_ok_wrapper]},
+ {ber,[maps,der,no_ok_wrapper]},
+ {per,[maps,no_ok_wrapper]},
+ {uper,[maps,no_ok_wrapper]}]).
+testMaps(Config, Rule, Opts) ->
+ asn1_test_lib:compile_all(['Maps'], Config, [Rule|Opts]),
+ testMaps:main(Rule).
+
testOpt(Config) -> test(Config, fun testOpt/3).
testOpt(Config, Rule, Opts) ->
asn1_test_lib:compile("Opt", Config, [Rule|Opts]),
@@ -614,12 +627,12 @@ parse(Config) ->
[asn1_test_lib:compile(M, Config, [abs]) || M <- test_modules()].
per(Config) ->
- test(Config, fun per/3, [per,uper]).
+ test(Config, fun per/3, [per,uper,{per,[maps]},{uper,[maps]}]).
per(Config, Rule, Opts) ->
[module_test(M, Config, Rule, Opts) || M <- per_modules()].
ber_other(Config) ->
- test(Config, fun ber_other/3, [ber]).
+ test(Config, fun ber_other/3, [ber,{ber,[maps]}]).
ber_other(Config, Rule, Opts) ->
[module_test(M, Config, Rule, Opts) || M <- ber_modules()].
@@ -628,7 +641,7 @@ der(Config) ->
asn1_test_lib:compile_all(ber_modules(), Config, [der]).
module_test(M0, Config, Rule, Opts) ->
- asn1_test_lib:compile(M0, Config, [Rule|Opts]),
+ asn1_test_lib:compile(M0, Config, [Rule,?NO_MAPS_MODULE|Opts]),
case list_to_atom(M0) of
'LDAP' ->
%% Because of the recursive definition of 'Filter' in
@@ -995,7 +1008,9 @@ testS1AP(Config, Rule, Opts) ->
testRfcs() ->
[{timetrap,{minutes,90}}].
-testRfcs(Config) -> test(Config, fun testRfcs/3, [{ber,[der]}]).
+testRfcs(Config) -> test(Config, fun testRfcs/3,
+ [{ber,[der,?NO_MAPS_MODULE]},
+ {ber,[der,maps]}]).
testRfcs(Config, Rule, Opts) ->
case erlang:system_info(system_architecture) of
"sparc-sun-solaris2.10" ->
@@ -1010,7 +1025,8 @@ test_compile_options(Config) ->
ok = test_compile_options:path(Config),
ok = test_compile_options:noobj(Config),
ok = test_compile_options:record_name_prefix(Config),
- ok = test_compile_options:verbose(Config).
+ ok = test_compile_options:verbose(Config),
+ ok = test_compile_options:maps(Config).
testDoubleEllipses(Config) -> test(Config, fun testDoubleEllipses/3).
testDoubleEllipses(Config, Rule, Opts) ->
@@ -1027,18 +1043,6 @@ test_modified_x420(Config, Rule, Opts) ->
test_modified_x420:test(Config).
-testX420() ->
- [{timetrap,{minutes,90}}].
-testX420(Config) ->
- case erlang:system_info(system_architecture) of
- "sparc-sun-solaris2.10" ->
- {skip,"Too slow for an old Sparc"};
- _ ->
- Rule = ber,
- testX420:compile(Rule, [der], Config),
- ok = testX420:ticket7759(Rule, Config)
- end.
-
test_x691(Config) ->
test(Config, fun test_x691/3, [per, uper]).
test_x691(Config, Rule, Opts) ->
@@ -1069,7 +1073,7 @@ test_x691(Config, Rule, Opts) ->
ok.
ticket_6143(Config) ->
- ok = test_compile_options:ticket_6143(Config).
+ asn1_test_lib:compile("AA1", Config, [?NO_MAPS_MODULE]).
testExtensionAdditionGroup(Config) ->
test(Config, fun testExtensionAdditionGroup/3).
@@ -1157,20 +1161,33 @@ END
ok = asn1ct:compile(File, [{outdir, PrivDir}]).
-timer_compile(Config, Rule) ->
- asn1_test_lib:compile_all(["H235-SECURITY-MESSAGES", "H323-MESSAGES"],
- Config, [no_ok_wrapper,Rule]).
+timer_compile(Config, Opts0) ->
+ Files = ["H235-SECURITY-MESSAGES", "H323-MESSAGES"],
+ Opts = [no_ok_wrapper,?NO_MAPS_MODULE|Opts0],
+ asn1_test_lib:compile_all(Files, Config, Opts).
testTimer_ber(Config) ->
- timer_compile(Config, ber),
+ timer_compile(Config, [ber]),
testTimer:go().
testTimer_per(Config) ->
- timer_compile(Config, per),
+ timer_compile(Config, [per]),
testTimer:go().
testTimer_uper(Config) ->
- timer_compile(Config, uper),
+ timer_compile(Config, [uper]),
+ testTimer:go().
+
+testTimer_ber_maps(Config) ->
+ timer_compile(Config, [ber,maps]),
+ testTimer:go().
+
+testTimer_per_maps(Config) ->
+ timer_compile(Config, [per,maps]),
+ testTimer:go().
+
+testTimer_uper_maps(Config) ->
+ timer_compile(Config, [uper,maps]),
testTimer:go().
%% Test of multiple-line comment, OTP-8043
@@ -1179,9 +1196,11 @@ testComment(Config) ->
asn1_test_lib:roundtrip('Comment', 'Seq', {'Seq',12,true}).
testName2Number(Config) ->
- N2NOptions = [{n2n,Type} || Type <- ['CauseMisc', 'CauseProtocol',
- 'CauseRadioNetwork',
- 'CauseTransport','CauseNas']],
+ N2NOptions0 = [{n2n,Type} ||
+ Type <- ['CauseMisc', 'CauseProtocol',
+ 'CauseRadioNetwork',
+ 'CauseTransport','CauseNas']],
+ N2NOptions = [?NO_MAPS_MODULE|N2NOptions0],
asn1_test_lib:compile("S1AP-IEs", Config, N2NOptions),
0 = 'S1AP-IEs':name2num_CauseMisc('control-processing-overload'),
@@ -1191,8 +1210,9 @@ testName2Number(Config) ->
%% Test that n2n option generates name2num and num2name functions supporting
%% values not within the extension root if the enumeration type has an
%% extension marker.
- N2NOptionsExt = [{n2n, 'NoExt'}, {n2n, 'Ext'}, {n2n, 'Ext2'}],
+ N2NOptionsExt = [?NO_MAPS_MODULE,{n2n,'NoExt'},{n2n,'Ext'},{n2n,'Ext2'}],
asn1_test_lib:compile("EnumN2N", Config, N2NOptionsExt),
+
%% Previously, name2num and num2name was not generated if the type didn't
%% have an extension marker:
0 = 'EnumN2N':name2num_NoExt('blue'),
@@ -1210,9 +1230,11 @@ testName2Number(Config) ->
ok.
ticket_7407(Config) ->
- asn1_test_lib:compile("EUTRA-extract-7407", Config, [uper]),
+ Opts = [uper,?NO_MAPS_MODULE],
+ asn1_test_lib:compile("EUTRA-extract-7407", Config, Opts),
ticket_7407_code(true),
- asn1_test_lib:compile("EUTRA-extract-7407", Config, [uper,no_final_padding]),
+ asn1_test_lib:compile("EUTRA-extract-7407", Config,
+ [no_final_padding|Opts]),
ticket_7407_code(false).
ticket_7407_code(FinalPadding) ->
@@ -1287,16 +1309,72 @@ ticket7904(Config) ->
{ok,_} = 'RANAPextract1':encode('InitiatingMessage', Val1),
{ok,_} = 'RANAPextract1':encode('InitiatingMessage', Val1).
+
+%% Make sure that functions exported from other modules are
+%% actually used.
+
xref(_Config) ->
- xref:start(s),
- xref:set_default(s, [{verbose,false},{warnings,false},{builtins,true}]),
+ S = ?FUNCTION_NAME,
+ xref:start(S),
+ xref:set_default(S, [{verbose,false},{warnings,false},{builtins,true}]),
Test = filename:dirname(code:which(?MODULE)),
- {ok,_PMs} = xref:add_directory(s, Test),
- UnusedExports = "X - XU - asn1_appup_test - asn1_app_test - \".*_SUITE\" : Mod",
- case xref:q(s, UnusedExports) of
+ {ok,_PMs} = xref:add_directory(S, Test),
+ Q = "X - XU - \".*_SUITE\" : Mod",
+ UnusedExports = xref:q(S, Q),
+ xref:stop(S),
+ case UnusedExports of
{ok,[]} ->
ok;
{ok,[_|_]=Res} ->
io:format("Exported, but unused: ~p\n", [Res]),
?t:fail()
end.
+
+%% Ensure that all functions that are implicitly exported by
+%% 'export_all' in this module are actually used.
+
+xref_export_all(_Config) ->
+ S = ?FUNCTION_NAME,
+ xref:start(S),
+ xref:set_default(S, [{verbose,false},{warnings,false},{builtins,true}]),
+ {ok,_PMs} = xref:add_module(S, code:which(?MODULE)),
+ AllCalled = all_called(),
+ Def = "Called := " ++ lists:flatten(io_lib:format("~p", [AllCalled])),
+ {ok,_} = xref:q(S, Def),
+ {ok,Unused} = xref:q(S, "X - Called - range (closure E | Called)"),
+ xref:stop(S),
+ case Unused of
+ [] ->
+ ok;
+ [_|_] ->
+ Msg = [io_lib:format("~p:~p/~p\n", [M,F,A]) || {M,F,A} <- Unused],
+ io:format("There are unused functions:\n\n~s\n", [Msg]),
+ ?t:fail(unused_functions)
+ end.
+
+%% Collect all functions that common_test will call in this module.
+
+all_called() ->
+ [{?MODULE,end_per_group,2},
+ {?MODULE,end_per_suite,1},
+ {?MODULE,end_per_testcase,2},
+ {?MODULE,init_per_group,2},
+ {?MODULE,init_per_suite,1},
+ {?MODULE,init_per_testcase,2},
+ {?MODULE,suite,0}] ++
+ all_called_1(all() ++ groups()).
+
+all_called_1([{_,_}|T]) ->
+ all_called_1(T);
+all_called_1([{_Name,_Flags,Fs}|T]) ->
+ all_called_1(Fs ++ T);
+all_called_1([F|T]) when is_atom(F) ->
+ L = case erlang:function_exported(?MODULE, F, 0) of
+ false ->
+ [{?MODULE,F,1}];
+ true ->
+ [{?MODULE,F,0},{?MODULE,F,1}]
+ end,
+ L ++ all_called_1(T);
+all_called_1([]) ->
+ [].
diff --git a/lib/asn1/test/asn1_SUITE_data/Maps.asn1 b/lib/asn1/test/asn1_SUITE_data/Maps.asn1
new file mode 100644
index 0000000000..fd5f373e45
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/Maps.asn1
@@ -0,0 +1,17 @@
+Maps DEFINITIONS AUTOMATIC TAGS ::=
+BEGIN
+
+XY ::= SEQUENCE { x INTEGER DEFAULT 0, y INTEGER DEFAULT 0 }
+
+xy1 XY ::= { x 42, y 17 }
+xy2 XY ::= { }
+xy3 XY ::= { y 999 }
+
+S ::= SEQUENCE {
+ xy XY DEFAULT { x 100, y 100 },
+ os OCTET STRING OPTIONAL
+}
+
+s1 S ::= {}
+
+END
diff --git a/lib/asn1/test/asn1_SUITE_data/Prim.asn1 b/lib/asn1/test/asn1_SUITE_data/Prim.asn1
index 4fe0901683..91c8696e61 100644
--- a/lib/asn1/test/asn1_SUITE_data/Prim.asn1
+++ b/lib/asn1/test/asn1_SUITE_data/Prim.asn1
@@ -18,6 +18,8 @@ BEGIN
IntExpPri ::= [PRIVATE 51] EXPLICIT INTEGER
IntExpApp ::= [APPLICATION 52] EXPLICIT INTEGER
+ IntConstrained ::= INTEGER (0..255)
+
IntEnum ::= INTEGER {first(1),last(31)}
Enum ::= ENUMERATED {monday(1),tuesday(2),wednesday(3),thursday(4),
diff --git a/lib/asn1/test/asn1_SUITE_data/SeqExtension.asn1 b/lib/asn1/test/asn1_SUITE_data/SeqExtension.asn1
index 5fda19303a..e866ef2f4f 100644
--- a/lib/asn1/test/asn1_SUITE_data/SeqExtension.asn1
+++ b/lib/asn1/test/asn1_SUITE_data/SeqExtension.asn1
@@ -48,6 +48,17 @@ SeqExt6 ::= SEQUENCE
[[ i6 [106] INTEGER, i7 [107] INTEGER ]]
}
+SeqExt7 ::= SEQUENCE
+{
+ -- The spaces between the ellipsis and the comma will prevent them
+ -- from being removed.
+ ... ,
+ [[ a INTEGER (0..65535) OPTIONAL,
+ b OCTET STRING OPTIONAL,
+ c BOOLEAN
+ ]]
+}
+
SeqExt1X ::= XSeqExt1
SeqExt2X ::= XSeqExt2
diff --git a/lib/asn1/test/asn1_SUITE_data/extensionAdditionGroup.erl b/lib/asn1/test/asn1_SUITE_data/extensionAdditionGroup.erl
index 6cf8ecf451..cd6c74b995 100644
--- a/lib/asn1/test/asn1_SUITE_data/extensionAdditionGroup.erl
+++ b/lib/asn1/test/asn1_SUITE_data/extensionAdditionGroup.erl
@@ -120,10 +120,10 @@ run3(Erule) ->
asn1_NOVALUE,asn1_NOVALUE,asn1_NOVALUE},
asn1_NOVALUE,asn1_NOVALUE}}}}}}},
io:format("~p:~p~n",[Erule,Val]),
- {ok,List}= asn1rt:encode('EUTRA-RRC-Definitions','DL-DCCH-Message',Val),
+ {ok,List}= 'EUTRA-RRC-Definitions':encode('DL-DCCH-Message',Val),
Enc = iolist_to_binary(List),
io:format("Result from encode:~n~p~n",[Enc]),
- {ok,Val2} = asn1rt:decode('EUTRA-RRC-Definitions','DL-DCCH-Message',Enc),
+ {ok,Val2} = 'EUTRA-RRC-Definitions':decode('DL-DCCH-Message', Enc),
io:format("Result from decode:~n~p~n",[Val2]),
case Val2 of
Val -> ok;
diff --git a/lib/asn1/test/asn1_SUITE_data/nbapsystem/NBAP-PDU-Discriptions.asn b/lib/asn1/test/asn1_SUITE_data/nbapsystem/NBAP-PDU-Descriptions.asn
index b9be9934e4..12a4475422 100644
--- a/lib/asn1/test/asn1_SUITE_data/nbapsystem/NBAP-PDU-Discriptions.asn
+++ b/lib/asn1/test/asn1_SUITE_data/nbapsystem/NBAP-PDU-Descriptions.asn
@@ -4,7 +4,7 @@
--
-- **************************************************************
-NBAP-PDU-Discriptions {
+NBAP-PDU-Descriptions {
itu-t (0) identified-organization (4) etsi (0) mobileDomain (0)
umts-Access (20) modules (3) nbap (2) version1 (1) nbap-PDU-Descriptions (0) }
diff --git a/lib/asn1/test/asn1_SUITE_data/test_records.erl b/lib/asn1/test/asn1_SUITE_data/test_records.erl
index 9fd07c1449..afb1c8c80b 100644
--- a/lib/asn1/test/asn1_SUITE_data/test_records.erl
+++ b/lib/asn1/test/asn1_SUITE_data/test_records.erl
@@ -25,7 +25,7 @@
-define(line,put(test_server_loc,{?MODULE,?LINE}),).
--include("NBAP-PDU-Discriptions.hrl").
+-include("NBAP-PDU-Descriptions.hrl").
-include("NBAP-PDU-Contents.hrl").
-include("NBAP-Containers.hrl").
-include("NBAP-CommonDataTypes.hrl").
diff --git a/lib/asn1/test/asn1_SUITE_data/testobj.erl b/lib/asn1/test/asn1_SUITE_data/testobj.erl
index a0e00f8314..66f4a92188 100644
--- a/lib/asn1/test/asn1_SUITE_data/testobj.erl
+++ b/lib/asn1/test/asn1_SUITE_data/testobj.erl
@@ -967,7 +967,7 @@ pdu_pdp() ->
116,101,115,116, % lable1 = test
4, % length lable2
116,101,115,116, % lable2 = test
- 4, % lenght lable3
+ 4, % length lable3
116,101,115,116, % lable3 = test
4, % length lable3
116,101,115,116, % lable4 = test
@@ -1410,16 +1410,14 @@ int2bin(Int) ->
%%%%%%%%%%%%%%%%% wrappers %%%%%%%%%%%%%%%%%%%%%%%%
wrapper_encode(Module,Type,Value) ->
- case asn1rt:encode(Module,Type,Value) of
- {ok,X} when binary(X) ->
+ case Module:encode(Type, Value) of
+ {ok,X} when is_binary(X) ->
{ok, binary_to_list(X)};
- {ok,X} ->
- {ok, binary_to_list(list_to_binary(X))};
Error ->
Error
end.
wrapper_decode(Module, Type, Bytes) when is_binary(Bytes) ->
- asn1rt:decode(Module, Type, Bytes);
+ Module:decode(Type, Bytes);
wrapper_decode(Module, Type, Bytes) when is_list(Bytes) ->
- asn1rt:decode(Module, Type, list_to_binary(Bytes)).
+ Module:decode(Type, list_to_binary(Bytes)).
diff --git a/lib/asn1/test/asn1_app_test.erl b/lib/asn1/test/asn1_app_SUITE.erl
index 028322f555..c089a7267c 100644
--- a/lib/asn1/test/asn1_app_test.erl
+++ b/lib/asn1/test/asn1_app_SUITE.erl
@@ -21,23 +21,24 @@
%%----------------------------------------------------------------------
%% Purpose: Verify the application specifics of the asn1 application
%%----------------------------------------------------------------------
--module(asn1_app_test).
-
--compile(export_all).
+-module(asn1_app_SUITE).
+-export([all/0,groups/0,init_per_group/2,end_per_group/2,
+ init_per_suite/1,end_per_suite/1,
+ appup/1,fields/1,modules/1,export_all/1,app_depend/1]).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-all() ->
- [fields, modules, exportall, app_depend].
+all() ->
+ [appup, fields, modules, export_all, app_depend].
-groups() ->
+groups() ->
[].
init_per_group(_GroupName, Config) ->
- Config.
+ Config.
end_per_group(_GroupName, Config) ->
- Config.
+ Config.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -65,12 +66,15 @@ is_app(App) ->
end_per_suite(Config) when is_list(Config) ->
Config.
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+appup(Config) when is_list(Config) ->
+ ok = test_server:appup_test(asn1).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% .
fields(Config) when is_list(Config) ->
- AppFile = key1search(app_file, Config),
+ AppFile = key1find(app_file, Config),
Fields = [vsn, description, modules, registered, applications],
case check_fields(Fields, AppFile, []) of
[] ->
@@ -96,10 +100,9 @@ check_field(Name, AppFile, Missing) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% .
modules(Config) when is_list(Config) ->
- AppFile = key1search(app_file, Config),
- Mods = key1search(modules, AppFile),
+ AppFile = key1find(app_file, Config),
+ Mods = key1find(modules, AppFile),
EbinList = get_ebin_mods(asn1),
case missing_modules(Mods, EbinList, []) of
[] ->
@@ -112,10 +115,9 @@ modules(Config) when is_list(Config) ->
ok;
Extra ->
check_asn1ct_modules(Extra)
-% throw({error, {extra_modules, Extra}})
end,
{ok, Mods}.
-
+
get_ebin_mods(App) ->
LibDir = code:lib_dir(App),
EbinDir = filename:join([LibDir,"ebin"]),
@@ -166,10 +168,9 @@ extra_modules(Mods, [Mod|Ebins], Extra) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% .
-exportall(Config) when is_list(Config) ->
- AppFile = key1search(app_file, Config),
- Mods = key1search(modules, AppFile),
+export_all(Config) when is_list(Config) ->
+ AppFile = key1find(app_file, Config),
+ Mods = key1find(modules, AppFile),
check_export_all(Mods).
@@ -180,10 +181,10 @@ check_export_all([Mod|Mods]) ->
{'EXIT', {undef, _}} ->
check_export_all(Mods);
O ->
- case lists:keysearch(options, 1, O) of
+ case lists:keyfind(options, 1, O) of
false ->
check_export_all(Mods);
- {value, {options, List}} ->
+ {options, List} ->
case lists:member(export_all, List) of
true ->
throw({error, {export_all, Mod}});
@@ -193,13 +194,12 @@ check_export_all([Mod|Mods]) ->
end
end.
-
+
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% .
app_depend(Config) when is_list(Config) ->
- AppFile = key1search(app_file, Config),
- Apps = key1search(applications, AppFile),
+ AppFile = key1find(app_file, Config),
+ Apps = key1find(applications, AppFile),
check_apps(Apps).
@@ -220,10 +220,10 @@ check_apps([App|Apps]) ->
fail(Reason) ->
exit({suite_failed, Reason}).
-key1search(Key, L) ->
- case lists:keysearch(Key, 1, L) of
- undefined ->
+key1find(Key, L) ->
+ case lists:keyfind(Key, 1, L) of
+ false ->
fail({not_found, Key, L});
- {value, {Key, Value}} ->
+ {Key, Value} ->
Value
end.
diff --git a/lib/asn1/test/asn1_appup_test.erl b/lib/asn1/test/asn1_appup_test.erl
deleted file mode 100644
index 54540e53cc..0000000000
--- a/lib/asn1/test/asn1_appup_test.erl
+++ /dev/null
@@ -1,58 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2005-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
-%%
-%%----------------------------------------------------------------------
-%% Purpose: Verify the application specifics of the asn1 application
-%%----------------------------------------------------------------------
--module(asn1_appup_test).
--compile(export_all).
--include_lib("common_test/include/ct.hrl").
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-all() ->
- [appup].
-
-groups() ->
- [].
-
-init_per_group(_GroupName, Config) ->
- Config.
-
-end_per_group(_GroupName, Config) ->
- Config.
-
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-init_per_suite(Config) when is_list(Config) ->
- Config.
-
-
-end_per_suite(Config) when is_list(Config) ->
- Config.
-
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-appup() ->
- [{doc, "perform a simple check of the asn1 appup file"}].
-appup(Config) when is_list(Config) ->
- ok = ?t:appup_test(asn1).
diff --git a/lib/asn1/test/asn1_test_lib.erl b/lib/asn1/test/asn1_test_lib.erl
index dc614db4f2..a79958d229 100644
--- a/lib/asn1/test/asn1_test_lib.erl
+++ b/lib/asn1/test/asn1_test_lib.erl
@@ -25,7 +25,8 @@
hex_to_bin/1,
match_value/2,
parallel/0,
- roundtrip/3,roundtrip/4,roundtrip_enc/3,roundtrip_enc/4]).
+ roundtrip/3,roundtrip/4,roundtrip_enc/3,roundtrip_enc/4,
+ map_roundtrip/3]).
-include_lib("common_test/include/ct.hrl").
@@ -94,15 +95,58 @@ module(F0) ->
list_to_atom(F).
%% filename:join(CaseDir, F ++ ".beam").
-compile_file(File, Options) ->
+compile_file(File, Options0) ->
+ Options = [warnings_as_errors|Options0],
try
- ok = asn1ct:compile(File, [warnings_as_errors|Options])
+ ok = asn1ct:compile(File, Options),
+ ok = compile_maps(File, Options)
catch
_:Reason ->
ct:print("Failed to compile ~s\n~p", [File,Reason]),
error
end.
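+%% Note: descriptive comment (not in the original patch). In addition to the
+%% normal compilation in compile_file/2, compile a "maps_"-prefixed copy of the
+%% generated module with the 'maps' option, unless any of the given options is
+%% incompatible with maps.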
+compile_maps(File, Options) ->
+ unload_map_mod(File),
+ Incompat = [abs,compact_bit_string,legacy_bit_string,
+ legacy_erlang_types,maps,asn1_test_lib_no_maps],
+ case lists:any(fun(E) -> lists:member(E, Incompat) end, Options) of
+ true ->
+ ok;
+ false ->
+ compile_maps_1(File, Options)
+ end.
+
+compile_maps_1(File, Options) ->
+ ok = asn1ct:compile(File, [maps,no_ok_wrapper,noobj|Options]),
+ OutDir = proplists:get_value(outdir, Options),
+ Base0 = filename:rootname(filename:basename(File)),
+ Base = case filename:extension(Base0) of
+ ".set" ->
+ filename:rootname(Base0);
+ _ ->
+ Base0
+ end,
+ ErlBase = Base ++ ".erl",
+ ErlFile = filename:join(OutDir, ErlBase),
+ {ok,Erl0} = file:read_file(ErlFile),
+ Erl = re:replace(Erl0, <<"-module\\('">>, "&maps_"),
+ MapsErlFile = filename:join(OutDir, "maps_" ++ ErlBase),
+ ok = file:write_file(MapsErlFile, Erl),
+ {ok,_} = compile:file(MapsErlFile, [report,{outdir,OutDir},{i,OutDir}]),
+ ok.
+
+unload_map_mod(File0) ->
+ File1 = filename:basename(File0),
+ File2 = filename:rootname(File1, ".asn"),
+ File3 = filename:rootname(File2, ".asn1"),
+ File4 = filename:rootname(File3, ".py"),
+ File = filename:rootname(File4, ".set"),
+ MapMod = list_to_atom("maps_"++File),
+ code:delete(MapMod),
+ code:purge(MapMod),
+ ok.
+
compile_erlang(Mod, Config, Options) ->
DataDir = proplists:get_value(data_dir, Config),
CaseDir = proplists:get_value(case_dir, Config),
@@ -147,24 +191,60 @@ roundtrip(Mod, Type, Value) ->
roundtrip(Mod, Type, Value, Value).
roundtrip(Mod, Type, Value, ExpectedValue) ->
- {ok,Encoded} = Mod:encode(Type, Value),
- {ok,ExpectedValue} = Mod:decode(Type, Encoded),
- test_ber_indefinite(Mod, Type, Encoded, ExpectedValue),
- ok.
+ roundtrip_enc(Mod, Type, Value, ExpectedValue).
roundtrip_enc(Mod, Type, Value) ->
roundtrip_enc(Mod, Type, Value, Value).
roundtrip_enc(Mod, Type, Value, ExpectedValue) ->
- {ok,Encoded} = Mod:encode(Type, Value),
- {ok,ExpectedValue} = Mod:decode(Type, Encoded),
+ case Mod:encode(Type, Value) of
+ {ok,Encoded} ->
+ {ok,ExpectedValue} = Mod:decode(Type, Encoded);
+ Encoded when is_binary(Encoded) ->
+ ExpectedValue = Mod:decode(Type, Encoded)
+ end,
+ map_roundtrip(Mod, Type, Encoded),
test_ber_indefinite(Mod, Type, Encoded, ExpectedValue),
Encoded.
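+%% Note: descriptive comment (not in the original patch). Round-trip the encoded
+%% value through the "maps_"-prefixed variant of the module, if such a variant
+%% has been compiled (see compile_maps/2 in this module).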
+map_roundtrip(Mod, Type, Encoded) ->
+ MapMod = list_to_atom("maps_"++atom_to_list(Mod)),
+ try MapMod:maps() of
+ true ->
+ map_roundtrip_1(MapMod, Type, Encoded)
+ catch
+ error:undef ->
+ ok
+ end.
+
%%%
%%% Internal functions.
%%%
+map_roundtrip_1(Mod, Type, Encoded) ->
+ Decoded = Mod:decode(Type, Encoded),
+ case Mod:encode(Type, Decoded) of
+ Encoded ->
+ ok;
+ OtherEncoding ->
+ case is_named_bitstring(Decoded) of
+ true ->
+ %% In BER, named BIT STRINGs with different numbers of
+ %% trailing zeroes decode to the same value.
+ ok;
+ false ->
+ error({encode_mismatch,Decoded,Encoded,OtherEncoding})
+ end
+ end,
+ ok.
+
+is_named_bitstring([H|T]) ->
+ is_atom(H) andalso is_named_bitstring(T);
+is_named_bitstring([]) ->
+ true;
+is_named_bitstring(_) ->
+ false.
+
hex2num(C) when $0 =< C, C =< $9 -> C - $0;
hex2num(C) when $A =< C, C =< $F -> C - $A + 10;
hex2num(C) when $a =< C, C =< $f -> C - $a + 10.
@@ -179,7 +259,12 @@ test_ber_indefinite(Mod, Type, Encoded, ExpectedValue) ->
case Mod:encoding_rule() of
ber ->
Indefinite = iolist_to_binary(ber_indefinite(Encoded)),
- {ok,ExpectedValue} = Mod:decode(Type, Indefinite);
+ case Mod:decode(Type, Indefinite) of
+ {ok,ExpectedValue} ->
+ ok;
+ ExpectedValue ->
+ ok
+ end;
_ ->
ok
end.
diff --git a/lib/asn1/test/ber_decode_error.erl b/lib/asn1/test/ber_decode_error.erl
index c0840e02d7..c45d130ff4 100644
--- a/lib/asn1/test/ber_decode_error.erl
+++ b/lib/asn1/test/ber_decode_error.erl
@@ -26,48 +26,41 @@ run([]) ->
{ok,B} = 'Constructed':encode('S3', {'S3',17}),
[T,L|V] = binary_to_list(B),
Bytes = list_to_binary([T,L+3|V] ++ [2,1,3]),
- case 'Constructed':decode('S3', Bytes) of
- {error,{asn1,{unexpected,_}}} -> ok
- end,
+ {unexpected,_} = dec_error('S3', Bytes),
%% Unexpected bytes must be accepted if there is an extensionmark
{ok,{'S3ext',17}} = 'Constructed':decode('S3ext', Bytes),
%% Truncated tag.
- {error,{asn1,{invalid_tag,_}}} =
- (catch 'Constructed':decode('I', <<31,255,255>>)),
+ {invalid_tag,_} = dec_error('I', <<31,255,255>>),
%% Overlong tag.
- {error,{asn1,{invalid_tag,_}}} =
- (catch 'Constructed':decode('I', <<31,255,255,255,127>>)),
+ {invalid_tag,_} = dec_error('I', <<31,255,255,255,127>>),
%% Invalid length.
- {error,{asn1,{invalid_length,_}}} =
- (catch 'Constructed':decode('I', <<8,255>>)),
+ {invalid_length,_} = dec_error('I', <<8,255>>),
%% Other errors.
- {error,{asn1,{invalid_value,_}}} =
- (catch 'Constructed':decode('I', <<>>)),
+ {invalid_value,_} = dec_error('I', <<>>),
- {error,{asn1,{invalid_value,_}}} =
- (catch 'Constructed':decode('I', <<8,7>>)),
+ {invalid_value,_} = dec_error('I', <<8,7>>),
%% Short indefinite length. Make sure that the decoder doesn't look
%% beyond the end of binary when looking for a 0,0 terminator.
- {error,{asn1,{invalid_length,_}}} =
- (catch 'Constructed':decode('S', sub(<<8,16#80,0,0>>, 3))),
- {error,{asn1,{invalid_length,_}}} =
- (catch 'Constructed':decode('S', sub(<<8,16#80,0,0>>, 2))),
- {error,{asn1,{invalid_length,_}}} =
- (catch 'Constructed':decode('S', sub(<<40,16#80,1,1,255,0,0>>, 6))),
- {error,{asn1,{invalid_length,_}}} =
- (catch 'Constructed':decode('S', sub(<<40,16#80,1,1,255,0,0>>, 5))),
+ {invalid_length,_} = dec_error('S', sub(<<8,16#80,0,0>>, 3)),
+ {invalid_length,_} = dec_error('S', sub(<<8,16#80,0,0>>, 2)),
+ {invalid_length,_} = dec_error('S', sub(<<40,16#80,1,1,255,0,0>>, 6)),
+ {invalid_length,_} = dec_error('S', sub(<<40,16#80,1,1,255,0,0>>, 5)),
%% A primitive must not be encoded with an indefinite length.
- {error,{asn1,{invalid_length,_}}} =
- (catch 'Constructed':decode('OS', <<4,128,4,3,97,98,99,0,0>>)),
+ {invalid_length,_} = dec_error('OS', <<4,128,4,3,97,98,99,0,0>>),
ok.
+dec_error(T, Bin) ->
+ {error,{asn1,{Reason,Stk}}} = 'Constructed':decode(T, Bin),
+ [{_,_,_,_}|_] = Stk,
+ Reason.
+
sub(Bin, Bytes) ->
<<B:Bytes/binary,_/binary>> = Bin,
B.
diff --git a/lib/asn1/test/h323test.erl b/lib/asn1/test/h323test.erl
index 935af0ba09..41a9159335 100644
--- a/lib/asn1/test/h323test.erl
+++ b/lib/asn1/test/h323test.erl
@@ -27,6 +27,8 @@ run(per) -> run();
run(_Rules) -> ok.
run() ->
+ roundtrip('EndpointType', endpoint()),
+ roundtrip('Alerting-UUIE', alerting_uuie()),
roundtrip('H323-UserInformation', alerting_val(), alerting_enc()),
roundtrip('H323-UserInformation', connect_val(), connect_enc()),
general_string(),
@@ -36,18 +38,24 @@ alerting_val() ->
{'H323-UserInformation',
{'H323-UU-PDU',
{alerting,
- {'Alerting-UUIE',
- {0,0,8,2250,0,2},
- {'EndpointType',asn1_NOVALUE,asn1_NOVALUE,asn1_NOVALUE,
- asn1_NOVALUE,asn1_NOVALUE,
- {'TerminalInfo',asn1_NOVALUE},
- false,false},
- asn1_NOVALUE,
- {'CallIdentifier',<<0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0>>},
- asn1_NOVALUE,asn1_NOVALUE,asn1_NOVALUE,asn1_NOVALUE}},
+ alerting_uuie()},
asn1_NOVALUE,asn1_NOVALUE,asn1_NOVALUE,asn1_NOVALUE,asn1_NOVALUE},
asn1_NOVALUE}.
+endpoint() ->
+ {'EndpointType',asn1_NOVALUE,asn1_NOVALUE,asn1_NOVALUE,
+ asn1_NOVALUE,asn1_NOVALUE,
+ {'TerminalInfo',asn1_NOVALUE},
+ false,false}.
+
+alerting_uuie() ->
+ {'Alerting-UUIE',
+ {0,0,8,2250,0,2},
+ endpoint(),
+ asn1_NOVALUE,
+ {'CallIdentifier',<<0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0>>},
+ asn1_NOVALUE,asn1_NOVALUE,asn1_NOVALUE,asn1_NOVALUE}.
+
alerting_enc() ->
"0380060008914a0002020120110000000000000000000000000000000000".
@@ -82,6 +90,9 @@ general_string() ->
UI = <<109,64,1,57>>,
{ok, _V} = 'MULTIMEDIA-SYSTEM-CONTROL':decode(Type, UI).
+roundtrip(T, V) ->
+ asn1_test_lib:roundtrip('H323-MESSAGES', T, V).
+
roundtrip(T, V, HexString) ->
Enc = asn1_test_lib:hex_to_bin(HexString),
Enc = asn1_test_lib:roundtrip_enc('H323-MESSAGES', T, V),
diff --git a/lib/asn1/test/testChoPrim.erl b/lib/asn1/test/testChoPrim.erl
index 573c482f2b..61b6ab2d05 100644
--- a/lib/asn1/test/testChoPrim.erl
+++ b/lib/asn1/test/testChoPrim.erl
@@ -31,10 +31,10 @@ bool(Rules) ->
roundtrip('ChoCon', {int2,233}),
case Rules of
ber ->
- {error,{asn1,{invalid_choice_type,wrong}}} =
- (catch 'ChoPrim':encode('ChoCon', {wrong,233})),
- {error,{asn1,{invalid_choice_tag,_WrongTag}}} =
- (catch 'ChoPrim':decode('ChoCon', <<131,2,0,233>>));
+ {error,{asn1,{{invalid_choice_type,wrong},[_|_]}}} =
+ (catch 'ChoPrim':encode('ChoCon', {wrong,233})),
+ {error,{asn1,{{invalid_choice_tag,_WrongTag},[_|_]}}} =
+ (catch 'ChoPrim':decode('ChoCon', <<131,2,0,233>>));
per ->
ok;
uper ->
diff --git a/lib/asn1/test/testContextSwitchingTypes.erl b/lib/asn1/test/testContextSwitchingTypes.erl
index 10012908a9..5688d8afd6 100644
--- a/lib/asn1/test/testContextSwitchingTypes.erl
+++ b/lib/asn1/test/testContextSwitchingTypes.erl
@@ -90,5 +90,6 @@ check_object_identifier(Tuple) when is_tuple(Tuple) ->
enc_dec(T, V0) ->
M = 'ContextSwitchingTypes',
{ok,Enc} = M:encode(T, V0),
+ asn1_test_lib:map_roundtrip(M, T, Enc),
{ok,V} = M:decode(T, Enc),
V.
diff --git a/lib/asn1/test/testInfObj.erl b/lib/asn1/test/testInfObj.erl
index 5a9f47d865..c519c70cdf 100644
--- a/lib/asn1/test/testInfObj.erl
+++ b/lib/asn1/test/testInfObj.erl
@@ -197,5 +197,6 @@ roundtrip(M, T, V) ->
enc_dec(M, T, V0) ->
{ok,Enc} = M:encode(T, V0),
+ asn1_test_lib:map_roundtrip(M, T, Enc),
{ok,V} = M:decode(T, Enc),
V.
diff --git a/lib/asn1/test/testInfObjectClass.erl b/lib/asn1/test/testInfObjectClass.erl
index 560986fac9..540407fa51 100644
--- a/lib/asn1/test/testInfObjectClass.erl
+++ b/lib/asn1/test/testInfObjectClass.erl
@@ -33,19 +33,29 @@ main(Rule) ->
roundtrip('Seq', Val),
%% OTP-5783
- {error,{asn1,{'Type not compatible with table constraint',
- {component,'ArgumentType'},
- {value,_},_}}} = 'InfClass':encode('Seq', {'Seq',12,13,1}),
+ {'Type not compatible with table constraint',
+ {component,'ArgumentType'},
+ {value,_},_} = enc_error('Seq', {'Seq',12,13,1}),
Bytes2 = case Rule of
ber ->
<<48,9,2,1,12,2,1,11,2,1,1>>;
_ ->
<<1,12,1,11,1,1>>
end,
- {error,{asn1,{'Type not compatible with table constraint',
- {{component,_},
- {value,_B},_}}}} = 'InfClass':decode('Seq', Bytes2),
+ {'Type not compatible with table constraint',
+ {{component,_},
+ {value,_B},_}} = dec_error('Seq', Bytes2),
ok.
roundtrip(T, V) ->
asn1_test_lib:roundtrip('InfClass', T, V).
+
+enc_error(T, V) ->
+ {error,{asn1,{Reason,Stk}}} = 'InfClass':encode(T, V),
+ [{_,_,_,_}|_] = Stk,
+ Reason.
+
+dec_error(T, Bin) ->
+ {error,{asn1,{Reason,Stk}}} = 'InfClass':decode(T, Bin),
+ [{_,_,_,_}|_] = Stk,
+ Reason.
diff --git a/lib/asn1/test/testMaps.erl b/lib/asn1/test/testMaps.erl
new file mode 100644
index 0000000000..45dd2255ba
--- /dev/null
+++ b/lib/asn1/test/testMaps.erl
@@ -0,0 +1,50 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2017. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%
+-module(testMaps).
+
+-export([main/1]).
+
+main(_) ->
+ M = 'Maps',
+ true = M:maps(),
+
+ true = M:xy1() =:= #{x=>42,y=>17},
+ true = M:xy2() =:= #{x=>0,y=>0},
+ true = M:xy3() =:= #{x=>0,y=>999},
+ true = M:s1() =:= #{xy=>#{x=>100,y=>100}},
+
+ roundtrip('XY', M:xy1()),
+ roundtrip('XY', M:xy2()),
+ roundtrip('XY', M:xy3()),
+ roundtrip('XY', #{}, #{x=>0,y=>0}),
+
+ roundtrip('S', M:s1()),
+ roundtrip('S', #{}, #{xy=>#{x=>100,y=>100}}),
+ roundtrip('S', #{os=><<1,2,3>>}, #{xy=>#{x=>100,y=>100},
+ os=><<1,2,3>>}),
+
+ ok.
+
+roundtrip(Type, Value) ->
+ roundtrip(Type, Value, Value).
+
+roundtrip(Type, Value, Expected) ->
+ asn1_test_lib:roundtrip('Maps', Type, Value, Expected).
diff --git a/lib/asn1/test/testMultipleLevels.erl b/lib/asn1/test/testMultipleLevels.erl
index c610e59f3d..e9d83665aa 100644
--- a/lib/asn1/test/testMultipleLevels.erl
+++ b/lib/asn1/test/testMultipleLevels.erl
@@ -24,5 +24,7 @@
main(_) ->
Data = {'Top',{short,"abc"},{long,"a long string follows here"}},
- {ok,B} = 'MultipleLevels':encode('Top', Data),
- {ok,Data} = 'MultipleLevels':decode('Top', iolist_to_binary(B)).
+ roundtrip('Top', Data).
+
+roundtrip(T, V) ->
+ asn1_test_lib:roundtrip('MultipleLevels', T, V).
diff --git a/lib/asn1/test/testNBAPsystem.erl b/lib/asn1/test/testNBAPsystem.erl
index 1af283af42..8d61ca18ce 100644
--- a/lib/asn1/test/testNBAPsystem.erl
+++ b/lib/asn1/test/testNBAPsystem.erl
@@ -84,7 +84,7 @@ compile(Config, Options) ->
M <- ["NBAP-CommonDataTypes.asn",
"NBAP-IEs.asn",
"NBAP-PDU-Contents.asn",
- "NBAP-PDU-Discriptions.asn",
+ "NBAP-PDU-Descriptions.asn",
"NBAP-Constants.asn",
"NBAP-Containers.asn"]],
asn1_test_lib:compile_all(Fs, Config, Options),
@@ -98,16 +98,16 @@ test(_Erule,Config) ->
ticket_5812(Config) ->
Msg = v_5812(),
- {ok,B2} = 'NBAP-PDU-Discriptions':encode('NBAP-PDU', Msg),
+ {ok,B2} = 'NBAP-PDU-Descriptions':encode('NBAP-PDU', Msg),
V = <<0,28,74,0,3,48,0,0,1,0,123,64,41,0,0,0,126,64,35,95,208,2,89,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,145,0,1,205,0,0,0,0,2,98,64,1,128>>,
ok = compare(V,B2),
- {ok,Msg2} = 'NBAP-PDU-Discriptions':decode('NBAP-PDU', B2),
+ {ok,Msg2} = 'NBAP-PDU-Descriptions':decode('NBAP-PDU', B2),
ok = check_record_names(Msg2,Config).
enc_audit_req_msg() ->
Msg = {initiatingMessage, audit_req_msg()},
- {ok,B} = 'NBAP-PDU-Discriptions':encode('NBAP-PDU', Msg),
- {ok,_Msg} = 'NBAP-PDU-Discriptions':decode('NBAP-PDU', B),
+ {ok,B} = 'NBAP-PDU-Descriptions':encode('NBAP-PDU', Msg),
+ {ok,_Msg} = 'NBAP-PDU-Descriptions':decode('NBAP-PDU', B),
{initiatingMessage,
#'InitiatingMessage'{value=#'AuditRequest'{protocolIEs=[{_,114,ignore,_}],
protocolExtensions = asn1_NOVALUE}}} = _Msg,
@@ -116,8 +116,8 @@ enc_audit_req_msg() ->
cell_setup_req_msg_test() ->
Msg = {initiatingMessage, cell_setup_req_msg()},
- {ok,B} = 'NBAP-PDU-Discriptions':encode('NBAP-PDU', Msg),
- {ok,_Msg} = 'NBAP-PDU-Discriptions':decode('NBAP-PDU', B),
+ {ok,B} = 'NBAP-PDU-Descriptions':encode('NBAP-PDU', Msg),
+ {ok,_Msg} = 'NBAP-PDU-Descriptions':decode('NBAP-PDU', B),
io:format("Msg: ~P~n~n_Msg: ~P~n",[Msg,15,_Msg,15]),
ok.
diff --git a/lib/asn1/test/testPrim.erl b/lib/asn1/test/testPrim.erl
index 96a2dd6c79..b2933dfabc 100644
--- a/lib/asn1/test/testPrim.erl
+++ b/lib/asn1/test/testPrim.erl
@@ -34,15 +34,12 @@ bool(Rules) ->
Types = ['Bool','BoolCon','BoolPri','BoolApp',
'BoolExpCon','BoolExpPri','BoolExpApp'],
[roundtrip(T, V) || T <- Types, V <- [true,false]],
- case Rules of
- ber ->
- [begin
- {error,{asn1,{encode_boolean,517}}} = enc_error(T, 517)
- end || T <- Types],
- ok;
- _ ->
- ok
- end.
+ Tag = case Rules of
+ ber -> encode_boolean;
+ _ -> illegal_boolean
+ end,
+ [{Tag,517} = enc_error(T, 517) || T <- Types],
+ ok.
int(Rules) ->
@@ -60,10 +57,22 @@ int(Rules) ->
123456789,12345678901234567890,
-1,-2,-3,-4,-100,-127,-255,-256,-257,
-1234567890,-2147483648],
- [roundtrip(T, V) ||
- T <- ['Int','IntCon','IntPri','IntApp',
- 'IntExpCon','IntExpPri','IntExpApp'],
- V <- [1|Values]],
+ Types = ['Int','IntCon','IntPri','IntApp',
+ 'IntExpCon','IntExpPri','IntExpApp'],
+ _ = [roundtrip(T, V) || T <- Types, V <- [1|Values]],
+ Tag = case Rules of
+ ber -> encode_integer;
+ _ -> illegal_integer
+ end,
+ _ = [{Tag,V} = enc_error(T, V) ||
+ T <- Types, V <- [atom,42.0,{a,b,c}]],
+ case Rules of
+ ber ->
+ ok;
+ _ ->
+ _ = [{Tag,V} = enc_error('IntConstrained', V) ||
+ V <- [atom,-1,256,42.0]]
+ end,
%%==========================================================
%% IntEnum ::= INTEGER {first(1),last(31)}
@@ -119,7 +128,11 @@ enum(Rules) ->
roundtrip('Enum', monday),
roundtrip('Enum', thursday),
- {error,{asn1,{_,4}}} = enc_error('Enum', 4),
+ Tag = case Rules of
+ ber -> enumerated_not_in_range;
+ _ -> illegal_enumerated
+ end,
+ {Tag,4} = enc_error('Enum', 4),
case Rules of
Per when Per =:= per; Per =:= uper ->
@@ -182,13 +195,15 @@ roundtrip(Type, Value, ExpectedValue) ->
enc_error(T, V) ->
case get(no_ok_wrapper) of
false ->
- 'Prim':encode(T, V);
+ {error,{asn1,{Reason,Stk}}} = 'Prim':encode(T, V),
+ [{_,_,_,_}|_] = Stk,
+ Reason;
true ->
try 'Prim':encode(T, V) of
_ ->
?t:fail()
catch
- _:Reason ->
+ _:{error,{asn1,Reason}} ->
Reason
end
end.
diff --git a/lib/asn1/test/testPrimStrings.erl b/lib/asn1/test/testPrimStrings.erl
index cb97655c15..b7f0323301 100644
--- a/lib/asn1/test/testPrimStrings.erl
+++ b/lib/asn1/test/testPrimStrings.erl
@@ -19,8 +19,6 @@
%%
%%
-module(testPrimStrings).
--compile([{nowarn_deprecated_function,{asn1rt,utf8_list_to_binary,1}},
- {nowarn_deprecated_function,{asn1rt,utf8_binary_to_list,1}}]).
-export([bit_string/2]).
-export([octet_string/1]).
@@ -756,19 +754,21 @@ utf8_string(_Rules) ->
16#800,
16#ffff,
16#10000,
- 16#1fffff,
- 16#200000,
- 16#3ffffff,
- 16#4000000,
- 16#7fffffff],
+ 16#1ffff,
+ 16#20000,
+ 16#2ffff,
+ 16#e0000,
+ 16#effff,
+ 16#F0000,
+ 16#10ffff],
[begin
- {ok,UTF8} = asn1rt:utf8_list_to_binary([Char]),
- {ok,[Char]} = asn1rt:utf8_binary_to_list(UTF8),
+ UTF8 = unicode:characters_to_binary([Char]),
+ [Char] = unicode:characters_to_list([UTF8]),
roundtrip('UTF', UTF8)
end || Char <- AllRanges],
- {ok,UTF8} = asn1rt:utf8_list_to_binary(AllRanges),
- {ok,AllRanges} = asn1rt:utf8_binary_to_list(UTF8),
+ UTF8 = unicode:characters_to_binary(AllRanges),
+ AllRanges = unicode:characters_to_list(UTF8),
roundtrip('UTF', UTF8),
ok.
diff --git a/lib/asn1/test/testRfcs.erl b/lib/asn1/test/testRfcs.erl
index da7333ef98..20176e35eb 100644
--- a/lib/asn1/test/testRfcs.erl
+++ b/lib/asn1/test/testRfcs.erl
@@ -35,22 +35,27 @@ compile(Config, Erules, Options0) ->
asn1_test_lib:compile_all(Specs, Config, [Erules,{i,CaseDir}|Options]).
test() ->
- {1,3,6,1,5,5,7,48,1,2} =
- IdPkixOcspNonce =
- 'OCSP-2009':'id-pkix-ocsp-nonce'(),
- roundtrip('OCSP-2009', 'OCSPRequest',
- {'OCSPRequest',
- {'TBSRequest',
- 0,
- {rfc822Name,"name string"},
- [{'Request',
- {'CertID',{'_',{2,9,3,4,5},asn1_NOVALUE},
- <<"POTATOHASH">>,<<"HASHBROWN">>,42},
- [{'_',IdPkixOcspNonce,true,<<34,159,16,57,199>>}]}],
- asn1_NOVALUE},
- asn1_NOVALUE}),
- otp_7759(),
- ok.
+ M = 'OCSP-2009',
+ case M:maps() of
+ false ->
+ {1,3,6,1,5,5,7,48,1,2} =
+ IdPkixOcspNonce =
+ 'OCSP-2009':'id-pkix-ocsp-nonce'(),
+ roundtrip('OCSP-2009', 'OCSPRequest',
+ {'OCSPRequest',
+ {'TBSRequest',
+ 0,
+ {rfc822Name,"name string"},
+ [{'Request',
+ {'CertID',{'_',{2,9,3,4,5},asn1_NOVALUE},
+ <<"POTATOHASH">>,<<"HASHBROWN">>,42},
+ [{'_',IdPkixOcspNonce,true,<<34,159,16,57,199>>}]}],
+ asn1_NOVALUE},
+ asn1_NOVALUE}),
+ otp_7759(records);
+ true ->
+ otp_7759(maps)
+ end.
roundtrip(Module, Type, Value0) ->
Enc = Module:encode(Type, Value0),
@@ -58,7 +63,7 @@ roundtrip(Module, Type, Value0) ->
asn1_test_lib:match_value(Value0, Value1),
ok.
-otp_7759() ->
+otp_7759(Pack) ->
%% The release note for asn-1.6.6 says:
%% Decode of an open_type when the value was empty tagged
%% type encoded with indefinite length failed.
@@ -66,10 +71,15 @@ otp_7759() ->
Encoded = encoded_msg(),
ContentInfo = Mod:decode('ContentInfo', Encoded),
io:format("~p\n", [ContentInfo]),
- {'ContentInfo',_Id,PKCS7_content} = ContentInfo,
- X = Mod:decode('SignedData', PKCS7_content),
+ Content = case ContentInfo of
+ {'ContentInfo',_Id,Content0} when Pack =:= records ->
+ Content0;
+ #{'content-type':=_,'pkcs7-content':=Content0}
+ when Pack =:= maps ->
+ Content0
+ end,
+ X = Mod:decode('SignedData', Content),
io:format("~p\n", [X]),
- io:nl(),
ok.
encoded_msg() ->
diff --git a/lib/asn1/test/testSeqExtension.erl b/lib/asn1/test/testSeqExtension.erl
index f7885cb002..be1d1c2490 100644
--- a/lib/asn1/test/testSeqExtension.erl
+++ b/lib/asn1/test/testSeqExtension.erl
@@ -31,6 +31,7 @@
-record('SeqExt4',{bool, int}).
-record('SeqExt5',{name, shoesize}).
-record('SeqExt6',{i1,i2,i3,i4,i5,i6,i7}).
+-record('SeqExt7',{a=asn1_NOVALUE,b=asn1_NOVALUE,c}).
-record('SuperSeq',{s1,s2,s3,s4,s5,s6,i}).
main(Erule, DataDir, Opts) ->
@@ -45,8 +46,35 @@ main(Erule, DataDir, Opts) ->
roundtrip('SeqExt4', #'SeqExt4'{bool=true,int=12345}),
roundtrip('SeqExt4', #'SeqExt4'{bool=false,int=123456}),
+ case Erule of
+ ber ->
+ %% BER currently does not handle Extension Addition Groups
+ %% correctly.
+ ok;
+ _ ->
+ v_roundtrip3('SeqExt5', #'SeqExt5'{name=asn1_NOVALUE,
+ shoesize=asn1_NOVALUE},
+ Erule, #{per=>"00",
+ uper=>"00"}),
+ v_roundtrip3('SeqExt7', #'SeqExt7'{c=asn1_NOVALUE},
+ Erule, #{per=>"00",
+ uper=>"00"})
+ end,
roundtrip('SeqExt5', #'SeqExt5'{name = <<"Arne">>,shoesize=47}),
+ v_roundtrip3('SeqExt7', #'SeqExt7'{c=false},
+ Erule, #{per=>"80800100",
+ uper=>"80808000"}),
+ v_roundtrip3('SeqExt7', #'SeqExt7'{c=true},
+ Erule, #{per=>"80800120",
+ uper=>"80809000"}),
+ v_roundtrip3('SeqExt7', #'SeqExt7'{a=777,b = <<16#AA>>,c=false},
+ Erule, #{per=>"808006C0 030901AA 00",
+ uper=>"8082E061 20354000"}),
+ v_roundtrip3('SeqExt7', #'SeqExt7'{a=8888,c=false},
+ Erule, #{per=>"80800480 22B800",
+ uper=>"8081C457 0000"}),
+
%% Encode a value with this version of the specification.
BigInt = 128638468966,
SuperSeq = #'SuperSeq'{s1=#'SeqExt1'{},
@@ -106,6 +134,7 @@ main(Erule, DataDir, Opts) ->
v_roundtrip2(Erule, 'SeqExt130',
list_to_tuple(['SeqExt130'|
lists:duplicate(129, asn1_NOVALUE)++[199]])),
+
ok.
roundtrip(Type, Value) ->
@@ -118,6 +147,15 @@ v_roundtrip2(Erule, Type, Value) ->
roundtrip2(Type, Value) ->
asn1_test_lib:roundtrip_enc('SeqExtension2', Type, Value).
+v_roundtrip3(Type, Value, Erule, Map) ->
+ case maps:find(Erule, Map) of
+ {ok,Hex} ->
+ Encoded = asn1_test_lib:hex_to_bin(Hex),
+ Encoded = asn1_test_lib:roundtrip_enc('SeqExtension', Type, Value);
+ error ->
+ asn1_test_lib:roundtrip('SeqExtension', Type, Value)
+ end.
+
v(ber, 'SeqExt66') -> "30049F41 017D";
v(per, 'SeqExt66') -> "C0420000 00000000 00004001 FA";
v(uper, 'SeqExt66') -> "D0800000 00000000 00101FA0";
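The new SeqExt7 cases above exercise extension addition groups: when the whole group is absent the PER/UPER encoding collapses to a single zero octet ("00"), and when any member is present the extension bit map and open-type wrapper appear, as the hex vectors show. A sketch of the check that v_roundtrip3/4 performs, assuming the SeqExtension module generated from the suite's ASN.1 spec:

    verify_encoding(Type, Value, Hex) ->
        Expected = asn1_test_lib:hex_to_bin(Hex),   % "80800100" -> <<16#80,16#80,16#01,16#00>>
        %% roundtrip_enc/3 encodes, decodes and matches the value, then
        %% returns the encoding so the exact octets can be asserted too.
        Expected = asn1_test_lib:roundtrip_enc('SeqExtension', Type, Value),
        ok.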
diff --git a/lib/asn1/test/testTCAP.erl b/lib/asn1/test/testTCAP.erl
index 422ae1f0fc..a6f0f9fad7 100644
--- a/lib/asn1/test/testTCAP.erl
+++ b/lib/asn1/test/testTCAP.erl
@@ -92,5 +92,6 @@ test_asn1config() ->
enc_dec(T, V0) ->
M = 'TCAPPackage',
{ok,Enc} = M:encode(T, V0),
+ asn1_test_lib:map_roundtrip(M, T, Enc),
{ok,V} = M:decode(T, Enc),
V.
diff --git a/lib/asn1/test/testTimer.erl b/lib/asn1/test/testTimer.erl
index bd8da85735..3edeb1b712 100644
--- a/lib/asn1/test/testTimer.erl
+++ b/lib/asn1/test/testTimer.erl
@@ -25,7 +25,42 @@
-define(times, 5000).
-val() ->
+go() ->
+ Module = 'H323-MESSAGES',
+ Type = 'H323-UserInformation',
+ Value = case Module:maps() of
+ false -> val_records();
+ true -> val_maps()
+ end,
+ Bytes = Module:encode(Type, Value),
+ Value = Module:decode(Type, Bytes),
+
+ {ValWr,done} = timer:tc(fun() -> encode(?times, Module, Type, Value) end),
+ io:format("ASN.1 encoding: ~p micro~n", [ValWr / ?times]),
+
+ done = decode(2, Module, Type, Bytes),
+
+ {ValRead,done} = timer:tc(fun() -> decode(?times, Module, Type, Bytes) end),
+ io:format("ASN.1 decoding: ~p micro~n", [ValRead /?times]),
+
+ Comment = "encode: "++integer_to_list(round(ValWr/?times)) ++
+ " micro, decode: "++integer_to_list(round(ValRead /?times)) ++
+ " micro. [" ++ atom_to_list(Module:encoding_rule()) ++ "]",
+ {comment,Comment}.
+
+encode(0, _Module,_Type,_Value) ->
+ done;
+encode(N, Module,Type,Value) ->
+ Module:encode(Type, Value),
+ encode(N-1, Module, Type, Value).
+
+decode(0, _Module, _Type, _Value) ->
+ done;
+decode(N, Module, Type, Value) ->
+ Module:decode(Type, Value),
+ decode(N-1, Module, Type, Value).
+
+val_records() ->
{'H323-UserInformation',{'H323-UU-PDU',
{callProceeding,
{'CallProceeding-UUIE',
@@ -126,34 +161,66 @@ val() ->
{'H323-UserInformation_user-data',24,<<"O">>}}.
-go() ->
- Module = 'H323-MESSAGES',
- Type = 'H323-UserInformation',
- Value = val(),
- Bytes = Module:encode(Type, Value),
- Value = Module:decode(Type, Bytes),
-
- {ValWr,done} = timer:tc(fun() -> encode(?times, Module, Type, Value) end),
- io:format("ASN.1 encoding: ~p micro~n", [ValWr / ?times]),
-
- done = decode(2, Module, Type, Bytes),
-
- {ValRead,done} = timer:tc(fun() -> decode(?times, Module, Type, Bytes) end),
- io:format("ASN.1 decoding: ~p micro~n", [ValRead /?times]),
-
- Comment = "encode: "++integer_to_list(round(ValWr/?times)) ++
- " micro, decode: "++integer_to_list(round(ValRead /?times)) ++
- " micro. [" ++ atom_to_list(Module:encoding_rule()) ++ "]",
- {comment,Comment}.
-
-encode(0, _Module,_Type,_Value) ->
- done;
-encode(N, Module,Type,Value) ->
- Module:encode(Type, Value),
- encode(N-1, Module, Type, Value).
-
-decode(0, _Module, _Type, _Value) ->
- done;
-decode(N, Module, Type, Value) ->
- Module:decode(Type, Value),
- decode(N-1, Module, Type, Value).
+val_maps() ->
+#{'h323-uu-pdu' => #{h245Control => [],
+ h245Tunneling => true,
+ 'h323-message-body' => {callProceeding,#{callIdentifier => #{guid => <<"OCTET STRINGOCTE">>},
+ cryptoTokens => [{cryptoGKPwdEncr,#{algorithmOID => {1,18,467,467},
+ encryptedData => <<"OC">>,
+ paramS => #{iv8 => <<"OCTET ST">>,
+ ranInt => -7477016}}},
+ {cryptoGKPwdEncr,#{algorithmOID => {1,19,486,486},
+ encryptedData => <<>>,
+ paramS => #{iv8 => <<"OCTET ST">>,
+ ranInt => -2404513}}}],
+ destinationInfo => #{gatekeeper => #{nonStandardData => #{data => <<"O">>,
+ nonStandardIdentifier => {object,{0,10,260}}}},
+ gateway => #{nonStandardData => #{data => <<"O">>,
+ nonStandardIdentifier => {object,{0,13,326}}},
+ protocol => [{h320,#{dataRatesSupported => [#{channelMultiplier => 78,
+ channelRate => 1290470518,
+ nonStandardData => #{data => <<"O">>,
+ nonStandardIdentifier => {object,{0,11,295}}}}],
+ nonStandardData => #{data => <<"O">>,
+ nonStandardIdentifier => {object,{0,11,282}}},
+ supportedPrefixes => [#{nonStandardData => #{data => <<"O">>,
+ nonStandardIdentifier => {object,{0,12,312}}},
+ prefix => {'h323-ID',"BM"}}]}}]},
+ mc => true,
+ mcu => #{nonStandardData => #{data => <<"OC">>,
+ nonStandardIdentifier => {object,{1,13,340,340}}}},
+ nonStandardData => #{data => <<"O">>,nonStandardIdentifier => {object,{0,9,237}}},
+ terminal => #{nonStandardData => #{data => <<"OC">>,
+ nonStandardIdentifier => {object,{1,14,353,354}}}},
+ undefinedNode => true,
+ vendor => #{productId => <<"OC">>,
+ vendor => #{manufacturerCode => 16282,
+ t35CountryCode => 62,
+ t35Extension => 63},
+ versionId => <<"OC">>}},
+ fastStart => [],
+ h245Address => {ipxAddress,#{netnum => <<"OCTE">>,
+ node => <<"OCTET ">>,
+ port => <<"OC">>}},
+ h245SecurityMode => {noSecurity,'NULL'},
+ protocolIdentifier => {0,8,222},
+ tokens => [#{certificate => #{certificate => <<"OC">>,type => {1,16,405,406}},
+ challenge => <<"OCTET STR">>,
+ dhkey => #{generator => <<1:1>>,halfkey => <<1:1>>,modSize => <<1:1>>},
+ generalID => "BMP",
+ nonStandard => #{data => <<"OC">>,nonStandardIdentifier => {1,16,414,415}},
+ password => "BM",
+ random => -26430296,
+ timeStamp => 1667517741},
+ #{certificate => #{certificate => <<"OC">>,type => {1,17,442,443}},
+ challenge => <<"OCTET STRI">>,
+ dhkey => #{generator => <<1:1>>,halfkey => <<1:1>>,modSize => <<1:1>>},
+ generalID => "BMP",
+ nonStandard => #{data => <<"OC">>,nonStandardIdentifier => {1,18,452,452}},
+ password => "BMP",
+ random => -16356110,
+ timeStamp => 1817656756}]}},
+ h4501SupplementaryService => [],
+ nonStandardControl => [],
+ nonStandardData => #{data => <<>>,nonStandardIdentifier => {object,{0,3,84}}}},
+ 'user-data' => #{'protocol-discriminator' => 24,'user-information' => <<"O">>}}.
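A condensed sketch of the timing pattern now used by go/0, reusing the encode/4 and decode/4 helpers defined above; Mod, Type, Value and N are whatever the caller supplies:

    bench(Mod, Type, Value, N) ->
        Bytes = Mod:encode(Type, Value),
        {EncUs, done} = timer:tc(fun() -> encode(N, Mod, Type, Value) end),
        {DecUs, done} = timer:tc(fun() -> decode(N, Mod, Type, Bytes) end),
        %% Average number of microseconds per encode and per decode call.
        {EncUs / N, DecUs / N}.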
diff --git a/lib/asn1/test/testUniqueObjectSets.erl b/lib/asn1/test/testUniqueObjectSets.erl
index 4d3ec94391..30cbceb577 100644
--- a/lib/asn1/test/testUniqueObjectSets.erl
+++ b/lib/asn1/test/testUniqueObjectSets.erl
@@ -27,6 +27,7 @@ seq_roundtrip(I, D0) ->
M = 'UniqueObjectSets',
try
{ok,Enc} = M:encode('Seq', {'Seq',I,D0}),
+ asn1_test_lib:map_roundtrip(M, 'Seq', Enc),
{ok,{'Seq',I,D}} = M:decode('Seq', Enc),
D
catch C:E ->
diff --git a/lib/asn1/test/test_compile_options.erl b/lib/asn1/test/test_compile_options.erl
index ac74470537..c15e61550c 100644
--- a/lib/asn1/test/test_compile_options.erl
+++ b/lib/asn1/test/test_compile_options.erl
@@ -24,8 +24,8 @@
-include_lib("common_test/include/ct.hrl").
--export([wrong_path/1,comp/2,path/1,ticket_6143/1,noobj/1,
- record_name_prefix/1,verbose/1]).
+-export([wrong_path/1,comp/2,path/1,noobj/1,
+ record_name_prefix/1,verbose/1,maps/1]).
%% OTP-5689
wrong_path(Config) ->
@@ -64,8 +64,6 @@ path(Config) ->
file:set_cwd(CWD),
ok.
-ticket_6143(Config) -> asn1_test_lib:compile("AA1", Config, []).
-
noobj(Config) ->
DataDir = proplists:get_value(data_dir,Config),
OutDir = proplists:get_value(priv_dir,Config),
@@ -130,6 +128,28 @@ verbose(Config) when is_list(Config) ->
[] = test_server:capture_get(),
ok.
+maps(Config) ->
+ DataDir = proplists:get_value(data_dir, Config),
+ OutDir = proplists:get_value(case_dir, Config),
+ InFile = filename:join(DataDir, "P-Record"),
+
+ do_maps(ber, InFile, OutDir),
+ do_maps(per, InFile, OutDir),
+ do_maps(uper, InFile, OutDir).
+
+do_maps(Erule, InFile, OutDir) ->
+ Opts = [Erule,maps,{outdir,OutDir}],
+ ok = asn1ct:compile(InFile, Opts),
+
+ %% Make sure that no .hrl files are generated.
+ [] = filelib:wildcard(filename:join(OutDir, "*.hrl")),
+
+ %% Remove all generated files.
+ All = filelib:wildcard(filename:join(OutDir, "*")),
+ _ = [file:delete(N) || N <- All],
+
+ ok.
+
outfiles_check(OutDir) ->
outfiles_check(OutDir,outfiles1()).
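A sketch of driving the new maps option directly (paths are examples; "P-Record" is the spec used by the test above):

    compile_with_maps() ->
        OutDir = "asn1_out",                               % example directory
        ok = asn1ct:compile("P-Record", [ber, maps, {outdir, OutDir}]),
        %% With maps, values are plain maps and no record definitions are
        %% generated, so no .hrl file should appear.
        [] = filelib:wildcard(filename:join(OutDir, "*.hrl")),
        ok.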
diff --git a/lib/common_test/doc/src/Makefile b/lib/common_test/doc/src/Makefile
index e495f587a3..152ece5d25 100644
--- a/lib/common_test/doc/src/Makefile
+++ b/lib/common_test/doc/src/Makefile
@@ -53,7 +53,8 @@ XML_REF3_FILES = ct.xml \
ct_slave.xml \
ct_property_test.xml \
ct_netconfc.xml \
- ct_hooks.xml
+ ct_hooks.xml \
+ ct_testspec.xml
XML_REF6_FILES = common_test_app.xml
XML_PART_FILES = part.xml
diff --git a/lib/common_test/doc/src/common_test_app.xml b/lib/common_test/doc/src/common_test_app.xml
index 48ffe653e4..d407a0a53f 100644
--- a/lib/common_test/doc/src/common_test_app.xml
+++ b/lib/common_test/doc/src/common_test_app.xml
@@ -224,7 +224,9 @@
</type>
<desc>
- <p>OPTIONAL</p>
+ <p>OPTIONAL; if this function is defined, then <seealso
+ marker="#Module:end_per_suite-1"><c>end_per_suite/1</c></seealso>
+ must also be defined.</p>
<p>This configuration function is called as the first function in the
suite. It typically contains initializations that are common for
@@ -256,7 +258,9 @@
</type>
<desc>
- <p>OPTIONAL</p>
+ <p>OPTIONAL; if this function is defined, then <seealso
+ marker="#Module:init_per_suite-1"><c>init_per_suite/1</c></seealso>
+ must also be defined.</p>
<p>This function is called as the last test case in the
suite. It is meant to be used for cleaning up after
@@ -360,7 +364,9 @@
</type>
<desc>
- <p>OPTIONAL</p>
+ <p>OPTIONAL; if this function is defined, then <seealso
+ marker="#Module:end_per_group-2"><c>end_per_group/2</c></seealso>
+ must also be defined.</p>
<p>This configuration function is called before execution of a
test case group. It typically contains initializations that are
@@ -396,7 +402,9 @@
</type>
<desc>
- <p>OPTIONAL</p>
+ <p>OPTIONAL; if this function is defined, then <seealso
+ marker="#Module:init_per_group-2"><c>init_per_group/2</c></seealso>
+ must also be defined.</p>
<p>This function is called after the execution of a test case group
is finished. It is meant to be used for cleaning up after
@@ -427,7 +435,10 @@
</type>
<desc>
- <p>OPTIONAL</p>
+ <p>OPTIONAL; if this function is defined,
+ then <seealso marker="#Module:end_per_testcase-2">
+ <c>end_per_testcase/2</c></seealso> must also be
+ defined.</p>
<p>This function is called before each test case. Argument
<c>TestCase</c> is the test case name, and
@@ -454,7 +465,10 @@
</type>
<desc>
- <p>OPTIONAL</p>
+ <p>OPTIONAL; if this function is defined,
+ then <seealso marker="#Module:init_per_testcase-2">
+ <c>init_per_testcase/2</c></seealso> must also be
+ defined.</p>
<p>This function is called after each test case, and can be used
to clean up after
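A suite skeleton honouring the pairing rule documented above, where exporting an init_per_* callback requires exporting the matching end_per_* as well (module and test case names are examples):

    -module(pairing_SUITE).
    -export([all/0,
             init_per_suite/1, end_per_suite/1,
             init_per_testcase/2, end_per_testcase/2,
             tc1/1]).

    all() -> [tc1].

    init_per_suite(Config) ->
        [{started_at, erlang:monotonic_time()} | Config].
    end_per_suite(_Config) ->
        ok.

    init_per_testcase(_TC, Config) -> Config.
    end_per_testcase(_TC, _Config) -> ok.

    tc1(_Config) -> ok.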
diff --git a/lib/common_test/doc/src/ct_hooks.xml b/lib/common_test/doc/src/ct_hooks.xml
index c2cf29c530..a085f30262 100644
--- a/lib/common_test/doc/src/ct_hooks.xml
+++ b/lib/common_test/doc/src/ct_hooks.xml
@@ -208,9 +208,10 @@
</func>
<func>
- <name>Module:pre_init_per_group(GroupName, InitData, CTHState) -&gt; Result</name>
+ <name>Module:pre_init_per_group(SuiteName, GroupName, InitData, CTHState) -&gt; Result</name>
<fsummary>Called before init_per_group.</fsummary>
<type>
+ <v>SuiteName = atom()</v>
<v>GroupName = atom()</v>
<v>InitData = Config | SkipOrFail</v>
<v>Config = NewConfig = [{Key,Value}]</v>
@@ -231,13 +232,19 @@
but for function
<seealso marker="common_test#Module:init_per_group-2"><c>init_per_group</c></seealso>
instead.</p>
+
+ <p>If <c>Module:pre_init_per_group/4</c> is not exported, common_test
+ will attempt to call <c>Module:pre_init_per_group(GroupName,
+ InitData, CTHState)</c> instead. This is for backwards
+ compatibility.</p>
</desc>
</func>
<func>
- <name>Module:post_init_per_group(GroupName, Config, Return, CTHState) -&gt; Result</name>
+ <name>Module:post_init_per_group(SuiteName, GroupName, Config, Return, CTHState) -&gt; Result</name>
<fsummary>Called after init_per_group.</fsummary>
<type>
+ <v>SuiteName = atom()</v>
<v>GroupName = atom()</v>
<v>Config = [{Key,Value}]</v>
<v>Return = NewReturn = Config | SkipOrFail | term()</v>
@@ -258,13 +265,19 @@
but for function
<seealso marker="common_test#Module:init_per_group-2"><c>init_per_group</c></seealso>
instead.</p>
+
+ <p>If <c>Module:post_init_per_group/5</c> is not exported, common_test
+ will attempt to call <c>Module:post_init_per_group(GroupName,
+ Config, Return, CTHState)</c> instead. This is for backwards
+ compatibility.</p>
</desc>
</func>
<func>
- <name>Module:pre_init_per_testcase(TestcaseName, InitData, CTHState) -&gt; Result</name>
+ <name>Module:pre_init_per_testcase(SuiteName, TestcaseName, InitData, CTHState) -&gt; Result</name>
<fsummary>Called before init_per_testcase.</fsummary>
<type>
+ <v>SuiteName = atom()</v>
<v>TestcaseName = atom()</v>
<v>InitData = Config | SkipOrFail</v>
<v>Config = NewConfig = [{Key,Value}]</v>
@@ -286,6 +299,11 @@
<seealso marker="common_test#Module:init_per_testcase-2"><c>init_per_testcase</c></seealso>
instead.</p>
+ <p>If <c>Module:pre_init_per_testcase/4</c> is not exported, common_test
+ will attempt to call <c>Module:pre_init_per_testcase(TestcaseName,
+ InitData, CTHState)</c> instead. This is for backwards
+ compatibility.</p>
+
<p>CTHs cannot be added here right now. That feature may be added in
a later release, but it would right now break backwards
compatibility.</p>
@@ -293,9 +311,10 @@
</func>
<func>
- <name>Module:post_init_per_testcase(TestcaseName, Config, Return, CTHState) -&gt; Result</name>
+ <name>Module:post_init_per_testcase(SuiteName, TestcaseName, Config, Return, CTHState) -&gt; Result</name>
<fsummary>Called after init_per_testcase.</fsummary>
<type>
+ <v>SuiteName = atom()</v>
<v>TestcaseName = atom()</v>
<v>Config = [{Key,Value}]</v>
<v>Return = NewReturn = Config | SkipOrFail | term()</v>
@@ -316,15 +335,21 @@
but for function
<seealso marker="common_test#Module:init_per_testcase-2"><c>init_per_testcase</c></seealso>
instead.</p>
+
+ <p>If <c>Module:post_init_per_testcase/5</c> is not exported, common_test
+ will attempt to call <c>Module:post_init_per_testcase(TestcaseName,
+ Config, Return, CTHState)</c> instead. This is for backwards
+ compatibility.</p>
</desc>
</func>
<func>
- <name>Module:pre_end_per_testcase(TestcaseName, InitData, CTHState) -&gt; Result</name>
+ <name>Module:pre_end_per_testcase(SuiteName, TestcaseName, EndData, CTHState) -&gt; Result</name>
<fsummary>Called before end_per_testcase.</fsummary>
<type>
+ <v>SuiteName = atom()</v>
<v>TestcaseName = atom()</v>
- <v>InitData = Config</v>
+ <v>EndData = Config</v>
<v>Config = NewConfig = [{Key,Value}]</v>
<v>CTHState = NewCTHState = term()</v>
<v>Result = {NewConfig, NewCTHState}</v>
@@ -345,14 +370,20 @@
<p>This function can not change the result of the test case by returning skip or fail
tuples, but it may insert items in <c>Config</c> that can be read in
- <c>end_per_testcase/2</c> or in <c>post_end_per_testcase/4</c>.</p>
+ <c>end_per_testcase/2</c> or in <c>post_end_per_testcase/5</c>.</p>
+
+ <p>If <c>Module:pre_end_per_testcase/4</c> is not exported, common_test
+ will attempt to call <c>Module:pre_end_per_testcase(TestcaseName,
+ EndData, CTHState)</c> instead. This is for backwards
+ compatibility.</p>
</desc>
</func>
<func>
- <name>Module:post_end_per_testcase(TestcaseName, Config, Return, CTHState) -&gt; Result</name>
+ <name>Module:post_end_per_testcase(SuiteName, TestcaseName, Config, Return, CTHState) -&gt; Result</name>
<fsummary>Called after end_per_testcase.</fsummary>
<type>
+ <v>SuiteName = atom()</v>
<v>TestcaseName = atom()</v>
<v>Config = [{Key,Value}]</v>
<v>Return = NewReturn = Config | SkipOrFail | term()</v>
@@ -373,13 +404,19 @@
but for function
<seealso marker="common_test#Module:end_per_testcase-2"><c>end_per_testcase</c></seealso>
instead.</p>
+
+ <p>If <c>Module:post_end_per_testcase/5</c> is not exported, common_test
+ will attempt to call <c>Module:post_end_per_testcase(TestcaseName,
+ Config, Return, CTHState)</c> instead. This is for backwards
+ compatibility.</p>
</desc>
</func>
<func>
- <name>Module:pre_end_per_group(GroupName, EndData, CTHState) -&gt; Result</name>
+ <name>Module:pre_end_per_group(SuiteName, GroupName, EndData, CTHState) -&gt; Result</name>
<fsummary>Called before end_per_group.</fsummary>
<type>
+ <v>SuiteName = atom()</v>
<v>GroupName = atom()</v>
<v>EndData = Config | SkipOrFail</v>
<v>Config = NewConfig = [{Key,Value}]</v>
@@ -400,13 +437,19 @@
but for function
<seealso marker="common_test#Module:end_per_group-2"><c>end_per_group</c></seealso>
instead.</p>
+
+ <p>If <c>Module:pre_end_per_group/4</c> is not exported, common_test
+ will attempt to call <c>Module:pre_end_per_group(GroupName,
+ EndData, CTHState)</c> instead. This is for backwards
+ compatibility.</p>
</desc>
</func>
<func>
- <name>Module:post_end_per_group(GroupName, Config, Return, CTHState) -&gt; Result</name>
+ <name>Module:post_end_per_group(SuiteName, GroupName, Config, Return, CTHState) -&gt; Result</name>
<fsummary>Called after end_per_group.</fsummary>
<type>
+ <v>SuiteName = atom()</v>
<v>GroupName = atom()</v>
<v>Config = [{Key,Value}]</v>
<v>Return = NewReturn = Config | SkipOrFail | term()</v>
@@ -427,6 +470,11 @@
but for function
<seealso marker="common_test#Module:end_per_group-2">end_per_group</seealso>
instead.</p>
+
+ <p>If <c>Module:post_end_per_group/5</c> is not exported, common_test
+ will attempt to call <c>Module:post_end_per_group(GroupName,
+ Config, Return, CTHState)</c> instead. This is for backwards
+ compatibility.</p>
</desc>
</func>
@@ -485,9 +533,10 @@
</func>
<func>
- <name>Module:on_tc_fail(TestName, Reason, CTHState) -&gt; NewCTHState</name>
+ <name>Module:on_tc_fail(SuiteName, TestName, Reason, CTHState) -&gt; NewCTHState</name>
<fsummary>Called after the CTH scope ends.</fsummary>
<type>
+ <v>SuiteName = atom()</v>
<v>TestName = init_per_suite | end_per_suite | {init_per_group,GroupName} | {end_per_group,GroupName} | {FuncName,GroupName} | FuncName</v>
<v>FuncName = atom()</v>
<v>GroupName = atom()</v>
@@ -505,7 +554,7 @@
<item><p>If <c>init_per_suite</c> fails, this function is called after
<seealso marker="#Module:post_init_per_suite-4"><c>post_init_per_suite</c></seealso>.</p></item>
 <item><p>If a test case fails, this function is called after
- <seealso marker="#Module:post_end_per_testcase-4"><c>post_end_per_testcase</c></seealso>.</p></item>
+ <seealso marker="#Module:post_end_per_testcase-5"><c>post_end_per_testcase</c></seealso>.</p></item>
</list>
<p>If the failed test case belongs to a test case group, the first
@@ -519,13 +568,19 @@
For details, see section
<seealso marker="event_handler_chapter#events">Event Handling</seealso>
in the User's Guide.</p>
+
+ <p>If <c>Module:on_tc_fail/4</c> is not exported, common_test
+ will attempt to call <c>Module:on_tc_fail(TestName, Reason,
+ CTHState)</c> instead. This is for backwards
+ compatibility.</p>
</desc>
</func>
<func>
- <name>Module:on_tc_skip(TestName, Reason, CTHState) -&gt; NewCTHState</name>
+ <name>Module:on_tc_skip(SuiteName, TestName, Reason, CTHState) -&gt; NewCTHState</name>
<fsummary>Called after the CTH scope ends.</fsummary>
<type>
+ <v>SuiteName = atom()</v>
<v>TestName = init_per_suite | end_per_suite | {init_per_group,GroupName} | {end_per_group,GroupName} | {FuncName,GroupName} | FuncName</v>
<v>FuncName = atom()</v>
<v>GroupName = atom()</v>
@@ -542,9 +597,9 @@
<list type="bulleted">
<item><p>If <c>init_per_group</c> is skipped, this function is
called after
- <seealso marker="#Module:post_init_per_group-4"><c>post_init_per_group</c></seealso>.</p></item>
+ <seealso marker="#Module:post_init_per_group-5"><c>post_init_per_group</c></seealso>.</p></item>
<item><p>If a test case is skipped, this function is called after
- <seealso marker="#Module:post_end_per_testcase-4"><c>post_end_per_testcase</c></seealso>.</p></item>
+ <seealso marker="#Module:post_end_per_testcase-5"><c>post_end_per_testcase</c></seealso>.</p></item>
</list>
<p>If the skipped test case belongs to a test case group, the first
@@ -559,6 +614,11 @@
For details, see section
<seealso marker="event_handler_chapter#events">Event Handling</seealso>
in the User's Guide.</p>
+
+ <p>If <c>Module:on_tc_skip/4</c> is not exported, common_test
+ will attempt to call <c>Module:on_tc_skip(TestName, Reason,
+ CTHState)</c> instead. This is for backwards
+ compatibility.</p>
</desc>
</func>
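A hook skeleton using the new suite-aware callbacks documented above; when these arities are not exported, common_test falls back to the old ones without the SuiteName argument (module name is an example):

    -module(example_cth).
    -export([init/2,
             pre_init_per_group/4, post_end_per_testcase/5,
             on_tc_fail/4]).

    init(_Id, Opts) ->
        {ok, Opts}.

    pre_init_per_group(Suite, Group, Config, State) ->
        ct:pal("~w: entering group ~w", [Suite, Group]),
        {Config, State}.

    post_end_per_testcase(_Suite, _TC, _Config, Return, State) ->
        {Return, State}.

    on_tc_fail(Suite, TC, Reason, State) ->
        ct:pal("~w:~w failed: ~p", [Suite, TC, Reason]),
        State.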
diff --git a/lib/common_test/doc/src/ct_hooks_chapter.xml b/lib/common_test/doc/src/ct_hooks_chapter.xml
index 0e4c35e11f..bfad96e489 100644
--- a/lib/common_test/doc/src/ct_hooks_chapter.xml
+++ b/lib/common_test/doc/src/ct_hooks_chapter.xml
@@ -38,7 +38,7 @@
extensions of the default behavior of <c>Common Test</c> using hooks
before and after all test suite calls. CTHs allow advanced <c>Common Test</c>
users to abstract out behavior that is common to multiple test suites
- without littering all test suites with library calls. this can be used
+ without littering all test suites with library calls. This can be used
for logging, starting, and monitoring external systems,
building C files needed by the tests, and so on.</p>
@@ -175,10 +175,10 @@
<row>
<cell><seealso marker="common_test#Module:init_per_group-2">
init_per_group/2</seealso></cell>
- <cell><seealso marker="ct_hooks#Module:post_init_per_group-4">
- post_init_per_group/4</seealso> is called</cell>
- <cell><seealso marker="ct_hooks#Module:post_end_per_suite-4">
- post_end_per_group/4</seealso> has been called for that group</cell>
+ <cell><seealso marker="ct_hooks#Module:post_init_per_group-5">
+ post_init_per_group/5</seealso> is called</cell>
+ <cell><seealso marker="ct_hooks#Module:post_end_per_group-5">
+ post_end_per_group/5</seealso> has been called for that group</cell>
</row>
<tcaption>Scope of a CTH</tcaption>
</table>
@@ -245,16 +245,18 @@
</list>
<p>
- This is done in the CTH functions called pre_&lt;name of function&gt;.
- These functions take the same three arguments, <c>Name</c>,
+ This is done in the CTH functions called <c>pre_&lt;name of function&gt;</c>.
+ These functions take the arguments <c>SuiteName</c>, <c>Name</c> (group or test case name, if applicable),
<c>Config</c>, and <c>CTHState</c>. The return value of the CTH function
is always a combination of a result for the suite/group/test and an
updated <c>CTHState</c>.</p>
<p>To let the test suite continue on executing, return the configuration
- list that you want the test to use as the result. To skip or
- fail the test, return a tuple with <c>skip</c> or <c>fail</c>, and a reason
- as the result.</p>
+ list that you want the test to use as the result.</p>
+
+ <p>All pre hooks, except <c>pre_end_per_testcase/4</c>, can
+ skip or fail the test by returning a tuple with <c>skip</c> or
+ <c>fail</c>, and a reason as the result.</p>
<p><em>Example:</em></p>
<code>
@@ -290,7 +292,7 @@
<p>
This is done in the CTH functions called <c>post_&lt;name of function&gt;</c>.
- These functions take the same four arguments, <c>Name</c>,
+ These functions take the arguments <c>SuiteName</c>, <c>Name</c> (group or test case name, if applicable),
<c>Config</c>, <c>Return</c>, and <c>CTHState</c>. <c>Config</c> in this
case is the same <c>Config</c> as the testcase is called with.
<c>Return</c> is the value returned by the testcase. If the testcase
@@ -308,7 +310,7 @@
<p><em>Example:</em></p>
<code>
- post_end_per_testcase(_TC, Config, {'EXIT',{_,_}}, CTHState) -&gt;
+ post_end_per_testcase(_Suite, _TC, Config, {'EXIT',{_,_}}, CTHState) -&gt;
case db:check_consistency() of
true ->
%% DB is good, pass the test.
@@ -317,7 +319,7 @@
%% DB is not good, mark as skipped instead of failing
{{skip, "DB is inconsisten!"}, CTHState}
end;
- post_end_per_testcase(_TC, Config, Return, CTHState) -&gt;
+ post_end_per_testcase(_Suite, _TC, Config, Return, CTHState) -&gt;
%% Do nothing if tc does not crash.
{Return, CTHState}.</code>
@@ -331,8 +333,8 @@
<title>Skip and Fail Hooks</title>
<p>
After any post hook has been executed for all installed CTHs,
- <seealso marker="ct_hooks#Module:on_tc_fail-3">on_tc_fail</seealso>
- or <seealso marker="ct_hooks#Module:on_tc_skip-3">on_tc_skip</seealso>
+ <seealso marker="ct_hooks#Module:on_tc_fail-4">on_tc_fail</seealso>
+ or <seealso marker="ct_hooks#Module:on_tc_skip-4">on_tc_skip</seealso>
is called if the testcase failed or was skipped, respectively.
You cannot affect the outcome of the tests any further at this point.
</p>
@@ -389,18 +391,18 @@
-export([pre_end_per_suite/3]).
-export([post_end_per_suite/4]).
- -export([pre_init_per_group/3]).
- -export([post_init_per_group/4]).
- -export([pre_end_per_group/3]).
- -export([post_end_per_group/4]).
+ -export([pre_init_per_group/4]).
+ -export([post_init_per_group/5]).
+ -export([pre_end_per_group/4]).
+ -export([post_end_per_group/5]).
- -export([pre_init_per_testcase/3]).
- -export([post_init_per_testcase/4]).
- -export([pre_end_per_testcase/3]).
- -export([post_end_per_testcase/4]).
+ -export([pre_init_per_testcase/4]).
+ -export([post_init_per_testcase/5]).
+ -export([pre_end_per_testcase/4]).
+ -export([post_end_per_testcase/5]).
- -export([on_tc_fail/3]).
- -export([on_tc_skip/3]).
+ -export([on_tc_fail/4]).
+ -export([on_tc_skip/4]).
-export([terminate/1]).
@@ -435,46 +437,46 @@
total = State#state.total + State#state.suite_total } }.
%% @doc Called before each init_per_group.
- pre_init_per_group(Group,Config,State) ->
+ pre_init_per_group(Suite,Group,Config,State) ->
{Config, State}.
%% @doc Called after each init_per_group.
- post_init_per_group(Group,Config,Return,State) ->
+ post_init_per_group(Suite,Group,Config,Return,State) ->
{Return, State}.
%% @doc Called before each end_per_group.
- pre_end_per_group(Group,Config,State) ->
+ pre_end_per_group(Suite,Group,Config,State) ->
{Config, State}.
%% @doc Called after each end_per_group.
- post_end_per_group(Group,Config,Return,State) ->
+ post_end_per_group(Suite,Group,Config,Return,State) ->
{Return, State}.
%% @doc Called before each init_per_testcase.
- pre_init_per_testcase(TC,Config,State) ->
+ pre_init_per_testcase(Suite,TC,Config,State) ->
{Config, State#state{ ts = now(), total = State#state.suite_total + 1 } }.
%% Called after each init_per_testcase (immediately before the test case).
- post_init_per_testcase(TC,Config,Return,State) ->
+ post_init_per_testcase(Suite,TC,Config,Return,State) ->
 {Return, State}.
%% @doc Called before each end_per_testcase (immediately after the test case).
- pre_end_per_testcase(TC,Config,State) ->
+ pre_end_per_testcase(Suite,TC,Config,State) ->
{Config, State}.
%% @doc Called after each end_per_testcase.
- post_end_per_testcase(TC,Config,Return,State) ->
- TCInfo = {testcase, TC, Return, timer:now_diff(now(), State#state.ts)},
+ post_end_per_testcase(Suite,TC,Config,Return,State) ->
+ TCInfo = {testcase, Suite, TC, Return, timer:now_diff(now(), State#state.ts)},
{Return, State#state{ ts = undefined, tcs = [TCInfo | State#state.tcs] } }.
%% @doc Called after post_init_per_suite, post_end_per_suite, post_init_per_group,
%% post_end_per_group and post_end_per_testcase if the suite, group or test case failed.
- on_tc_fail(TC, Reason, State) ->
+ on_tc_fail(Suite, TC, Reason, State) ->
State.
%% @doc Called when a test case is skipped by either user action
%% or due to an init function failing.
- on_tc_skip(TC, Reason, State) ->
+ on_tc_skip(Suite, TC, Reason, State) ->
State.
%% @doc Called when the scope of the CTH is done
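A sketch of the skip/fail return described earlier in the chapter: any pre hook except pre_end_per_testcase/4 may replace the configuration with a skip or fail tuple (the environment variable is only an example condition):

    pre_init_per_testcase(_Suite, _TC, Config, State) ->
        case os:getenv("EXTERNAL_SYSTEM_UP") of
            false ->
                {{skip, "external system not available"}, State};
            _ ->
                {Config, State}
        end.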
diff --git a/lib/common_test/doc/src/ct_testspec.xml b/lib/common_test/doc/src/ct_testspec.xml
new file mode 100644
index 0000000000..36893f66cf
--- /dev/null
+++ b/lib/common_test/doc/src/ct_testspec.xml
@@ -0,0 +1,84 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<!DOCTYPE erlref SYSTEM "erlref.dtd">
+
+<erlref>
+ <header>
+ <copyright>
+ <year>2016</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+ </legalnotice>
+
+ <title>ct_testspec</title>
+ <prepared></prepared>
+ <responsible></responsible>
+ <docno></docno>
+ <approved></approved>
+ <checked></checked>
+ <date></date>
+ <rev>A</rev>
+ <file>ct_testspec.xml</file>
+ </header>
+ <module>ct_testspec</module>
+ <modulesummary>Parsing of test specifications for Common Test.
+ </modulesummary>
+
+<description>
+
+ <p>Parsing of test specifications for <c>Common Test</c>.</p>
+
+ <p>This module exports helper functions for parsing test specifications.</p>
+
+</description>
+
+ <funcs>
+ <func>
+ <name>get_tests(SpecsIn) -&gt; {ok, [{Specs,Tests}]} | {error, Reason}</name>
+ <fsummary>Parse the given test specification files and return the tests to run and skip.</fsummary>
+ <type>
+ <v>SpecsIn = [string()] | [[string()]]</v>
+ <v>Specs = [string()]</v>
+ <v>Tests = [{Node,Run,Skip}]</v>
+ <v>Node = atom()</v>
+ <v>Run = {Dir,Suites,Cases}</v>
+ <v>Skip = {Dir,Suites,Comment} | {Dir,Suites,Cases,Comment}</v>
+ <v>Dir = string()</v>
+ <v>Suites = atom() | [atom()] | all</v>
+ <v>Cases = atom() | [atom()] | all</v>
+ <v>Comment = string()</v>
+ <v>Reason = term()</v>
+ </type>
+ <desc><marker id="get_tests-1"/>
+ <p>Parse the given test specification files and return the
+ tests to run and skip.</p>
+
+ <p>If <c>SpecsIn=[Spec1,Spec2,...]</c>, separate tests will be
+ created per specification. If
+ <c>SpecsIn=[[Spec1,Spec2,...]]</c>, all specifications will be
+ merged into one test.</p>
+
+ <p>For each test, a <c>{Specs,Tests}</c> element is returned,
+ where <c>Specs</c> is a list of all included test
+ specifications, and <c>Tests</c> specifies actual tests to
+ run/skip per node.</p>
+ </desc>
+ </func>
+
+ </funcs>
+
+</erlref>
+
+
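A usage sketch for the new function (the spec file names are hypothetical):

    %% One test per specification file:
    {ok, PerSpec} = ct_testspec:get_tests(["a.spec", "b.spec"]).
    %% All specification files merged into a single test:
    {ok, [MergedTest]} = ct_testspec:get_tests([["a.spec", "b.spec"]]).
    %% Each element is a {Specs,Tests} pair; Tests holds the per-node
    %% run and skip sets described above.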
diff --git a/lib/common_test/doc/src/ref_man.xml b/lib/common_test/doc/src/ref_man.xml
index d1567e2d3c..1ac20db5c2 100644
--- a/lib/common_test/doc/src/ref_man.xml
+++ b/lib/common_test/doc/src/ref_man.xml
@@ -47,6 +47,7 @@
<xi:include href="ct_slave.xml"/>
<xi:include href="ct_hooks.xml"/>
<xi:include href="ct_property_test.xml"/>
+ <xi:include href="ct_testspec.xml"/>
</application>
diff --git a/lib/common_test/doc/src/write_test_chapter.xml b/lib/common_test/doc/src/write_test_chapter.xml
index f70bdb16c5..6a0d87bcaf 100644
--- a/lib/common_test/doc/src/write_test_chapter.xml
+++ b/lib/common_test/doc/src/write_test_chapter.xml
@@ -566,7 +566,7 @@
for the test cases in the group. After execution of the group is finished, function
<seealso marker="common_test#Module:end_per_group-2"><c>end_per_group(GroupName, Config)</c></seealso>
is called. This function is meant to be used for cleaning up after
- <c>init_per_group/2</c>.</p>
+ <c>init_per_group/2</c>. If the init function is defined, the end function must also be defined.</p>
<p>Whenever a group is executed, if <c>init_per_group</c> and
<c>end_per_group</c> do not exist in the suite, <c>Common Test</c> calls
diff --git a/lib/common_test/src/common_test.app.src b/lib/common_test/src/common_test.app.src
index 77588af59b..dfa321c901 100644
--- a/lib/common_test/src/common_test.app.src
+++ b/lib/common_test/src/common_test.app.src
@@ -22,6 +22,7 @@
{vsn, "%VSN%"},
{modules, [ct_cover,
ct,
+ ct_default_gl,
ct_event,
ct_framework,
ct_ftp,
diff --git a/lib/common_test/src/ct_framework.erl b/lib/common_test/src/ct_framework.erl
index 291a4d716c..43f1c9de0f 100644
--- a/lib/common_test/src/ct_framework.erl
+++ b/lib/common_test/src/ct_framework.erl
@@ -52,6 +52,10 @@
%%%
%%% @doc Test server framework callback, called by the test_server
%%% when a new test case is started.
+init_tc(_,{end_per_testcase_not_run,_},[Config]) ->
+ %% Testcase is completed (skipped or failed), but end_per_testcase
+ %% is not run - don't call pre-hook.
+ {ok,[Config]};
init_tc(Mod,EPTC={end_per_testcase,_},[Config]) ->
%% in case Mod == ct_framework, lookup the suite name
Suite = get_suite_name(Mod, Config),
@@ -62,7 +66,7 @@ init_tc(Mod,EPTC={end_per_testcase,_},[Config]) ->
Other
end;
-init_tc(Mod,Func0,Args) ->
+init_tc(Mod,Func0,Args) ->
%% in case Mod == ct_framework, lookup the suite name
Suite = get_suite_name(Mod, Args),
{Func,HookFunc} = case Func0 of
@@ -84,12 +88,15 @@ init_tc(Mod,Func0,Args) ->
andalso Func=/=end_per_group
andalso ct_util:get_testdata(skip_rest) of
true ->
+ initialize(false,Mod,Func,Args),
{auto_skip,"Repeated test stopped by force_stop option"};
_ ->
case ct_util:get_testdata(curr_tc) of
{Suite,{suite0_failed,{require,Reason}}} ->
+ initialize(false,Mod,Func,Args),
{auto_skip,{require_failed_in_suite0,Reason}};
{Suite,{suite0_failed,_}=Failure} ->
+ initialize(false,Mod,Func,Args),
{fail,Failure};
_ ->
ct_util:update_testdata(curr_tc,
@@ -118,16 +125,14 @@ init_tc(Mod,Func0,Args) ->
end,
init_tc1(Mod,Suite,Func,HookFunc,Args);
{failed,Seq,BadFunc} ->
- {auto_skip,{sequence_failed,Seq,BadFunc}}
+ initialize(false,Mod,Func,Args),
+ {auto_skip,{sequence_failed,Seq,BadFunc}}
end
end
end.
init_tc1(?MODULE,_,error_in_suite,_,[Config0]) when is_list(Config0) ->
- ct_logs:init_tc(false),
- ct_event:notify(#event{name=tc_start,
- node=node(),
- data={?MODULE,error_in_suite}}),
+ initialize(false,?MODULE,error_in_suite),
_ = ct_suite_init(?MODULE,error_in_suite,[],Config0),
case ?val(error,Config0) of
undefined ->
@@ -177,27 +182,21 @@ init_tc1(Mod,Suite,Func,HookFunc,[Config0]) when is_list(Config0) ->
ct_config:delete_default_config(testcase),
HookFunc
end,
- Initialize = fun() ->
- ct_logs:init_tc(false),
- ct_event:notify(#event{name=tc_start,
- node=node(),
- data={Mod,FuncSpec}})
- end,
case add_defaults(Mod,Func,AllGroups) of
Error = {suite0_failed,_} ->
- Initialize(),
+ initialize(false,Mod,FuncSpec),
ct_util:set_testdata({curr_tc,{Suite,Error}}),
{error,Error};
Error = {group0_failed,_} ->
- Initialize(),
+ initialize(false,Mod,FuncSpec),
{auto_skip,Error};
Error = {testcase0_failed,_} ->
- Initialize(),
+ initialize(false,Mod,FuncSpec),
{auto_skip,Error};
{SuiteInfo,MergeResult} ->
case MergeResult of
{error,Reason} ->
- Initialize(),
+ initialize(false,Mod,FuncSpec),
{fail,Reason};
_ ->
init_tc2(Mod,Suite,Func,HookFunc1,
@@ -236,11 +235,8 @@ init_tc2(Mod,Suite,Func,HookFunc,SuiteInfo,MergeResult,Config) ->
Conns ->
ct_util:silence_connections(Conns)
end,
- ct_logs:init_tc(Func == init_per_suite),
FuncSpec = group_or_func(Func,Config),
- ct_event:notify(#event{name=tc_start,
- node=node(),
- data={Mod,FuncSpec}}),
+ initialize((Func==init_per_suite),Mod,FuncSpec),
case catch configure(MergedInfo,MergedInfo,SuiteInfo,
FuncSpec,[],Config) of
@@ -268,6 +264,18 @@ init_tc2(Mod,Suite,Func,HookFunc,SuiteInfo,MergeResult,Config) ->
end
end.
+initialize(RefreshLogs,Mod,Func,[Config]) when is_list(Config) ->
+ initialize(RefreshLogs,Mod,group_or_func(Func,Config));
+initialize(RefreshLogs,Mod,Func,_) ->
+ initialize(RefreshLogs,Mod,Func).
+
+initialize(RefreshLogs,Mod,FuncSpec) ->
+ ct_logs:init_tc(RefreshLogs),
+ ct_event:notify(#event{name=tc_start,
+ node=node(),
+ data={Mod,FuncSpec}}).
+
+
ct_suite_init(Suite,HookFunc,PostInitHook,Config) when is_list(Config) ->
case ct_hooks:init_tc(Suite,HookFunc,Config) of
NewConfig when is_list(NewConfig) ->
@@ -675,22 +683,35 @@ end_tc(Mod,Func,{Result,[Args]}, Return) ->
end_tc(Mod,Func,self(),Result,Args,Return).
end_tc(Mod,IPTC={init_per_testcase,_Func},_TCPid,Result,Args,Return) ->
- %% in case Mod == ct_framework, lookup the suite name
- Suite = get_suite_name(Mod, Args),
- case ct_hooks:end_tc(Suite,IPTC,Args,Result,Return) of
- '$ct_no_change' ->
- ok;
- HookResult ->
- HookResult
+ case end_hook_func(IPTC,Return,IPTC) of
+ undefined -> ok;
+ _ ->
+ %% in case Mod == ct_framework, lookup the suite name
+ Suite = get_suite_name(Mod, Args),
+ case ct_hooks:end_tc(Suite,IPTC,Args,Result,Return) of
+ '$ct_no_change' ->
+ ok;
+ HookResult ->
+ HookResult
+ end
end;
end_tc(Mod,Func0,TCPid,Result,Args,Return) ->
%% in case Mod == ct_framework, lookup the suite name
Suite = get_suite_name(Mod, Args),
- {EPTC,Func} = case Func0 of
- {end_per_testcase,F} -> {true,F};
- _ -> {false,Func0}
- end,
+ {Func,FuncSpec,HookFunc} =
+ case Func0 of
+ {end_per_testcase_not_run,F} ->
+ %% Testcase is completed (skipped or failed), but
+ %% end_per_testcase is not run - don't call post-hook.
+ {F,F,undefined};
+ {end_per_testcase,F} ->
+ {F,F,Func0};
+ _ ->
+ FS = group_or_func(Func0,Args),
+ HF = end_hook_func(Func0,Return,FS),
+ {Func0,FS,HF}
+ end,
test_server:timetrap_cancel(),
@@ -717,20 +738,18 @@ end_tc(Mod,Func0,TCPid,Result,Args,Return) ->
end,
ct_util:delete_suite_data(last_saved_config),
- {FuncSpec,HookFunc} =
- if not EPTC ->
- FS = group_or_func(Func,Args),
- {FS,FS};
- true ->
- {Func,Func0}
- end,
{Result1,FinalNotify} =
- case ct_hooks:end_tc(Suite,HookFunc,Args,Result,Return) of
- '$ct_no_change' ->
- {ok,Result};
- HookResult ->
- {HookResult,HookResult}
- end,
+ case HookFunc of
+ undefined ->
+ {ok,Result};
+ _ ->
+ case ct_hooks:end_tc(Suite,HookFunc,Args,Result,Return) of
+ '$ct_no_change' ->
+ {ok,Result};
+ HookResult ->
+ {HookResult,HookResult}
+ end
+ end,
FinalResult =
case get('$test_server_framework_test') of
undefined ->
@@ -821,6 +840,34 @@ end_tc(Mod,Func0,TCPid,Result,Args,Return) ->
end,
FinalResult.
+%% This is to make sure that no post_init_per_* is ever called if the
+%% corresponding pre_init_per_* was not called.
+%% The skip or fail reasons are those that can be returned from
+%% init_tc above in situations where we never came to call
+%% ct_hooks:init_tc/3, e.g. if suite/0 fails, then we never call
+%% ct_hooks:init_tc for init_per_suite, and thus we must not call
+%% ct_hooks:end_tc for init_per_suite either.
+end_hook_func({init_per_testcase,_},{auto_skip,{sequence_failed,_,_}},_) ->
+ undefined;
+end_hook_func({init_per_testcase,_},{auto_skip,"Repeated test stopped by force_stop option"},_) ->
+ undefined;
+end_hook_func({init_per_testcase,_},{fail,{config_name_already_in_use,_}},_) ->
+ undefined;
+end_hook_func({init_per_testcase,_},{auto_skip,{InfoFuncError,_}},_)
+ when InfoFuncError==testcase0_failed;
+ InfoFuncError==require_failed ->
+ undefined;
+end_hook_func(init_per_group,{auto_skip,{InfoFuncError,_}},_)
+ when InfoFuncError==group0_failed;
+ InfoFuncError==require_failed ->
+ undefined;
+end_hook_func(init_per_suite,{auto_skip,{require_failed_in_suite0,_}},_) ->
+ undefined;
+end_hook_func(init_per_suite,{auto_skip,{failed,{error,{suite0_failed,_}}}},_) ->
+ undefined;
+end_hook_func(_,_,Default) ->
+ Default.
+
%% {error,Reason} | {skip,Reason} | {timetrap_timeout,TVal} |
%% {testcase_aborted,Reason} | testcase_aborted_or_killed |
%% {'EXIT',Reason} | {fail,Reason} | {failed,Reason} |
@@ -1339,25 +1386,25 @@ report(What,Data) ->
ok;
tc_done ->
{Suite,{Func,GrName},Result} = Data,
- Data1 = if GrName == undefined -> {Suite,Func,Result};
- true -> Data
- end,
+ FuncSpec = if GrName == undefined -> Func;
+ true -> {Func,GrName}
+ end,
%% Register the group leader for the process calling the report
%% function, making it possible for a hook function to print
%% in the test case log file
ReportingPid = self(),
ct_logs:register_groupleader(ReportingPid, group_leader()),
case Result of
- {failed, _} ->
- ct_hooks:on_tc_fail(What, Data1);
- {skipped,{failed,{_,init_per_testcase,_}}} ->
- ct_hooks:on_tc_skip(tc_auto_skip, Data1);
- {skipped,{require_failed,_}} ->
- ct_hooks:on_tc_skip(tc_auto_skip, Data1);
- {skipped,_} ->
- ct_hooks:on_tc_skip(tc_user_skip, Data1);
- {auto_skipped,_} ->
- ct_hooks:on_tc_skip(tc_auto_skip, Data1);
+ {failed, Reason} ->
+ ct_hooks:on_tc_fail(What, {Suite,FuncSpec,Reason});
+ {skipped,{failed,{_,init_per_testcase,_}}=Reason} ->
+ ct_hooks:on_tc_skip(tc_auto_skip, {Suite,FuncSpec,Reason});
+ {skipped,{require_failed,_}=Reason} ->
+ ct_hooks:on_tc_skip(tc_auto_skip, {Suite,FuncSpec,Reason});
+ {skipped,Reason} ->
+ ct_hooks:on_tc_skip(tc_user_skip, {Suite,FuncSpec,Reason});
+ {auto_skipped,Reason} ->
+ ct_hooks:on_tc_skip(tc_auto_skip, {Suite,FuncSpec,Reason});
_Else ->
ok
end,
diff --git a/lib/common_test/src/ct_groups.erl b/lib/common_test/src/ct_groups.erl
index 1375e7dcc7..1c9faf6a70 100644
--- a/lib/common_test/src/ct_groups.erl
+++ b/lib/common_test/src/ct_groups.erl
@@ -442,17 +442,21 @@ make_conf(Mod, Name, Props, TestSpec) ->
ok
end,
{InitConf,EndConf,ExtraProps} =
- case erlang:function_exported(Mod,init_per_group,2) of
- true ->
- {{Mod,init_per_group},{Mod,end_per_group},[]};
- false ->
+ case {erlang:function_exported(Mod,init_per_group,2),
+ erlang:function_exported(Mod,end_per_group,2)} of
+ {false,false} ->
ct_logs:log("TEST INFO", "init_per_group/2 and "
"end_per_group/2 missing for group "
"~w in ~w, using default.",
[Name,Mod]),
{{ct_framework,init_per_group},
{ct_framework,end_per_group},
- [{suite,Mod}]}
+ [{suite,Mod}]};
+ _ ->
+ %% If one of these is exported, the other must be too
+ %% (this is required and documented). If it is not, the call
+ %% will fail with reason 'undef'.
+ {{Mod,init_per_group},{Mod,end_per_group},[]}
end,
{conf,[{name,Name}|Props++ExtraProps],InitConf,TestSpec,EndConf}.
diff --git a/lib/common_test/src/ct_hooks.erl b/lib/common_test/src/ct_hooks.erl
index c9a4abb5ee..60d1ea2b1c 100644
--- a/lib/common_test/src/ct_hooks.erl
+++ b/lib/common_test/src/ct_hooks.erl
@@ -92,15 +92,17 @@ init_tc(Mod, end_per_suite, Config) ->
call(fun call_generic/3, Config, [pre_end_per_suite, Mod]);
init_tc(Mod, {init_per_group, GroupName, Properties}, Config) ->
maybe_start_locker(Mod, GroupName, Properties),
- call(fun call_generic/3, Config, [pre_init_per_group, GroupName]);
-init_tc(_Mod, {end_per_group, GroupName, _}, Config) ->
- call(fun call_generic/3, Config, [pre_end_per_group, GroupName]);
-init_tc(_Mod, {init_per_testcase,TC}, Config) ->
- call(fun call_generic/3, Config, [pre_init_per_testcase, TC]);
-init_tc(_Mod, {end_per_testcase,TC}, Config) ->
- call(fun call_generic/3, Config, [pre_end_per_testcase, TC]);
-init_tc(_Mod, TC = error_in_suite, Config) ->
- call(fun call_generic/3, Config, [pre_init_per_testcase, TC]).
+ call(fun call_generic_fallback/3, Config,
+ [pre_init_per_group, Mod, GroupName]);
+init_tc(Mod, {end_per_group, GroupName, _}, Config) ->
+ call(fun call_generic_fallback/3, Config,
+ [pre_end_per_group, Mod, GroupName]);
+init_tc(Mod, {init_per_testcase,TC}, Config) ->
+ call(fun call_generic_fallback/3, Config, [pre_init_per_testcase, Mod, TC]);
+init_tc(Mod, {end_per_testcase,TC}, Config) ->
+ call(fun call_generic_fallback/3, Config, [pre_end_per_testcase, Mod, TC]);
+init_tc(Mod, TC = error_in_suite, Config) ->
+ call(fun call_generic_fallback/3, Config, [pre_init_per_testcase, Mod, TC]).
%% @doc Called as each test case is completed. This includes all configuration
%% tests.
@@ -126,23 +128,23 @@ end_tc(Mod, init_per_suite, Config, _Result, Return) ->
end_tc(Mod, end_per_suite, Config, Result, _Return) ->
call(fun call_generic/3, Result, [post_end_per_suite, Mod, Config],
'$ct_no_change');
-end_tc(_Mod, {init_per_group, GroupName, _}, Config, _Result, Return) ->
- call(fun call_generic/3, Return, [post_init_per_group, GroupName, Config],
- '$ct_no_change');
+end_tc(Mod, {init_per_group, GroupName, _}, Config, _Result, Return) ->
+ call(fun call_generic_fallback/3, Return,
+ [post_init_per_group, Mod, GroupName, Config], '$ct_no_change');
end_tc(Mod, {end_per_group, GroupName, Properties}, Config, Result, _Return) ->
- Res = call(fun call_generic/3, Result,
- [post_end_per_group, GroupName, Config], '$ct_no_change'),
+ Res = call(fun call_generic_fallback/3, Result,
+ [post_end_per_group, Mod, GroupName, Config], '$ct_no_change'),
maybe_stop_locker(Mod, GroupName, Properties),
Res;
-end_tc(_Mod, {init_per_testcase,TC}, Config, Result, _Return) ->
- call(fun call_generic/3, Result, [post_init_per_testcase, TC, Config],
- '$ct_no_change');
-end_tc(_Mod, {end_per_testcase,TC}, Config, Result, _Return) ->
- call(fun call_generic/3, Result, [post_end_per_testcase, TC, Config],
- '$ct_no_change');
-end_tc(_Mod, TC = error_in_suite, Config, Result, _Return) ->
- call(fun call_generic/3, Result, [post_end_per_testcase, TC, Config],
- '$ct_no_change').
+end_tc(Mod, {init_per_testcase,TC}, Config, Result, _Return) ->
+ call(fun call_generic_fallback/3, Result,
+ [post_init_per_testcase, Mod, TC, Config], '$ct_no_change');
+end_tc(Mod, {end_per_testcase,TC}, Config, Result, _Return) ->
+ call(fun call_generic_fallback/3, Result,
+ [post_end_per_testcase, Mod, TC, Config], '$ct_no_change');
+end_tc(Mod, TC = error_in_suite, Config, Result, _Return) ->
+ call(fun call_generic_fallback/3, Result,
+ [post_end_per_testcase, Mod, TC, Config], '$ct_no_change').
%% Case = TestCase | {TestCase,GroupName}
@@ -181,15 +183,21 @@ call_terminate(#ct_hook_config{ module = Mod, state = State} = Hook, _, _) ->
{[],Hook}.
call_cleanup(#ct_hook_config{ module = Mod, state = State} = Hook,
- Reason, [Function, _Suite | Args]) ->
+ Reason, [Function | Args]) ->
NewState = catch_apply(Mod,Function, Args ++ [Reason, State],
- State),
+ State, true),
{Reason, Hook#ct_hook_config{ state = NewState } }.
-call_generic(#ct_hook_config{ module = Mod, state = State} = Hook,
- Value, [Function | Args]) ->
+call_generic(Hook, Value, Meta) ->
+ do_call_generic(Hook, Value, Meta, false).
+
+call_generic_fallback(Hook, Value, Meta) ->
+ do_call_generic(Hook, Value, Meta, true).
+
+do_call_generic(#ct_hook_config{ module = Mod, state = State} = Hook,
+ Value, [Function | Args], Fallback) ->
{NewValue, NewState} = catch_apply(Mod, Function, Args ++ [Value, State],
- {Value,State}),
+ {Value,State}, Fallback),
{NewValue, Hook#ct_hook_config{ state = NewState } }.
%% Generic call function
@@ -257,15 +265,15 @@ remove(Key,List) when is_list(List) ->
remove(_, Else) ->
Else.
-%% Translate scopes, i.e. init_per_group,group1 -> end_per_group,group1 etc
-scope([pre_init_per_testcase, TC|_]) ->
- [post_init_per_testcase, TC];
-scope([pre_end_per_testcase, TC|_]) ->
- [post_end_per_testcase, TC];
-scope([pre_init_per_group, GroupName|_]) ->
- [post_end_per_group, GroupName];
-scope([post_init_per_group, GroupName|_]) ->
- [post_end_per_group, GroupName];
+%% Translate scopes, i.e. init_per_group,group1 -> end_per_group,group1 etc
+scope([pre_init_per_testcase, SuiteName, TC|_]) ->
+ [post_init_per_testcase, SuiteName, TC];
+scope([pre_end_per_testcase, SuiteName, TC|_]) ->
+ [post_end_per_testcase, SuiteName, TC];
+scope([pre_init_per_group, SuiteName, GroupName|_]) ->
+ [post_end_per_group, SuiteName, GroupName];
+scope([post_init_per_group, SuiteName, GroupName|_]) ->
+ [post_end_per_group, SuiteName, GroupName];
scope([pre_init_per_suite, SuiteName|_]) ->
[post_end_per_suite, SuiteName];
scope([post_init_per_suite, SuiteName|_]) ->
@@ -273,14 +281,29 @@ scope([post_init_per_suite, SuiteName|_]) ->
scope(init) ->
none.
-terminate_if_scope_ends(HookId, [on_tc_skip,_Suite,{end_per_group,Name}],
+strip_config([post_init_per_testcase, SuiteName, TC|_]) ->
+ [post_init_per_testcase, SuiteName, TC];
+strip_config([post_end_per_testcase, SuiteName, TC|_]) ->
+ [post_end_per_testcase, SuiteName, TC];
+strip_config([post_init_per_group, SuiteName, GroupName|_]) ->
+ [post_init_per_group, SuiteName, GroupName];
+strip_config([post_end_per_group, SuiteName, GroupName|_]) ->
+ [post_end_per_group, SuiteName, GroupName];
+strip_config([post_init_per_suite, SuiteName|_]) ->
+ [post_init_per_suite, SuiteName];
+strip_config([post_end_per_suite, SuiteName|_]) ->
+ [post_end_per_suite, SuiteName];
+strip_config(Other) ->
+ Other.
+
+
+terminate_if_scope_ends(HookId, [on_tc_skip,Suite,{end_per_group,Name}],
Hooks) ->
- terminate_if_scope_ends(HookId, [post_end_per_group, Name], Hooks);
+ terminate_if_scope_ends(HookId, [post_end_per_group, Suite, Name], Hooks);
terminate_if_scope_ends(HookId, [on_tc_skip,Suite,end_per_suite], Hooks) ->
terminate_if_scope_ends(HookId, [post_end_per_suite, Suite], Hooks);
-terminate_if_scope_ends(HookId, [Function,Tag|T], Hooks) when T =/= [] ->
- terminate_if_scope_ends(HookId,[Function,Tag],Hooks);
-terminate_if_scope_ends(HookId, Function, Hooks) ->
+terminate_if_scope_ends(HookId, Function0, Hooks) ->
+ Function = strip_config(Function0),
case lists:keyfind(HookId, #ct_hook_config.id, Hooks) of
#ct_hook_config{ id = HookId, scope = Function} = Hook ->
terminate([Hook]),
@@ -384,21 +407,29 @@ pos(Id,[_|Rest],Num) ->
catch_apply(M,F,A, Default) ->
+ catch_apply(M,F,A,Default,false).
+catch_apply(M,F,A, Default, Fallback) ->
+ not erlang:module_loaded(M) andalso (catch M:module_info()),
+ case erlang:function_exported(M,F,length(A)) of
+ false when Fallback ->
+ catch_apply(M,F,tl(A),Default,false);
+ false ->
+ Default;
+ true ->
+ catch_apply(M,F,A)
+ end.
+
+catch_apply(M,F,A) ->
try
- erlang:apply(M,F,A)
+ erlang:apply(M,F,A)
catch _:Reason ->
- case erlang:get_stacktrace() of
- %% Return the default if it was the CTH module which did not have the function.
- [{M,F,A,_}|_] when Reason == undef ->
- Default;
- Trace ->
- ct_logs:log("Suite Hook","Call to CTH failed: ~w:~p",
- [error,{Reason,Trace}]),
- throw({error_in_cth_call,
- lists:flatten(
- io_lib:format("~w:~w/~w CTH call failed",
- [M,F,length(A)]))})
- end
+ Trace = erlang:get_stacktrace(),
+ ct_logs:log("Suite Hook","Call to CTH failed: ~w:~p",
+ [error,{Reason,Trace}]),
+ throw({error_in_cth_call,
+ lists:flatten(
+ io_lib:format("~w:~w/~w CTH call failed",
+ [M,F,length(A)]))})
end.
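A sketch of the fallback idea implemented by catch_apply/5 above: if the hook module does not export the new arity, retry with the first argument (the suite name) dropped so old-style callbacks keep working; Default is returned when neither arity exists:

    call_hook(Mod, Fun, Args, Default) ->
        case erlang:function_exported(Mod, Fun, length(Args)) of
            true ->
                apply(Mod, Fun, Args);
            false ->
                case erlang:function_exported(Mod, Fun, length(Args) - 1) of
                    true  -> apply(Mod, Fun, tl(Args));   % old-style callback
                    false -> Default
                end
        end.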
diff --git a/lib/common_test/src/ct_release_test.erl b/lib/common_test/src/ct_release_test.erl
index d783f8d04e..c53e72ee88 100644
--- a/lib/common_test/src/ct_release_test.erl
+++ b/lib/common_test/src/ct_release_test.erl
@@ -132,7 +132,7 @@
%%-----------------------------------------------------------------
-define(testnode, 'ct_release_test-upgrade').
--define(exclude_apps, [hipe, typer, dialyzer]). % never include these apps
+-define(exclude_apps, [hipe, dialyzer]). % never include these apps
%%-----------------------------------------------------------------
-record(ct_data, {from,to}).
diff --git a/lib/common_test/src/ct_run.erl b/lib/common_test/src/ct_run.erl
index a049ef5695..cac176de3a 100644
--- a/lib/common_test/src/ct_run.erl
+++ b/lib/common_test/src/ct_run.erl
@@ -76,8 +76,8 @@
abort_if_missing_suites,
silent_connections = [],
stylesheet,
- multiply_timetraps = 1,
- scale_timetraps = false,
+ multiply_timetraps,
+ scale_timetraps,
create_priv_dir,
testspec_files = [],
current_testspec,
@@ -264,11 +264,11 @@ script_start1(Parent, Args) ->
[], Args),
Verbosity = verbosity_args2opts(Args),
MultTT = get_start_opt(multiply_timetraps,
- fun([MT]) -> list_to_integer(MT) end, 1, Args),
+ fun([MT]) -> list_to_integer(MT) end, Args),
ScaleTT = get_start_opt(scale_timetraps,
fun([CT]) -> list_to_atom(CT);
([]) -> true
- end, false, Args),
+ end, Args),
CreatePrivDir = get_start_opt(create_priv_dir,
fun([PD]) -> list_to_atom(PD);
([]) -> auto_per_tc
@@ -1055,8 +1055,8 @@ run_test2(StartOpts) ->
CoverStop = get_start_opt(cover_stop, value, StartOpts),
%% timetrap manipulation
- MultiplyTT = get_start_opt(multiply_timetraps, value, 1, StartOpts),
- ScaleTT = get_start_opt(scale_timetraps, value, false, StartOpts),
+ MultiplyTT = get_start_opt(multiply_timetraps, value, StartOpts),
+ ScaleTT = get_start_opt(scale_timetraps, value, StartOpts),
%% create unique priv dir names
CreatePrivDir = get_start_opt(create_priv_dir, value, StartOpts),
@@ -2280,8 +2280,19 @@ do_run_test(Tests, Skip, Opts0) ->
_Lower ->
ok
end,
- test_server_ctrl:multiply_timetraps(Opts0#opts.multiply_timetraps),
- test_server_ctrl:scale_timetraps(Opts0#opts.scale_timetraps),
+
+ case Opts0#opts.multiply_timetraps of
+ undefined -> MultTT = 1;
+ MultTT -> MultTT
+ end,
+ case Opts0#opts.scale_timetraps of
+ undefined -> ScaleTT = false;
+ ScaleTT -> ScaleTT
+ end,
+ ct_logs:log("TEST INFO","Timetrap time multiplier = ~w~n"
+ "Timetrap scaling enabled = ~w", [MultTT,ScaleTT]),
+ test_server_ctrl:multiply_timetraps(MultTT),
+ test_server_ctrl:scale_timetraps(ScaleTT),
test_server_ctrl:create_priv_dir(choose_val(
Opts0#opts.create_priv_dir,
diff --git a/lib/common_test/src/ct_testspec.erl b/lib/common_test/src/ct_testspec.erl
index 991abb0666..466a2c7658 100644
--- a/lib/common_test/src/ct_testspec.erl
+++ b/lib/common_test/src/ct_testspec.erl
@@ -26,7 +26,8 @@
-export([prepare_tests/1, prepare_tests/2,
collect_tests_from_list/2, collect_tests_from_list/3,
- collect_tests_from_file/2, collect_tests_from_file/3]).
+ collect_tests_from_file/2, collect_tests_from_file/3,
+ get_tests/1]).
-export([testspec_rec2list/1, testspec_rec2list/2]).
@@ -803,6 +804,31 @@ list_nodes(#testspec{nodes=NodeRefs}) ->
lists:map(fun({_Ref,Node}) -> Node end, NodeRefs).
+%%%-----------------------------------------------------------------
+%%% Parse the given test specs and return the complete set of specs
+%%% and tests to run/skip.
+%%% [Spec1,Spec2,...] means create separate tests per spec
+%%% [[Spec1,Spec2,...]] means merge all specs into one
+-spec get_tests(Specs) -> {ok,[{Specs,Tests}]} | {error,Reason} when
+ Specs :: [string()] | [[string()]],
+ Tests :: {Node,Run,Skip},
+ Node :: atom(),
+ Run :: {Dir,Suites,Cases},
+ Skip :: {Dir,Suites,Comment} | {Dir,Suites,Cases,Comment},
+ Dir :: string(),
+ Suites :: atom() | [atom()] | all,
+ Cases :: atom() | [atom()] | all,
+ Comment :: string(),
+ Reason :: term().
+
+get_tests(Specs) ->
+ case collect_tests_from_file(Specs,true) of
+ Tests when is_list(Tests) ->
+ {ok,[{S,prepare_tests(R)} || {S,R} <- Tests]};
+ Error ->
+ Error
+ end.
+
%% -----------------------------------------------------
%% / \
%% | When adding test/config terms, remember to update |
@@ -1132,6 +1158,11 @@ handle_data(verbosity,Node,VLvls,_Spec) when is_list(VLvls) ->
VLvls1 = lists:map(fun(VLvl = {_Cat,_Lvl}) -> VLvl;
(Lvl) -> {'$unspecified',Lvl} end, VLvls),
[{Node,VLvls1}];
+handle_data(multiply_timetraps,Node,Mult,_Spec) when is_integer(Mult) ->
+ [{Node,Mult}];
+handle_data(scale_timetraps,Node,Scale,_Spec) when Scale == true;
+ Scale == false ->
+ [{Node,Scale}];
handle_data(silent_connections,Node,all,_Spec) ->
[{Node,[all]}];
handle_data(silent_connections,Node,Conn,_Spec) when is_atom(Conn) ->
@@ -1150,6 +1181,8 @@ should_be_added(Tag,Node,_Data,Spec) ->
Tag == label; Tag == auto_compile;
Tag == abort_if_missing_suites;
Tag == stylesheet; Tag == verbosity;
+ Tag == multiply_timetraps;
+ Tag == scale_timetraps;
Tag == silent_connections ->
lists:keymember(ref2node(Node,Spec#testspec.nodes),1,
read_field(Spec,Tag)) == false;
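The handle_data/4 and should_be_added/4 clauses above make the timetrap settings available from a test specification. A sketch of the corresponding terms (values are examples; the node-scoped form is assumed to follow the usual per-node test spec convention):

    {multiply_timetraps, 4}.
    {scale_timetraps, true}.
    %% Assumed node-scoped variant, as for other per-node terms:
    {multiply_timetraps, 'slow_node@host', 4}.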
diff --git a/lib/common_test/src/cth_conn_log.erl b/lib/common_test/src/cth_conn_log.erl
index 883da0da0a..ce8852b3ea 100644
--- a/lib/common_test/src/cth_conn_log.erl
+++ b/lib/common_test/src/cth_conn_log.erl
@@ -54,8 +54,8 @@
-include_lib("common_test/include/ct.hrl").
-export([init/2,
- pre_init_per_testcase/3,
- post_end_per_testcase/4]).
+ pre_init_per_testcase/4,
+ post_end_per_testcase/5]).
%%----------------------------------------------------------------------
%% Exported types
@@ -104,7 +104,7 @@ get_log_opts(Mod,Opts) ->
Hosts = proplists:get_value(hosts,Opts,[]),
{LogType,Hosts}.
-pre_init_per_testcase(TestCase,Config,CthState) ->
+pre_init_per_testcase(_Suite,TestCase,Config,CthState) ->
Logs =
lists:map(
fun({ConnMod,{LogType,Hosts}}) ->
@@ -158,7 +158,7 @@ pre_init_per_testcase(TestCase,Config,CthState) ->
ct_util:update_testdata(?MODULE, Update, [create]),
{Config,CthState}.
-post_end_per_testcase(TestCase,_Config,Return,CthState) ->
+post_end_per_testcase(_Suite,TestCase,_Config,Return,CthState) ->
Update =
fun(PrevUsers) ->
case lists:delete(TestCase, PrevUsers) of
diff --git a/lib/common_test/src/cth_log_redirect.erl b/lib/common_test/src/cth_log_redirect.erl
index 6d77d7ee9e..eda090d4f5 100644
--- a/lib/common_test/src/cth_log_redirect.erl
+++ b/lib/common_test/src/cth_log_redirect.erl
@@ -28,10 +28,10 @@
%% CTH Callbacks
-export([id/1, init/2,
pre_init_per_suite/3, pre_end_per_suite/3, post_end_per_suite/4,
- pre_init_per_group/3, post_init_per_group/4,
- pre_end_per_group/3, post_end_per_group/4,
- pre_init_per_testcase/3, post_init_per_testcase/4,
- pre_end_per_testcase/3, post_end_per_testcase/4]).
+ pre_init_per_group/4, post_init_per_group/5,
+ pre_end_per_group/4, post_end_per_group/5,
+ pre_init_per_testcase/4, post_init_per_testcase/5,
+ pre_end_per_testcase/4, post_end_per_testcase/5]).
%% Event handler Callbacks
-export([init/1,
@@ -71,11 +71,11 @@ post_end_per_suite(_Suite, Config, Return, State) ->
set_curr_func(undefined, Config),
{Return, State}.
-pre_init_per_group(Group, Config, State) ->
+pre_init_per_group(_Suite, Group, Config, State) ->
set_curr_func({group,Group,init_per_group}, Config),
{Config, State}.
-post_init_per_group(Group, Config, Result, tc_log_async) when is_list(Config) ->
+post_init_per_group(_Suite, Group, Config, Result, tc_log_async) when is_list(Config) ->
case lists:member(parallel,proplists:get_value(
tc_group_properties,Config,[])) of
true ->
@@ -83,33 +83,33 @@ post_init_per_group(Group, Config, Result, tc_log_async) when is_list(Config) ->
false ->
{Result, tc_log_async}
end;
-post_init_per_group(_Group, _Config, Result, State) ->
+post_init_per_group(_Suite, _Group, _Config, Result, State) ->
{Result, State}.
-pre_init_per_testcase(TC, Config, State) ->
+pre_init_per_testcase(_Suite, TC, Config, State) ->
set_curr_func(TC, Config),
{Config, State}.
-post_init_per_testcase(_TC, _Config, Return, State) ->
+post_init_per_testcase(_Suite, _TC, _Config, Return, State) ->
{Return, State}.
-pre_end_per_testcase(_TC, Config, State) ->
+pre_end_per_testcase(_Suite, _TC, Config, State) ->
{Config, State}.
-post_end_per_testcase(_TC, _Config, Result, State) ->
+post_end_per_testcase(_Suite, _TC, _Config, Result, State) ->
%% Make sure that the event queue is flushed
%% before ending this test case.
gen_event:call(error_logger, ?MODULE, flush, 300000),
{Result, State}.
-pre_end_per_group(Group, Config, {tc_log, Group}) ->
+pre_end_per_group(_Suite, Group, Config, {tc_log, Group}) ->
set_curr_func({group,Group,end_per_group}, Config),
{Config, set_log_func(tc_log_async)};
-pre_end_per_group(Group, Config, State) ->
+pre_end_per_group(_Suite, Group, Config, State) ->
set_curr_func({group,Group,end_per_group}, Config),
{Config, State}.
-post_end_per_group(_Group, Config, Return, State) ->
+post_end_per_group(_Suite, _Group, Config, Return, State) ->
set_curr_func({group,undefined}, Config),
{Return, State}.
diff --git a/lib/common_test/src/cth_surefire.erl b/lib/common_test/src/cth_surefire.erl
index 59b916851e..c4941948cc 100644
--- a/lib/common_test/src/cth_surefire.erl
+++ b/lib/common_test/src/cth_surefire.erl
@@ -33,16 +33,16 @@
-export([pre_end_per_suite/3]).
-export([post_end_per_suite/4]).
--export([pre_init_per_group/3]).
--export([post_init_per_group/4]).
--export([pre_end_per_group/3]).
--export([post_end_per_group/4]).
+-export([pre_init_per_group/4]).
+-export([post_init_per_group/5]).
+-export([pre_end_per_group/4]).
+-export([post_end_per_group/5]).
--export([pre_init_per_testcase/3]).
--export([post_end_per_testcase/4]).
+-export([pre_init_per_testcase/4]).
+-export([post_end_per_testcase/5]).
--export([on_tc_fail/3]).
--export([on_tc_skip/3]).
+-export([on_tc_fail/4]).
+-export([on_tc_skip/4]).
-export([terminate/1]).
@@ -116,29 +116,29 @@ pre_end_per_suite(_Suite,Config,State) ->
post_end_per_suite(_Suite,Config,Result,State) ->
{Result, end_tc(end_per_suite,Config,Result,State)}.
-pre_init_per_group(Group,Config,State) ->
+pre_init_per_group(_Suite,Group,Config,State) ->
{Config, init_tc(State#state{ curr_group = [Group|State#state.curr_group]},
Config)}.
-post_init_per_group(_Group,Config,Result,State) ->
+post_init_per_group(_Suite,_Group,Config,Result,State) ->
{Result, end_tc(init_per_group,Config,Result,State)}.
-pre_end_per_group(_Group,Config,State) ->
+pre_end_per_group(_Suite,_Group,Config,State) ->
{Config, init_tc(State, Config)}.
-post_end_per_group(_Group,Config,Result,State) ->
+post_end_per_group(_Suite,_Group,Config,Result,State) ->
NewState = end_tc(end_per_group, Config, Result, State),
{Result, NewState#state{ curr_group = tl(NewState#state.curr_group)}}.
-pre_init_per_testcase(_TC,Config,State) ->
+pre_init_per_testcase(_Suite,_TC,Config,State) ->
{Config, init_tc(State, Config)}.
-post_end_per_testcase(TC,Config,Result,State) ->
+post_end_per_testcase(_Suite,TC,Config,Result,State) ->
{Result, end_tc(TC,Config, Result,State)}.
-on_tc_fail(_TC, _Res, State = #state{test_cases = []}) ->
+on_tc_fail(_Suite,_TC, _Res, State = #state{test_cases = []}) ->
State;
-on_tc_fail(_TC, Res, State) ->
+on_tc_fail(_Suite,_TC, Res, State) ->
TCs = State#state.test_cases,
TC = hd(TCs),
NewTC = TC#testcase{
@@ -146,10 +146,9 @@ on_tc_fail(_TC, Res, State) ->
{fail,lists:flatten(io_lib:format("~p",[Res]))} },
State#state{ test_cases = [NewTC | tl(TCs)]}.
-on_tc_skip({ConfigFunc,_GrName},{Type,_Reason} = Res, State0)
- when Type == tc_auto_skip; Type == tc_user_skip ->
- on_tc_skip(ConfigFunc, Res, State0);
-on_tc_skip(Tc,{Type,_Reason} = Res, State0) when Type == tc_auto_skip ->
+on_tc_skip(Suite,{ConfigFunc,_GrName}, Res, State) ->
+ on_tc_skip(Suite,ConfigFunc, Res, State);
+on_tc_skip(Suite,Tc, Res, State0) ->
TcStr = atom_to_list(Tc),
State =
case State0#state.test_cases of
@@ -158,11 +157,7 @@ on_tc_skip(Tc,{Type,_Reason} = Res, State0) when Type == tc_auto_skip ->
_ ->
State0
end,
- do_tc_skip(Res, end_tc(Tc,[],Res,init_tc(State,[])));
-on_tc_skip(_Tc, _Res, State = #state{test_cases = []}) ->
- State;
-on_tc_skip(_Tc, Res, State) ->
- do_tc_skip(Res, State).
+ do_tc_skip(Res, end_tc(Tc,[],Res,init_tc(set_suite(Suite,State),[]))).
do_tc_skip(Res, State) ->
TCs = State#state.test_cases,
@@ -209,6 +204,12 @@ end_tc(Name, _Config, _Res, State = #state{ curr_suite = Suite,
result = passed }|
State#state.test_cases],
tc_log = ""}. % so old tc_log is not set if next is on_tc_skip
+
+set_suite(Suite,#state{curr_suite=undefined}=State) ->
+ State#state{curr_suite=Suite, curr_suite_ts=?now};
+set_suite(_,State) ->
+ State.
+
close_suite(#state{ test_cases = [] } = State) ->
State;
close_suite(#state{ test_cases = TCs, url_base = UrlBase } = State) ->
@@ -228,7 +229,8 @@ close_suite(#state{ test_cases = TCs, url_base = UrlBase } = State) ->
testcases = lists:reverse(TCs),
log = SuiteLog,
url = SuiteUrl},
- State#state{ test_cases = [],
+ State#state{ curr_suite = undefined,
+ test_cases = [],
test_suites = [Suite | State#state.test_suites]}.
terminate(State = #state{ test_cases = [] }) ->
diff --git a/lib/common_test/src/test_server.erl b/lib/common_test/src/test_server.erl
index 924086f2bd..be49191f2e 100644
--- a/lib/common_test/src/test_server.erl
+++ b/lib/common_test/src/test_server.erl
@@ -778,9 +778,9 @@ spawn_fw_call(Mod,IPTC={init_per_testcase,Func},CurrConf,Pid,
%% if init_per_testcase fails, the test case
%% should be skipped
try begin do_end_tc_call(Mod,IPTC, {Pid,Skip,[CurrConf]}, Why),
- do_init_tc_call(Mod,{end_per_testcase,Func},
+ do_init_tc_call(Mod,{end_per_testcase_not_run,Func},
[CurrConf],{ok,[CurrConf]}),
- do_end_tc_call(Mod,{end_per_testcase,Func},
+ do_end_tc_call(Mod,{end_per_testcase_not_run,Func},
{Pid,Skip,[CurrConf]}, Why) end of
_ -> ok
catch
@@ -1151,14 +1151,14 @@ do_end_tc_call(Mod, IPTC={init_per_testcase,Func}, Res, Return) ->
Args
end,
EPTCInitRes =
- case do_init_tc_call(Mod,{end_per_testcase,Func},
+ case do_init_tc_call(Mod,{end_per_testcase_not_run,Func},
IPTCEndRes,Return) of
{ok,EPTCInitConfig} when is_list(EPTCInitConfig) ->
{Return,EPTCInitConfig};
_ ->
- Return
+ {Return,IPTCEndRes}
end,
- do_end_tc_call1(Mod, {end_per_testcase,Func},
+ do_end_tc_call1(Mod, {end_per_testcase_not_run,Func},
EPTCInitRes, Return);
_Ok ->
do_end_tc_call1(Mod, IPTC, Res, Return)
diff --git a/lib/common_test/src/test_server_ctrl.erl b/lib/common_test/src/test_server_ctrl.erl
index b52e4bef9b..39c523f8b3 100644
--- a/lib/common_test/src/test_server_ctrl.erl
+++ b/lib/common_test/src/test_server_ctrl.erl
@@ -2051,17 +2051,21 @@ add_init_and_end_per_suite([], _LastMod, skipped_suite, _FwMod) ->
add_init_and_end_per_suite([], LastMod, LastRef, FwMod) ->
%% we'll add end_per_suite here even if it's not exported
%% (and simply let the call fail if it's missing)
- case erlang:function_exported(LastMod, end_per_suite, 1) of
- true ->
- [{conf,LastRef,[],{LastMod,end_per_suite}}];
- false ->
+ case {erlang:function_exported(LastMod, end_per_suite, 1),
+ erlang:function_exported(LastMod, init_per_suite, 1)} of
+ {false,false} ->
%% let's call a "fake" end_per_suite if it exists
case erlang:function_exported(FwMod, end_per_suite, 1) of
true ->
[{conf,LastRef,[{suite,LastMod}],{FwMod,end_per_suite}}];
false ->
[{conf,LastRef,[],{LastMod,end_per_suite}}]
- end
+ end;
+ _ ->
+	    %% If either of these is exported, the other should be
+	    %% too (this is required and documented). If it is not,
+	    %% the call will fail with reason 'undef'.
+ [{conf,LastRef,[],{LastMod,end_per_suite}}]
end.
do_add_init_and_end_per_suite(LastMod, LastRef, Mod, FwMod) ->
@@ -2070,11 +2074,9 @@ do_add_init_and_end_per_suite(LastMod, LastRef, Mod, FwMod) ->
_ -> ok
end,
{Init,NextMod,NextRef} =
- case erlang:function_exported(Mod, init_per_suite, 1) of
- true ->
- Ref = make_ref(),
- {[{conf,Ref,[],{Mod,init_per_suite}}],Mod,Ref};
- false ->
+ case {erlang:function_exported(Mod, init_per_suite, 1),
+ erlang:function_exported(Mod, end_per_suite, 1)} of
+ {false,false} ->
%% let's call a "fake" init_per_suite if it exists
case erlang:function_exported(FwMod, init_per_suite, 1) of
true ->
@@ -2083,8 +2085,13 @@ do_add_init_and_end_per_suite(LastMod, LastRef, Mod, FwMod) ->
{FwMod,init_per_suite}}],Mod,Ref};
false ->
{[],Mod,undefined}
- end
-
+ end;
+ _ ->
+	    %% If either of these is exported, the other should be
+	    %% too (this is required and documented). If it is not,
+	    %% the call will fail with reason 'undef'.
+ Ref = make_ref(),
+ {[{conf,Ref,[],{Mod,init_per_suite}}],Mod,Ref}
end,
Cases =
if LastRef==undefined ->
@@ -2094,10 +2101,9 @@ do_add_init_and_end_per_suite(LastMod, LastRef, Mod, FwMod) ->
true ->
%% we'll add end_per_suite here even if it's not exported
%% (and simply let the call fail if it's missing)
- case erlang:function_exported(LastMod, end_per_suite, 1) of
- true ->
- [{conf,LastRef,[],{LastMod,end_per_suite}}|Init];
- false ->
+ case {erlang:function_exported(LastMod, end_per_suite, 1),
+ erlang:function_exported(LastMod, init_per_suite, 1)} of
+ {false,false} ->
%% let's call a "fake" end_per_suite if it exists
case erlang:function_exported(FwMod, end_per_suite, 1) of
true ->
@@ -2105,8 +2111,13 @@ do_add_init_and_end_per_suite(LastMod, LastRef, Mod, FwMod) ->
{FwMod,end_per_suite}}|Init];
false ->
[{conf,LastRef,[],{LastMod,end_per_suite}}|Init]
- end
- end
+ end;
+ _ ->
+		    %% If either of these is exported, the other should be
+		    %% too (this is required and documented). If it is not,
+		    %% the call will fail with reason 'undef'.
+ [{conf,LastRef,[],{LastMod,end_per_suite}}|Init]
+ end
end,
{Cases,NextMod,NextRef}.
@@ -2115,11 +2126,9 @@ do_add_end_per_suite_and_skip(LastMod, LastRef, Mod, FwMod) ->
No when No==undefined ; No==skipped_suite ->
{[],Mod,skipped_suite};
_Ref ->
- case erlang:function_exported(LastMod, end_per_suite, 1) of
- true ->
- {[{conf,LastRef,[],{LastMod,end_per_suite}}],
- Mod,skipped_suite};
- false ->
+ case {erlang:function_exported(LastMod, end_per_suite, 1),
+ erlang:function_exported(LastMod, init_per_suite, 1)} of
+ {false,false} ->
case erlang:function_exported(FwMod, end_per_suite, 1) of
true ->
%% let's call "fake" end_per_suite if it exists
@@ -2128,7 +2137,13 @@ do_add_end_per_suite_and_skip(LastMod, LastRef, Mod, FwMod) ->
false ->
{[{conf,LastRef,[],{LastMod,end_per_suite}}],
Mod,skipped_suite}
- end
+ end;
+ _ ->
+		    %% If either of these is exported, the other should be
+		    %% too (this is required and documented). If it is not,
+		    %% the call will fail with reason 'undef'.
+ {[{conf,LastRef,[],{LastMod,end_per_suite}}],
+ Mod,skipped_suite}
end
end.
@@ -2924,22 +2939,21 @@ run_test_cases_loop([{Mod,Func,Args}|Cases], Config, TimetrapData, Mode, Status)
exit(framework_error);
%% sequential execution of test case finished
{Time,RetVal,_} ->
+ RetTag =
+ if is_tuple(RetVal) -> element(1,RetVal);
+ true -> undefined
+ end,
{Failed,Status1} =
- case Time of
- died ->
- {true,update_status(failed, Mod, Func, Status)};
- _ when is_tuple(RetVal) ->
- case element(1, RetVal) of
- R when R=='EXIT'; R==failed ->
- {true,update_status(failed, Mod, Func, Status)};
- R when R==skip; R==skipped ->
- {false,update_status(skipped, Mod, Func, Status)};
- _ ->
- {false,update_status(ok, Mod, Func, Status)}
- end;
- _ ->
- {false,update_status(ok, Mod, Func, Status)}
- end,
+ case RetTag of
+ Skip when Skip==skip; Skip==skipped ->
+ {false,update_status(skipped, Mod, Func, Status)};
+ Fail when Fail=='EXIT'; Fail==failed ->
+ {true,update_status(failed, Mod, Func, Status)};
+ _ when Time==died, RetVal=/=ok ->
+ {true,update_status(failed, Mod, Func, Status)};
+ _ ->
+ {false,update_status(ok, Mod, Func, Status)}
+ end,
case check_prop(sequence, Mode) of
false ->
stop_minor_log_file(),
@@ -3794,7 +3808,15 @@ run_test_case1(Ref, Num, Mod, Func, Args, RunInit,
{died,{timetrap_timeout,TimetrapTimeout}} ->
progress(failed, Num, Mod, Func, GrName, Loc,
timetrap_timeout, TimetrapTimeout, Comment, Style);
- {died,Reason} ->
+ {died,{Skip,Reason}} when Skip==skip; Skip==skipped ->
+ %% died in init_per_testcase
+ progress(skip, Num, Mod, Func, GrName, Loc, Reason,
+ Time, Comment, Style);
+ {died,Reason} when Reason=/=ok ->
+	    %% (If Reason==ok it means that the process died in
+	    %% end_per_testcase after successfully completing the
+	    %% test case itself - in that case we shall not fail, but a
+	    %% warning will be issued in the comment field.)
progress(failed, Num, Mod, Func, GrName, Loc, Reason,
Time, Comment, Style);
{_,{'EXIT',{Skip,Reason}}} when Skip==skip; Skip==skipped;
@@ -3943,6 +3965,9 @@ progress(skip, CaseNum, Mod, Func, GrName, Loc, Reason, Time,
[get_info_str(Mod,Func, CaseNum, get(test_server_cases))]),
test_server_sup:framework_call(report, [tc_done,{Mod,{Func,GrName},
{ReportTag,Reason1}}]),
+ TimeStr = io_lib:format(if is_float(Time) -> "~.3fs";
+ true -> "~w"
+ end, [Time]),
ReasonStr = escape_chars(reason_to_string(Reason1)),
ReasonStr1 = lists:flatten([string:strip(S,left) ||
S <- string:tokens(ReasonStr,[$\n])]),
@@ -3957,10 +3982,10 @@ progress(skip, CaseNum, Mod, Func, GrName, Loc, Reason, Time,
_ -> xhtml("<br>(","<br />(") ++ to_string(Comment) ++ ")"
end,
print(html,
- "<td>" ++ St0 ++ "~.3fs" ++ St1 ++ "</td>"
+ "<td>" ++ St0 ++ "~ts" ++ St1 ++ "</td>"
"<td><font color=\"~ts\">SKIPPED</font></td>"
"<td>~ts~ts</td></tr>\n",
- [Time,Color,ReasonStr2,Comment1]),
+ [TimeStr,Color,ReasonStr2,Comment1]),
FormatLoc = test_server_sup:format_loc(Loc),
print(minor, "=== Location: ~ts", [FormatLoc]),
print(minor, "=== Reason: ~ts", [ReasonStr1]),
@@ -4098,6 +4123,9 @@ progress(ok, _CaseNum, Mod, Func, GrName, _Loc, RetVal, Time,
Comment0, {St0,St1}) ->
print(minor, "successfully completed test case", []),
test_server_sup:framework_call(report, [tc_done,{Mod,{Func,GrName},ok}]),
+ TimeStr = io_lib:format(if is_float(Time) -> "~.3fs";
+ true -> "~w"
+ end, [Time]),
Comment =
case RetVal of
{comment,RetComment} ->
@@ -4116,10 +4144,10 @@ progress(ok, _CaseNum, Mod, Func, GrName, _Loc, RetVal, Time,
end,
print(major, "=elapsed ~p", [Time]),
print(html,
- "<td>" ++ St0 ++ "~.3fs" ++ St1 ++ "</td>"
+ "<td>" ++ St0 ++ "~ts" ++ St1 ++ "</td>"
"<td><font color=\"green\">Ok</font></td>"
"~ts</tr>\n",
- [Time,Comment]),
+ [TimeStr,Comment]),
print(minor,
escape_chars(io_lib:format("=== Returned value: ~tp", [RetVal])),
[]),
diff --git a/lib/common_test/test/Makefile b/lib/common_test/test/Makefile
index b1eddfedd7..2f0fc2e05a 100644
--- a/lib/common_test/test/Makefile
+++ b/lib/common_test/test/Makefile
@@ -70,7 +70,8 @@ MODULES= \
test_server_SUITE \
test_server_test_lib \
ct_release_test_SUITE \
- ct_log_SUITE
+ ct_log_SUITE \
+ ct_SUITE
ERL_FILES= $(MODULES:%=%.erl)
HRL_FILES= test_server_test_lib.hrl
diff --git a/lib/common_test/test/ct_SUITE.erl b/lib/common_test/test/ct_SUITE.erl
new file mode 100644
index 0000000000..eb98c2544f
--- /dev/null
+++ b/lib/common_test/test/ct_SUITE.erl
@@ -0,0 +1,53 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2009-2016. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(ct_SUITE).
+
+-compile(export_all).
+
+-include_lib("common_test/include/ct.hrl").
+
+suite() ->
+ [{timetrap,{seconds,30}}].
+
+init_per_suite(Config) ->
+ Config.
+
+end_per_suite(_Config) ->
+ ok.
+
+init_per_testcase(_TestCase, Config) ->
+ Config.
+
+end_per_testcase(_TestCase, _Config) ->
+ ok.
+
+all() ->
+ [app_file, appup_file].
+
+%%%-----------------------------------------------------------------
+%%% Test cases
+
+app_file(_Config) ->
+ ok = test_server:app_test(common_test),
+ ok.
+
+appup_file(_Config) ->
+ ok = test_server:appup_test(common_test).
+
diff --git a/lib/common_test/test/ct_error_SUITE.erl b/lib/common_test/test/ct_error_SUITE.erl
index fae23484e6..621f3b6d2d 100644
--- a/lib/common_test/test/ct_error_SUITE.erl
+++ b/lib/common_test/test/ct_error_SUITE.erl
@@ -1531,17 +1531,17 @@ test_events(config_func_errors) ->
{?eh,tc_start,{config_func_error_1_SUITE,exit_in_iptc}},
{?eh,tc_done,{config_func_error_1_SUITE,exit_in_iptc,'_'}},
- {?eh,test_stats,{0,1,{0,0}}},
+ {?eh,test_stats,{0,0,{0,1}}},
{?eh,tc_start,{config_func_error_1_SUITE,exit_in_eptc}},
{?eh,tc_done,{config_func_error_1_SUITE,exit_in_eptc,'_'}},
- {?eh,test_stats,{0,2,{0,0}}},
+ {?eh,test_stats,{1,0,{0,1}}},
[{?eh,tc_start,{config_func_error_1_SUITE,{init_per_group,g1,[]}}},
{?eh,tc_done,{config_func_error_1_SUITE,{init_per_group,g1,[]},ok}},
{?eh,tc_start,{config_func_error_1_SUITE,exit_in_iptc}},
{?eh,tc_done,{config_func_error_1_SUITE,exit_in_iptc,'_'}},
- {?eh,test_stats,{0,3,{0,0}}},
+ {?eh,test_stats,{1,0,{0,2}}},
{?eh,tc_start,{config_func_error_1_SUITE,{end_per_group,g1,[]}}},
{?eh,tc_done,{config_func_error_1_SUITE,{end_per_group,g1,[]},ok}}],
@@ -1549,7 +1549,7 @@ test_events(config_func_errors) ->
{?eh,tc_done,{config_func_error_1_SUITE,{init_per_group,g2,[]},ok}},
{?eh,tc_start,{config_func_error_1_SUITE,exit_in_eptc}},
{?eh,tc_done,{config_func_error_1_SUITE,exit_in_eptc,'_'}},
- {?eh,test_stats,{0,4,{0,0}}},
+ {?eh,test_stats,{2,0,{0,2}}},
{?eh,tc_start,{config_func_error_1_SUITE,{end_per_group,g2,[]}}},
{?eh,tc_done,{config_func_error_1_SUITE,{end_per_group,g2,[]},ok}}],
diff --git a/lib/common_test/test/ct_hooks_SUITE.erl b/lib/common_test/test/ct_hooks_SUITE.erl
index 690d0af1bb..93bcb8fe52 100644
--- a/lib/common_test/test/ct_hooks_SUITE.erl
+++ b/lib/common_test/test/ct_hooks_SUITE.erl
@@ -70,22 +70,25 @@ suite() ->
all() ->
all(suite).
-all(suite) ->
+all(suite) ->
lists:reverse(
[
one_cth, two_cth, faulty_cth_no_init, faulty_cth_id_no_init,
faulty_cth_exit_in_init, faulty_cth_exit_in_id,
- faulty_cth_exit_in_init_scope_suite, minimal_cth,
- minimal_and_maximal_cth, faulty_cth_undef,
+ faulty_cth_exit_in_init_scope_suite, minimal_cth,
+ minimal_and_maximal_cth, faulty_cth_undef,
scope_per_suite_cth, scope_per_group_cth, scope_suite_cth,
- scope_per_suite_state_cth, scope_per_group_state_cth,
+ scope_per_suite_state_cth, scope_per_group_state_cth,
scope_suite_state_cth,
fail_pre_suite_cth, double_fail_pre_suite_cth,
fail_post_suite_cth, skip_pre_suite_cth, skip_pre_end_cth,
+ skip_pre_init_tc_cth,
skip_post_suite_cth, recover_post_suite_cth, update_config_cth,
- state_update_cth, options_cth, same_id_cth,
+ state_update_cth, options_cth, same_id_cth,
fail_n_skip_with_minimal_cth, prio_cth, no_config,
- data_dir, cth_log
+ no_init_suite_config, no_init_config, no_end_config,
+ failed_sequence, repeat_force_stop, config_clash,
+ callbacks_on_skip, fallback, data_dir, cth_log
]
).
@@ -96,10 +99,10 @@ all(suite) ->
%%%-----------------------------------------------------------------
%%%
-one_cth(Config) when is_list(Config) ->
+one_cth(Config) when is_list(Config) ->
do_test(one_empty_cth, "ct_cth_empty_SUITE.erl",[empty_cth], Config).
-two_cth(Config) when is_list(Config) ->
+two_cth(Config) when is_list(Config) ->
do_test(two_empty_cth, "ct_cth_empty_SUITE.erl",[empty_cth,empty_cth],
Config).
@@ -119,13 +122,13 @@ minimal_cth(Config) when is_list(Config) ->
minimal_and_maximal_cth(Config) when is_list(Config) ->
do_test(minimal_and_maximal_cth, "ct_cth_empty_SUITE.erl",
[minimal_cth, empty_cth],Config).
-
+
faulty_cth_undef(Config) when is_list(Config) ->
do_test(faulty_cth_undef, "ct_cth_empty_SUITE.erl",
[undef_cth],Config).
faulty_cth_exit_in_init_scope_suite(Config) when is_list(Config) ->
- do_test(faulty_cth_exit_in_init_scope_suite,
+ do_test(faulty_cth_exit_in_init_scope_suite,
"ct_exit_in_init_scope_suite_cth_SUITE.erl",
[],Config).
@@ -190,6 +193,10 @@ skip_post_suite_cth(Config) when is_list(Config) ->
do_test(skip_post_suite_cth, "ct_cth_empty_SUITE.erl",
[skip_post_suite_cth],Config).
+skip_pre_init_tc_cth(Config) ->
+ do_test(skip_pre_init_tc_cth, "ct_cth_empty_SUITE.erl",
+ [skip_pre_init_tc_cth],Config).
+
recover_post_suite_cth(Config) when is_list(Config) ->
do_test(recover_post_suite_cth, "ct_cth_fail_per_suite_SUITE.erl",
[recover_post_suite_cth],Config).
@@ -205,7 +212,7 @@ state_update_cth(Config) when is_list(Config) ->
options_cth(Config) when is_list(Config) ->
do_test(options_cth, "ct_cth_empty_SUITE.erl",
[{empty_cth,[test]}],Config).
-
+
same_id_cth(Config) when is_list(Config) ->
do_test(same_id_cth, "ct_cth_empty_SUITE.erl",
[same_id_cth,same_id_cth],Config).
@@ -223,13 +230,24 @@ no_config(Config) when is_list(Config) ->
do_test(no_config, "ct_no_config_SUITE.erl",
[verify_config_cth],Config).
+no_init_suite_config(Config) when is_list(Config) ->
+ do_test(no_init_suite_config, "ct_no_init_suite_config_SUITE.erl",
+ [empty_cth],Config).
+
+no_init_config(Config) when is_list(Config) ->
+ do_test(no_init_config, "ct_no_init_config_SUITE.erl",[empty_cth],Config).
+
+no_end_config(Config) when is_list(Config) ->
+ do_test(no_end_config, "ct_no_end_config_SUITE.erl",[empty_cth],Config).
+
data_dir(Config) when is_list(Config) ->
do_test(data_dir, "ct_data_dir_SUITE.erl",
[verify_data_dir_cth],Config).
-cth_log(Config) when is_list(Config) ->
+cth_log(Config) when is_list(Config) ->
%% test that cth_log_redirect writes properly to
%% unexpected I/O log
+ ct:timetrap({minutes,10}),
StartOpts = do_test(cth_log, "cth_log_SUITE.erl", [], Config),
Logdir = proplists:get_value(logdir, StartOpts),
UnexpIoLogs =
@@ -253,29 +271,57 @@ cth_log(Config) when is_list(Config) ->
end, UnexpIoLogs),
ok.
+%% OTP-10599 adds the Suite argument as first argument to all hook
+%% callbacks that did not have a Suite argument from before. This test
+%% checks that ct_hooks will fall back to old versions of callbacks if
+%% new versions are not exported.
+fallback(Config) ->
+ do_test(fallback, "all_hook_callbacks_SUITE.erl",[fallback_cth], Config).
+
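As a sketch of what the fallback covers (hypothetical module, not one of the hooks used by this suite): a hook that only exports the pre-OTP-10599 arities keeps working, since ct_hooks calls the old callbacks when the Suite-carrying versions are not exported.

    -module(old_arity_cth).
    -export([init/2, pre_init_per_testcase/3, post_end_per_testcase/4]).

    init(_Id, Opts) ->
        {ok, Opts}.

    %% Old-style callbacks without the Suite argument; ct_hooks falls
    %% back to these when the /4 and /5 versions are missing.
    pre_init_per_testcase(_TC, Config, State) ->
        {Config, State}.

    post_end_per_testcase(_TC, _Config, Return, State) ->
        {Return, State}.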
+%% Test that expected callbacks, and only those, are called when tests
+%% are skipped in different ways
+callbacks_on_skip(Config) ->
+ do_test(callbacks_on_skip, {spec,"skip.spec"},[skip_cth], Config).
+
+%% Test that expected callbacks, and only those, are called when tests
+%% are skipped due to a failed sequence
+failed_sequence(Config) ->
+ do_test(failed_sequence, "seq_SUITE.erl", [skip_cth], Config).
+
+%% Test that expected callbacks, and only those, are called when tests
+%% are skipped due to the {force_stop,skip_rest} option
+repeat_force_stop(Config) ->
+ do_test(repeat_force_stop, "repeat_SUITE.erl", [skip_cth], Config, ok, 2,
+ [{force_stop,skip_rest},{duration,"000009"}]).
+
+%% Test that expected callbacks, and only those, are called when a test
+%% fails due to a clash in config alias names
+config_clash(Config) ->
+ do_test(config_clash, "config_clash_SUITE.erl", [skip_cth], Config).
%%%-----------------------------------------------------------------
%%% HELP FUNCTIONS
%%%-----------------------------------------------------------------
-do_test(Tag, SWC, CTHs, Config) ->
- do_test(Tag, SWC, CTHs, Config, ok).
-do_test(Tag, SWC, CTHs, Config, {error,_} = Res) ->
- do_test(Tag, SWC, CTHs, Config, Res, 1);
-do_test(Tag, SWC, CTHs, Config, Res) ->
- do_test(Tag, SWC, CTHs, Config, Res, 2).
+do_test(Tag, WTT, CTHs, Config) ->
+ do_test(Tag, WTT, CTHs, Config, ok).
+do_test(Tag, WTT, CTHs, Config, {error,_} = Res) ->
+ do_test(Tag, WTT, CTHs, Config, Res, 1,[]);
+do_test(Tag, WTT, CTHs, Config, Res) ->
+ do_test(Tag, WTT, CTHs, Config, Res, 2,[]).
-do_test(Tag, SuiteWildCard, CTHs, Config, Res, EC) ->
-
+do_test(Tag, WhatToTest, CTHs, Config, Res, EC, ExtraOpts) when is_list(WhatToTest) ->
+ do_test(Tag, {suite,WhatToTest}, CTHs, Config, Res, EC, ExtraOpts);
+do_test(Tag, {WhatTag,Wildcard}, CTHs, Config, Res, EC, ExtraOpts) ->
DataDir = ?config(data_dir, Config),
- Suites = filelib:wildcard(
- filename:join([DataDir,"cth/tests",SuiteWildCard])),
- {Opts,ERPid} = setup([{suite,Suites},
- {ct_hooks,CTHs},{label,Tag}], Config),
+ Files = filelib:wildcard(
+ filename:join([DataDir,"cth/tests",Wildcard])),
+ {Opts,ERPid} =
+ setup([{WhatTag,Files},{ct_hooks,CTHs},{label,Tag}|ExtraOpts], Config),
Res = ct_test_support:run(Opts, Config),
Events = ct_test_support:get_events(ERPid, Config),
- ct_test_support:log_events(Tag,
+ ct_test_support:log_events(Tag,
reformat(Events, ?eh),
?config(priv_dir, Config),
Opts),
@@ -323,12 +369,12 @@ test_events(one_empty_cth) ->
{?eh,tc_done,{ct_cth_empty_SUITE,init_per_suite,ok}},
{?eh,tc_start,{ct_cth_empty_SUITE,test_case}},
- {?eh,cth,{empty_cth,pre_init_per_testcase,[test_case,'$proplist',[]]}},
- {?eh,cth,{empty_cth,post_init_per_testcase,[test_case,'$proplist','_',[]]}},
- {?eh,cth,{empty_cth,pre_end_per_testcase,[test_case,'$proplist',[]]}},
- {?eh,cth,{empty_cth,post_end_per_testcase,[test_case,'$proplist','_',[]]}},
+ {?eh,cth,{empty_cth,pre_init_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_init_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist','_',[]]}},
+ {?eh,cth,{empty_cth,pre_end_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_end_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist','_',[]]}},
{?eh,tc_done,{ct_cth_empty_SUITE,test_case,ok}},
-
+
{?eh,tc_start,{ct_cth_empty_SUITE,end_per_suite}},
{?eh,cth,{empty_cth,pre_end_per_suite,
[ct_cth_empty_SUITE,'$proplist',[]]}},
@@ -355,12 +401,12 @@ test_events(two_empty_cth) ->
{?eh,tc_done,{ct_cth_empty_SUITE,init_per_suite,ok}},
{?eh,tc_start,{ct_cth_empty_SUITE,test_case}},
- {?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[]]}},
- {?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[]]}},
- {?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[]]}},
- {?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[]]}},
+ {?eh,cth,{'_',pre_init_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist',[]]}},
+ {?eh,cth,{'_',pre_init_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist',[]]}},
+ {?eh,cth,{'_',post_end_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist',ok,[]]}},
+ {?eh,cth,{'_',post_end_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist',ok,[]]}},
{?eh,tc_done,{ct_cth_empty_SUITE,test_case,ok}},
-
+
{?eh,tc_start,{ct_cth_empty_SUITE,end_per_suite}},
{?eh,cth,{'_',pre_end_per_suite,[ct_cth_empty_SUITE,'$proplist',[]]}},
{?eh,cth,{'_',pre_end_per_suite,[ct_cth_empty_SUITE,'$proplist',[]]}},
@@ -402,7 +448,7 @@ test_events(minimal_cth) ->
{?eh,tc_start,{ct_cth_empty_SUITE,test_case}},
{?eh,tc_done,{ct_cth_empty_SUITE,test_case,ok}},
-
+
{?eh,tc_start,{ct_cth_empty_SUITE,end_per_suite}},
{?eh,tc_done,{ct_cth_empty_SUITE,end_per_suite,ok}},
{?eh,test_done,{'DEF','STOP_TIME'}},
@@ -423,10 +469,10 @@ test_events(minimal_and_maximal_cth) ->
{?eh,tc_done,{ct_cth_empty_SUITE,init_per_suite,ok}},
{?eh,tc_start,{ct_cth_empty_SUITE,test_case}},
- {?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[]]}},
- {?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[]]}},
+ {?eh,cth,{'_',pre_init_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist',[]]}},
+ {?eh,cth,{'_',post_end_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist',ok,[]]}},
{?eh,tc_done,{ct_cth_empty_SUITE,test_case,ok}},
-
+
{?eh,tc_start,{ct_cth_empty_SUITE,end_per_suite}},
{?eh,cth,{'_',pre_end_per_suite,[ct_cth_empty_SUITE,'$proplist',[]]}},
{?eh,cth,{'_',post_end_per_suite,[ct_cth_empty_SUITE,'$proplist','_',[]]}},
@@ -452,11 +498,11 @@ test_events(faulty_cth_undef) ->
{?eh,tc_auto_skip,{ct_cth_empty_SUITE,test_case,
{failed, FailReason}}},
{?eh,cth,{'_',on_tc_skip,'_'}},
-
+
{?eh,tc_auto_skip,{ct_cth_empty_SUITE,end_per_suite,
{failed, FailReason}}},
{?eh,cth,{'_',on_tc_skip,'_'}},
-
+
{?eh,test_done,{'DEF','STOP_TIME'}},
{?eh,stop_logging,[]}
];
@@ -512,10 +558,10 @@ test_events(scope_per_suite_cth) ->
{?eh,tc_done,{ct_scope_per_suite_cth_SUITE,init_per_suite,ok}},
{?eh,tc_start,{ct_scope_per_suite_cth_SUITE,test_case}},
- {?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[]]}},
- {?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[]]}},
+ {?eh,cth,{'_',pre_init_per_testcase,[ct_scope_per_suite_cth_SUITE,test_case,'$proplist',[]]}},
+ {?eh,cth,{'_',post_end_per_testcase,[ct_scope_per_suite_cth_SUITE,test_case,'$proplist',ok,[]]}},
{?eh,tc_done,{ct_scope_per_suite_cth_SUITE,test_case,ok}},
-
+
{?eh,tc_start,{ct_scope_per_suite_cth_SUITE,end_per_suite}},
{?eh,cth,{'_',pre_end_per_suite,
[ct_scope_per_suite_cth_SUITE,'$proplist',[]]}},
@@ -538,10 +584,10 @@ test_events(scope_suite_cth) ->
{?eh,tc_done,{ct_scope_suite_cth_SUITE,init_per_suite,ok}},
{?eh,tc_start,{ct_scope_suite_cth_SUITE,test_case}},
- {?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[]]}},
- {?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[]]}},
+ {?eh,cth,{'_',pre_init_per_testcase,[ct_scope_suite_cth_SUITE,test_case,'$proplist',[]]}},
+ {?eh,cth,{'_',post_end_per_testcase,[ct_scope_suite_cth_SUITE,test_case,'$proplist',ok,[]]}},
{?eh,tc_done,{ct_scope_suite_cth_SUITE,test_case,ok}},
-
+
{?eh,tc_start,{ct_scope_suite_cth_SUITE,end_per_suite}},
{?eh,cth,{'_',pre_end_per_suite,[ct_scope_suite_cth_SUITE,'$proplist',[]]}},
{?eh,cth,{'_',post_end_per_suite,[ct_scope_suite_cth_SUITE,'$proplist','_',[]]}},
@@ -561,20 +607,20 @@ test_events(scope_per_group_cth) ->
[{?eh,tc_start,{ct_scope_per_group_cth_SUITE,{init_per_group,group1,[]}}},
{?eh,cth,{'_',id,[[]]}},
{?eh,cth,{'_',init,['_',[]]}},
- {?eh,cth,{'_',post_init_per_group,[group1,'$proplist','$proplist',[]]}},
+ {?eh,cth,{'_',post_init_per_group,[ct_scope_per_group_cth_SUITE,group1, '$proplist','$proplist',[]]}},
{?eh,tc_done,{ct_scope_per_group_cth_SUITE,{init_per_group,group1,[]},ok}},
-
+
{?eh,tc_start,{ct_scope_per_group_cth_SUITE,test_case}},
- {?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[]]}},
- {?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[]]}},
+ {?eh,cth,{'_',pre_init_per_testcase,[ct_scope_per_group_cth_SUITE,test_case,'$proplist',[]]}},
+ {?eh,cth,{'_',post_end_per_testcase,[ct_scope_per_group_cth_SUITE,test_case,'$proplist',ok,[]]}},
{?eh,tc_done,{ct_scope_per_group_cth_SUITE,test_case,ok}},
-
+
{?eh,tc_start,{ct_scope_per_group_cth_SUITE,{end_per_group,group1,[]}}},
- {?eh,cth,{'_',pre_end_per_group,[group1,'$proplist',[]]}},
- {?eh,cth,{'_',post_end_per_group,[group1,'$proplist','_',[]]}},
+ {?eh,cth,{'_',pre_end_per_group,[ct_scope_per_group_cth_SUITE,group1,'$proplist',[]]}},
+ {?eh,cth,{'_',post_end_per_group,[ct_scope_per_group_cth_SUITE,group1,'$proplist','_',[]]}},
{?eh,cth,{'_',terminate,[[]]}},
{?eh,tc_done,{ct_scope_per_group_cth_SUITE,{end_per_group,group1,[]},ok}}],
-
+
{?eh,tc_start,{ct_scope_per_group_cth_SUITE,end_per_suite}},
{?eh,tc_done,{ct_scope_per_group_cth_SUITE,end_per_suite,ok}},
{?eh,test_done,{'DEF','STOP_TIME'}},
@@ -592,10 +638,10 @@ test_events(scope_per_suite_state_cth) ->
{?eh,tc_done,{ct_scope_per_suite_state_cth_SUITE,init_per_suite,ok}},
{?eh,tc_start,{ct_scope_per_suite_state_cth_SUITE,test_case}},
- {?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[test]]}},
- {?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[test]]}},
+ {?eh,cth,{'_',pre_init_per_testcase,[ct_scope_per_suite_state_cth_SUITE,test_case,'$proplist',[test]]}},
+ {?eh,cth,{'_',post_end_per_testcase,[ct_scope_per_suite_state_cth_SUITE,test_case,'$proplist',ok,[test]]}},
{?eh,tc_done,{ct_scope_per_suite_state_cth_SUITE,test_case,ok}},
-
+
{?eh,tc_start,{ct_scope_per_suite_state_cth_SUITE,end_per_suite}},
{?eh,cth,{'_',pre_end_per_suite,
[ct_scope_per_suite_state_cth_SUITE,'$proplist',[test]]}},
@@ -618,10 +664,10 @@ test_events(scope_suite_state_cth) ->
{?eh,tc_done,{ct_scope_suite_state_cth_SUITE,init_per_suite,ok}},
{?eh,tc_start,{ct_scope_suite_state_cth_SUITE,test_case}},
- {?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[test]]}},
- {?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[test]]}},
+ {?eh,cth,{'_',pre_init_per_testcase,[ct_scope_suite_state_cth_SUITE,test_case,'$proplist',[test]]}},
+ {?eh,cth,{'_',post_end_per_testcase,[ct_scope_suite_state_cth_SUITE,test_case,'$proplist',ok,[test]]}},
{?eh,tc_done,{ct_scope_suite_state_cth_SUITE,test_case,ok}},
-
+
{?eh,tc_start,{ct_scope_suite_state_cth_SUITE,end_per_suite}},
{?eh,cth,{'_',pre_end_per_suite,[ct_scope_suite_state_cth_SUITE,'$proplist',[test]]}},
{?eh,cth,{'_',post_end_per_suite,[ct_scope_suite_state_cth_SUITE,'$proplist','_',[test]]}},
@@ -641,20 +687,20 @@ test_events(scope_per_group_state_cth) ->
[{?eh,tc_start,{ct_scope_per_group_state_cth_SUITE,{init_per_group,group1,[]}}},
{?eh,cth,{'_',id,[[test]]}},
{?eh,cth,{'_',init,['_',[test]]}},
- {?eh,cth,{'_',post_init_per_group,[group1,'$proplist','$proplist',[test]]}},
+ {?eh,cth,{'_',post_init_per_group,[ct_scope_per_group_state_cth_SUITE,group1,'$proplist','$proplist',[test]]}},
{?eh,tc_done,{ct_scope_per_group_state_cth_SUITE,{init_per_group,group1,[]},ok}},
-
+
{?eh,tc_start,{ct_scope_per_group_state_cth_SUITE,test_case}},
- {?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[test]]}},
- {?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[test]]}},
+ {?eh,cth,{'_',pre_init_per_testcase,[ct_scope_per_group_state_cth_SUITE,test_case,'$proplist',[test]]}},
+ {?eh,cth,{'_',post_end_per_testcase,[ct_scope_per_group_state_cth_SUITE,test_case,'$proplist',ok,[test]]}},
{?eh,tc_done,{ct_scope_per_group_state_cth_SUITE,test_case,ok}},
-
+
{?eh,tc_start,{ct_scope_per_group_state_cth_SUITE,{end_per_group,group1,[]}}},
- {?eh,cth,{'_',pre_end_per_group,[group1,'$proplist',[test]]}},
- {?eh,cth,{'_',post_end_per_group,[group1,'$proplist','_',[test]]}},
+ {?eh,cth,{'_',pre_end_per_group,[ct_scope_per_group_state_cth_SUITE,group1,'$proplist',[test]]}},
+ {?eh,cth,{'_',post_end_per_group,[ct_scope_per_group_state_cth_SUITE,group1,'$proplist','_',[test]]}},
{?eh,cth,{'_',terminate,[[test]]}},
{?eh,tc_done,{ct_scope_per_group_state_cth_SUITE,{end_per_group,group1,[]},ok}}],
-
+
{?eh,tc_start,{ct_scope_per_group_state_cth_SUITE,end_per_suite}},
{?eh,tc_done,{ct_scope_per_group_state_cth_SUITE,end_per_suite,ok}},
{?eh,test_done,{'DEF','STOP_TIME'}},
@@ -666,7 +712,7 @@ test_events(fail_pre_suite_cth) ->
{?eh,start_logging,{'DEF','RUNDIR'}},
{?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
{?eh,cth,{'_',init,['_',[]]}},
-
+
{?eh,tc_start,{ct_cth_empty_SUITE,init_per_suite}},
{?eh,cth,{'_',pre_init_per_suite,[ct_cth_empty_SUITE,'$proplist',[]]}},
{?eh,cth,{'_',post_init_per_suite,[ct_cth_empty_SUITE,'$proplist',
@@ -674,27 +720,27 @@ test_events(fail_pre_suite_cth) ->
{?eh,tc_done,{ct_cth_empty_SUITE,init_per_suite,
{failed, {error,"Test failure"}}}},
{?eh,cth,{'_',on_tc_fail,
- [init_per_suite,{failed,"Test failure"},[]]}},
+ [ct_cth_empty_SUITE,init_per_suite,"Test failure",[]]}},
+
-
{?eh,tc_auto_skip,{ct_cth_empty_SUITE,test_case,
{failed,{ct_cth_empty_SUITE,init_per_suite,
{failed,"Test failure"}}}}},
{?eh,cth,{'_',on_tc_skip,
- [test_case, {tc_auto_skip,
+ [ct_cth_empty_SUITE,test_case, {tc_auto_skip,
{failed, {ct_cth_empty_SUITE, init_per_suite,
{failed, "Test failure"}}}},[]]}},
-
+
{?eh,tc_auto_skip, {ct_cth_empty_SUITE, end_per_suite,
{failed, {ct_cth_empty_SUITE, init_per_suite,
{failed, "Test failure"}}}}},
{?eh,cth,{'_',on_tc_skip,
- [end_per_suite, {tc_auto_skip,
+ [ct_cth_empty_SUITE,end_per_suite, {tc_auto_skip,
{failed, {ct_cth_empty_SUITE, init_per_suite,
{failed, "Test failure"}}}},[]]}},
-
+
{?eh,test_done,{'DEF','STOP_TIME'}},
{?eh,cth, {'_',terminate,[[]]}},
{?eh,stop_logging,[]}
@@ -727,17 +773,17 @@ test_events(fail_post_suite_cth) ->
{?eh,cth,{'_',post_init_per_suite,[ct_cth_empty_SUITE,'$proplist','$proplist',[]]}},
{?eh,tc_done,{ct_cth_empty_SUITE,init_per_suite,
{failed,{error,"Test failure"}}}},
- {?eh,cth,{'_',on_tc_fail,[init_per_suite, {failed,"Test failure"}, []]}},
+ {?eh,cth,{'_',on_tc_fail,[ct_cth_empty_SUITE,init_per_suite, "Test failure", []]}},
{?eh,tc_auto_skip,{ct_cth_empty_SUITE,test_case,
{failed,{ct_cth_empty_SUITE,init_per_suite,
{failed,"Test failure"}}}}},
- {?eh,cth,{'_',on_tc_skip,[test_case,{tc_auto_skip,'_'},[]]}},
-
+ {?eh,cth,{'_',on_tc_skip,[ct_cth_empty_SUITE,test_case,{tc_auto_skip,'_'},[]]}},
+
{?eh,tc_auto_skip, {ct_cth_empty_SUITE, end_per_suite,
{failed, {ct_cth_empty_SUITE, init_per_suite,
{failed, "Test failure"}}}}},
- {?eh,cth,{'_',on_tc_skip,[end_per_suite,{tc_auto_skip,'_'},[]]}},
+ {?eh,cth,{'_',on_tc_skip,[ct_cth_empty_SUITE,end_per_suite,{tc_auto_skip,'_'},[]]}},
{?eh,test_done,{'DEF','STOP_TIME'}},
{?eh,cth, {'_',terminate,[[]]}},
@@ -754,11 +800,11 @@ test_events(skip_pre_suite_cth) ->
{?eh,cth,{'_',post_init_per_suite,[ct_cth_empty_SUITE,'$proplist',{skip,"Test skip"},[]]}},
{?eh,tc_done,{ct_cth_empty_SUITE,init_per_suite,{skipped,"Test skip"}}},
{?eh,cth,{'_',on_tc_skip,
- [init_per_suite,{tc_user_skip,{skipped,"Test skip"}},[]]}},
+ [ct_cth_empty_SUITE,init_per_suite,{tc_user_skip,"Test skip"},[]]}},
{?eh,tc_user_skip,{ct_cth_empty_SUITE,test_case,"Test skip"}},
- {?eh,cth,{'_',on_tc_skip,[test_case,{tc_user_skip,"Test skip"},[]]}},
-
+ {?eh,cth,{'_',on_tc_skip,[ct_cth_empty_SUITE,test_case,{tc_user_skip,"Test skip"},[]]}},
+
{?eh,tc_user_skip, {ct_cth_empty_SUITE, end_per_suite,"Test skip"}},
{?eh,test_done,{'DEF','STOP_TIME'}},
@@ -772,31 +818,33 @@ test_events(skip_pre_end_cth) ->
{?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
{?eh,tc_start,{ct_scope_per_group_cth_SUITE,init_per_suite}},
{?eh,tc_done,{ct_scope_per_group_cth_SUITE,init_per_suite,ok}},
-
+
[{?eh,tc_start,{ct_scope_per_group_cth_SUITE,{init_per_group,group1,[]}}},
{?eh,cth,{'_',id,[[]]}},
{?eh,cth,{'_',init,['_',[]]}},
- {?eh,cth,{'_',post_init_per_group,[group1,'$proplist','$proplist',[]]}},
+ {?eh,cth,{'_',post_init_per_group,[ct_scope_per_group_cth_SUITE,group1,'$proplist','$proplist',[]]}},
{?eh,tc_done,{ct_scope_per_group_cth_SUITE,{init_per_group,group1,[]},ok}},
-
+
{?eh,tc_start,{ct_scope_per_group_cth_SUITE,test_case}},
- {?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[]]}},
- {?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[]]}},
+ {?eh,cth,{'_',pre_init_per_testcase,[ct_scope_per_group_cth_SUITE,test_case,'$proplist',[]]}},
+ {?eh,cth,{'_',post_end_per_testcase,[ct_scope_per_group_cth_SUITE,test_case,'$proplist',ok,[]]}},
{?eh,tc_done,{ct_scope_per_group_cth_SUITE,test_case,ok}},
-
+
{?eh,tc_start,{ct_scope_per_group_cth_SUITE,{end_per_group,group1,[]}}},
- {?eh,cth,{'_',pre_end_per_group,[group1,'$proplist',[]]}},
- {?eh,cth,{'_',post_end_per_group,[group1,'$proplist','_',[]]}},
+ {?eh,cth,{'_',pre_end_per_group,[ct_scope_per_group_cth_SUITE,group1,'$proplist',[]]}},
+ {?eh,cth,{'_',post_end_per_group,[ct_scope_per_group_cth_SUITE,group1,'$proplist','_',[]]}},
{?eh,tc_done,{ct_scope_per_group_cth_SUITE,{end_per_group,group1,[]},
{skipped,"Test skip"}}}],
- {?eh,cth,{'_',on_tc_skip,[{end_per_group,group1},
- {tc_user_skip,{skipped,"Test skip"}},
+ {?eh,cth,{'_',on_tc_skip,[ct_scope_per_group_cth_SUITE,
+ {end_per_group,group1},
+ {tc_user_skip,"Test skip"},
[]]}},
{?eh,tc_start,{ct_scope_per_group_cth_SUITE,end_per_suite}},
{?eh,tc_done,{ct_scope_per_group_cth_SUITE,end_per_suite,
{skipped,"Test skip"}}},
- {?eh,cth,{'_',on_tc_skip,[end_per_suite,
- {tc_user_skip,{skipped,"Test skip"}},
+ {?eh,cth,{'_',on_tc_skip,[ct_scope_per_group_cth_SUITE,
+ end_per_suite,
+ {tc_user_skip,"Test skip"},
[]]}},
{?eh,test_done,{'DEF','STOP_TIME'}},
{?eh,cth,{'_',terminate,[[]]}},
@@ -808,24 +856,59 @@ test_events(skip_post_suite_cth) ->
{?eh,start_logging,{'DEF','RUNDIR'}},
{?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
{?eh,cth,{'_',init,['_',[]]}},
-
+
{?eh,tc_start,{ct_cth_empty_SUITE,init_per_suite}},
{?eh,cth,{'_',pre_init_per_suite,[ct_cth_empty_SUITE,'$proplist',[]]}},
{?eh,cth,{'_',post_init_per_suite,[ct_cth_empty_SUITE,'$proplist','$proplist',[]]}},
{?eh,tc_done,{ct_cth_empty_SUITE,init_per_suite,{skipped,"Test skip"}}},
{?eh,cth,{'_',on_tc_skip,
- [init_per_suite,{tc_user_skip,{skipped,"Test skip"}},[]]}},
+ [ct_cth_empty_SUITE,init_per_suite,{tc_user_skip,"Test skip"},[]]}},
{?eh,tc_user_skip,{ct_cth_empty_SUITE,test_case,"Test skip"}},
- {?eh,cth,{'_',on_tc_skip,[test_case,{tc_user_skip,"Test skip"},[]]}},
-
+ {?eh,cth,{'_',on_tc_skip,[ct_cth_empty_SUITE,test_case,{tc_user_skip,"Test skip"},[]]}},
+
{?eh,tc_user_skip, {ct_cth_empty_SUITE, end_per_suite,"Test skip"}},
-
+
{?eh,test_done,{'DEF','STOP_TIME'}},
{?eh,cth,{'_',terminate,[[]]}},
{?eh,stop_logging,[]}
];
+test_events(skip_pre_init_tc_cth) ->
+ [
+ {?eh,start_logging,{'DEF','RUNDIR'}},
+ {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
+ {?eh,cth,{empty_cth,init,['_',[]]}},
+ {?eh,start_info,{1,1,1}},
+ {?eh,tc_start,{ct_cth_empty_SUITE,init_per_suite}},
+ {?eh,cth,{empty_cth,pre_init_per_suite,[ct_cth_empty_SUITE,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_init_per_suite,
+ [ct_cth_empty_SUITE,'$proplist','$proplist',[]]}},
+ {?eh,tc_done,{ct_cth_empty_SUITE,init_per_suite,ok}},
+ {?eh,tc_start,{ct_cth_empty_SUITE,test_case}},
+ {?eh,cth,{empty_cth,pre_init_per_testcase,
+ [ct_cth_empty_SUITE,test_case,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_init_per_testcase,
+ [ct_cth_empty_SUITE,test_case,'$proplist',
+ {skip,"Skipped in pre_init_per_testcase"},
+ []]}},
+ {?eh,tc_done,{ct_cth_empty_SUITE,test_case,
+ {skipped,"Skipped in pre_init_per_testcase"}}},
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [ct_cth_empty_SUITE,test_case,
+ {tc_user_skip,"Skipped in pre_init_per_testcase"},
+ []]}},
+ {?eh,test_stats,{0,0,{1,0}}},
+ {?eh,tc_start,{ct_cth_empty_SUITE,end_per_suite}},
+ {?eh,cth,{empty_cth,pre_end_per_suite,[ct_cth_empty_SUITE,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_end_per_suite,
+ [ct_cth_empty_SUITE,'$proplist',ok,[]]}},
+ {?eh,tc_done,{ct_cth_empty_SUITE,end_per_suite,ok}},
+ {?eh,test_done,{'DEF','STOP_TIME'}},
+ {?eh,cth,{empty_cth,terminate,[[]]}},
+ {?eh,stop_logging,[]}
+ ];
+
test_events(recover_post_suite_cth) ->
Suite = ct_cth_fail_per_suite_SUITE,
[
@@ -840,11 +923,11 @@ test_events(recover_post_suite_cth) ->
{?eh,tc_start,{Suite,test_case}},
{?eh,cth,{'_',pre_init_per_testcase,
- [test_case, not_contains([tc_status]),[]]}},
+ [Suite,test_case, not_contains([tc_status]),[]]}},
{?eh,cth,{'_',post_end_per_testcase,
- [test_case, contains([tc_status]),'_',[]]}},
+ [Suite,test_case, contains([tc_status]),'_',[]]}},
{?eh,tc_done,{Suite,test_case,ok}},
-
+
{?eh,tc_start,{Suite,end_per_suite}},
{?eh,cth,{'_',pre_end_per_suite,
[Suite,not_contains([tc_status]),[]]}},
@@ -861,7 +944,7 @@ test_events(update_config_cth) ->
{?eh,start_logging,{'DEF','RUNDIR'}},
{?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
{?eh,cth,{'_',init,['_',[]]}},
-
+
{?eh,tc_start,{ct_update_config_SUITE,init_per_suite}},
{?eh,cth,{'_',pre_init_per_suite,
[ct_update_config_SUITE,contains([]),[]]}},
@@ -876,13 +959,15 @@ test_events(update_config_cth) ->
{?eh,tc_start,{ct_update_config_SUITE, {init_per_group,group1,[]}}},
{?eh,cth,{'_',pre_init_per_group,
- [group1,contains(
+ [ct_update_config_SUITE,
+ group1,contains(
[post_init_per_suite,
init_per_suite,
pre_init_per_suite]),
[]]}},
{?eh,cth,{'_',post_init_per_group,
- [group1,
+ [ct_update_config_SUITE,
+ group1,
contains(
[post_init_per_suite,
init_per_suite,
@@ -898,7 +983,8 @@ test_events(update_config_cth) ->
{?eh,tc_start,{ct_update_config_SUITE,test_case}},
{?eh,cth,{'_',pre_init_per_testcase,
- [test_case,contains(
+ [ct_update_config_SUITE,
+ test_case,contains(
[post_init_per_group,
init_per_group,
pre_init_per_group,
@@ -907,7 +993,8 @@ test_events(update_config_cth) ->
pre_init_per_suite]),
[]]}},
{?eh,cth,{'_',post_end_per_testcase,
- [test_case,contains(
+ [ct_update_config_SUITE,
+ test_case,contains(
[init_per_testcase,
pre_init_per_testcase,
post_init_per_group,
@@ -921,7 +1008,8 @@ test_events(update_config_cth) ->
{?eh,tc_start,{ct_update_config_SUITE, {end_per_group,group1,[]}}},
{?eh,cth,{'_',pre_end_per_group,
- [group1,contains(
+ [ct_update_config_SUITE,
+ group1,contains(
[post_init_per_group,
init_per_group,
pre_init_per_group,
@@ -930,7 +1018,8 @@ test_events(update_config_cth) ->
pre_init_per_suite]),
[]]}},
{?eh,cth,{'_',post_end_per_group,
- [group1,
+ [ct_update_config_SUITE,
+ group1,
contains(
[pre_end_per_group,
post_init_per_group,
@@ -941,7 +1030,7 @@ test_events(update_config_cth) ->
pre_init_per_suite]),
ok,[]]}},
{?eh,tc_done,{ct_update_config_SUITE,{end_per_group,group1,[]},ok}},
-
+
{?eh,tc_start,{ct_update_config_SUITE,end_per_suite}},
{?eh,cth,{'_',pre_end_per_suite,
[ct_update_config_SUITE,contains(
@@ -974,7 +1063,7 @@ test_events(state_update_cth) ->
{?eh,cth,{'_',init,['_',[]]}},
{?eh,cth,{'_',init,['_',[]]}},
{?eh,tc_start,{'_',init_per_suite}},
-
+
{?eh,tc_done,{'_',end_per_suite,ok}},
{?eh,test_done,{'DEF','STOP_TIME'}},
{?eh,cth,{'_',terminate,[contains(
@@ -1018,10 +1107,10 @@ test_events(options_cth) ->
{?eh,tc_done,{ct_cth_empty_SUITE,init_per_suite,ok}},
{?eh,tc_start,{ct_cth_empty_SUITE,test_case}},
- {?eh,cth,{empty_cth,pre_init_per_testcase,[test_case,'$proplist',[test]]}},
- {?eh,cth,{empty_cth,post_end_per_testcase,[test_case,'$proplist','_',[test]]}},
+ {?eh,cth,{empty_cth,pre_init_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist',[test]]}},
+ {?eh,cth,{empty_cth,post_end_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist','_',[test]]}},
{?eh,tc_done,{ct_cth_empty_SUITE,test_case,ok}},
-
+
{?eh,tc_start,{ct_cth_empty_SUITE,end_per_suite}},
{?eh,cth,{empty_cth,pre_end_per_suite,
[ct_cth_empty_SUITE,'$proplist',[test]]}},
@@ -1051,14 +1140,14 @@ test_events(same_id_cth) ->
{?eh,tc_done,{ct_cth_empty_SUITE,init_per_suite,ok}}},
{?eh,tc_start,{ct_cth_empty_SUITE,test_case}},
- {?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[]]}},
+ {?eh,cth,{'_',pre_init_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist',[]]}},
{negative,
- {?eh,cth,{'_',pre_init_per_testcase,[test_case,'$proplist',[]]}},
- {?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[]]}}},
+ {?eh,cth,{'_',pre_init_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist',[]]}},
+ {?eh,cth,{'_',post_end_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist',ok,[]]}}},
{negative,
- {?eh,cth,{'_',post_end_per_testcase,[test_case,'$proplist',ok,[]]}},
+ {?eh,cth,{'_',post_end_per_testcase,[ct_cth_empty_SUITE,test_case,'$proplist',ok,[]]}},
{?eh,tc_done,{ct_cth_empty_SUITE,test_case,ok}}},
-
+
{?eh,tc_start,{ct_cth_empty_SUITE,end_per_suite}},
{?eh,cth,{'_',pre_end_per_suite,[ct_cth_empty_SUITE,'$proplist',[]]}},
{negative,
@@ -1094,11 +1183,13 @@ test_events(fail_n_skip_with_minimal_cth) ->
{?eh,tc_done,{ct_cth_fail_one_skip_one_SUITE,test_case2,{skipped,"skip it"}}},
{?eh,tc_start,{ct_cth_fail_one_skip_one_SUITE,test_case3}},
{?eh,tc_done,{ct_cth_fail_one_skip_one_SUITE,test_case3,{skipped,"skip it"}}},
- {?eh,cth,{empty_cth,on_tc_skip,[{test_case2,group2},
- {tc_user_skip,{skipped,"skip it"}},
+ {?eh,cth,{empty_cth,on_tc_skip,[ct_cth_fail_one_skip_one_SUITE,
+ {test_case2,group2},
+ {tc_user_skip,"skip it"},
[]]}},
- {?eh,cth,{empty_cth,on_tc_skip,[{test_case3,group2},
- {tc_user_skip,{skipped,"skip it"}},
+ {?eh,cth,{empty_cth,on_tc_skip,[ct_cth_fail_one_skip_one_SUITE,
+ {test_case3,group2},
+ {tc_user_skip,"skip it"},
[]]}},
{?eh,tc_start,{ct_cth_fail_one_skip_one_SUITE,{end_per_group,
group2,[parallel]}}},
@@ -1115,17 +1206,25 @@ test_events(fail_n_skip_with_minimal_cth) ->
];
test_events(prio_cth) ->
-
- GenPre = fun(Func,States) ->
- [{?eh,cth,{'_',Func,['_','_',State]}} ||
+ GenPre = fun(Func,States) when Func==pre_init_per_suite;
+ Func==pre_end_per_suite ->
+ [{?eh,cth,{'_',Func,['_','_',State]}} ||
+ State <- States];
+ (Func,States) ->
+ [{?eh,cth,{'_',Func,['_','_','_',State]}} ||
State <- States]
end,
- GenPost = fun(Func,States) ->
- [{?eh,cth,{'_',Func,['_','_','_',State]}} ||
+ GenPost = fun(Func,States) when Func==post_init_per_suite;
+ Func==post_end_per_suite ->
+ [{?eh,cth,{'_',Func,['_','_','_',State]}} ||
+ State <- States];
+ (Func,States) ->
+ [{?eh,cth,{'_',Func,['_','_','_','_',State]}} ||
State <- States]
- end,
-
+
+ end,
+
[{?eh,start_logging,{'DEF','RUNDIR'}},
{?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}}] ++
@@ -1136,7 +1235,7 @@ test_events(prio_cth) ->
[[1100,100],[600,200],[600,600],[700],[800],[900],[1000],
[1200,1050],[1100],[1200]]) ++
[{?eh,tc_done,{ct_cth_prio_SUITE,init_per_suite,ok}},
-
+
[{?eh,tc_start,{ct_cth_prio_SUITE,{init_per_group,'_',[]}}}] ++
GenPre(pre_init_per_group,
@@ -1147,7 +1246,7 @@ test_events(prio_cth) ->
[900],[900,900],[500,900],[1000],[1200,1050],
[1100],[1200]]) ++
[{?eh,tc_done,{ct_cth_prio_SUITE,{init_per_group,'_',[]},ok}}] ++
-
+
[{?eh,tc_start,{ct_cth_prio_SUITE,test_case}}] ++
GenPre(pre_init_per_testcase,
[[1100,100],[600,200],[600,600],[600],[700],[800],
@@ -1161,7 +1260,7 @@ test_events(prio_cth) ->
[{?eh,tc_done,{ct_cth_prio_SUITE,test_case,ok}},
{?eh,tc_start,{ct_cth_prio_SUITE,{end_per_group,'_',[]}}}] ++
- GenPre(pre_end_per_group,
+ GenPre(pre_end_per_group,
lists:reverse(
[[1100,100],[600,200],[600,600],[600],[700],[800],
[900],[900,900],[500,900],[1000],[1200,1050],
@@ -1200,30 +1299,30 @@ test_events(no_config) ->
{?eh,tc_done,{ct_framework,init_per_suite,ok}},
{?eh,tc_start,{ct_no_config_SUITE,test_case_1}},
{?eh,cth,{empty_cth,pre_init_per_testcase,
- [test_case_1,'$proplist',[]]}},
+ [ct_no_config_SUITE,test_case_1,'$proplist',[]]}},
{?eh,cth,{empty_cth,post_end_per_testcase,
- [test_case_1,'$proplist',ok,[]]}},
+ [ct_no_config_SUITE,test_case_1,'$proplist',ok,[]]}},
{?eh,tc_done,{ct_no_config_SUITE,test_case_1,ok}},
{?eh,test_stats,{1,0,{0,0}}},
[{?eh,tc_start,{ct_framework,{init_per_group,test_group,'$proplist'}}},
{?eh,cth,{empty_cth,pre_init_per_group,
- [test_group,'$proplist',[]]}},
+ [ct_no_config_SUITE,test_group,'$proplist',[]]}},
{?eh,cth,{empty_cth,post_init_per_group,
- [test_group,'$proplist','$proplist',[]]}},
+ [ct_no_config_SUITE,test_group,'$proplist','$proplist',[]]}},
{?eh,tc_done,{ct_framework,
{init_per_group,test_group,'$proplist'},ok}},
{?eh,tc_start,{ct_no_config_SUITE,test_case_2}},
{?eh,cth,{empty_cth,pre_init_per_testcase,
- [test_case_2,'$proplist',[]]}},
+ [ct_no_config_SUITE,test_case_2,'$proplist',[]]}},
{?eh,cth,{empty_cth,post_end_per_testcase,
- [test_case_2,'$proplist',ok,[]]}},
+ [ct_no_config_SUITE,test_case_2,'$proplist',ok,[]]}},
{?eh,tc_done,{ct_no_config_SUITE,test_case_2,ok}},
{?eh,test_stats,{2,0,{0,0}}},
{?eh,tc_start,{ct_framework,{end_per_group,test_group,'$proplist'}}},
{?eh,cth,{empty_cth,pre_end_per_group,
- [test_group,'$proplist',[]]}},
+ [ct_no_config_SUITE,test_group,'$proplist',[]]}},
{?eh,cth,{empty_cth,post_end_per_group,
- [test_group,'$proplist',ok,[]]}},
+ [ct_no_config_SUITE,test_group,'$proplist',ok,[]]}},
{?eh,tc_done,{ct_framework,{end_per_group,test_group,'$proplist'},ok}}],
{?eh,tc_start,{ct_framework,end_per_suite}},
{?eh,cth,{empty_cth,pre_end_per_suite,
@@ -1236,6 +1335,166 @@ test_events(no_config) ->
{?eh,stop_logging,[]}
];
+test_events(no_init_suite_config) ->
+ [
+ {?eh,start_logging,{'DEF','RUNDIR'}},
+ {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
+ {?eh,cth,{empty_cth,init,[{'_','_','_'},[]]}},
+ {?eh,start_info,{1,1,1}},
+ {?eh,tc_start,{ct_no_init_suite_config_SUITE,init_per_suite}},
+ {?eh,cth,{empty_cth,pre_init_per_suite,
+ [ct_no_init_suite_config_SUITE,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_init_per_suite,
+ [ct_no_init_suite_config_SUITE,'$proplist','_',[]]}},
+ {?eh,tc_done,{ct_no_init_suite_config_SUITE,init_per_suite,
+ {failed,{error,{undef,'_'}}}}},
+ {?eh,cth,{empty_cth,on_tc_fail,[ct_no_init_suite_config_SUITE,
+ init_per_suite,
+ {undef,'_'},[]]}},
+ {?eh,tc_auto_skip,{ct_no_init_suite_config_SUITE,test_case,
+ {failed,{ct_no_init_suite_config_SUITE,init_per_suite,
+ {'EXIT',{undef,'_'}}}}}},
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [ct_no_init_suite_config_SUITE,
+ test_case,
+ {tc_auto_skip,
+ {failed,{ct_no_init_suite_config_SUITE,init_per_suite,
+ {'EXIT',{undef,'_'}}}}},
+ []]}},
+ {?eh,test_stats,{0,0,{0,1}}},
+ {?eh,tc_auto_skip,{ct_no_init_suite_config_SUITE,end_per_suite,
+ {failed,{ct_no_init_suite_config_SUITE,init_per_suite,
+ {'EXIT',{undef,'_'}}}}}},
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [ct_no_init_suite_config_SUITE,
+ end_per_suite,
+ {tc_auto_skip,
+ {failed,{ct_no_init_suite_config_SUITE,init_per_suite,
+ {'EXIT',{undef,'_'}}}}},
+ []]}},
+ {?eh,test_done,{'DEF','STOP_TIME'}},
+ {?eh,cth,{empty_cth,terminate,[[]]}},
+ {?eh,stop_logging,[]}
+ ];
+
+test_events(no_init_config) ->
+ [
+ {?eh,start_logging,{'DEF','RUNDIR'}},
+ {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
+ {?eh,cth,{empty_cth,init,[{'_','_','_'},[]]}},
+ {?eh,start_info,{1,1,2}},
+ {?eh,tc_start,{ct_no_init_config_SUITE,init_per_suite}},
+ {?eh,cth,{empty_cth,pre_init_per_suite,
+ [ct_no_init_config_SUITE,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_init_per_suite,
+ [ct_no_init_config_SUITE,'$proplist','$proplist',[]]}},
+ {?eh,tc_done,{ct_no_init_config_SUITE,init_per_suite,ok}},
+ {?eh,tc_start,{ct_no_init_config_SUITE,test_case_1}},
+ {?eh,cth,{empty_cth,pre_init_per_testcase,
+ [ct_no_init_config_SUITE,test_case_1,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_end_per_testcase,
+ [ct_no_init_config_SUITE,test_case_1,'$proplist',ok,[]]}},
+ {?eh,tc_done,{ct_no_init_config_SUITE,test_case_1,ok}},
+ {?eh,test_stats,{1,0,{0,0}}},
+ [{?eh,tc_start,{ct_no_init_config_SUITE,{init_per_group,test_group,[]}}},
+ {?eh,cth,{empty_cth,pre_init_per_group,
+ [ct_no_init_config_SUITE,test_group,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_init_per_group,
+ [ct_no_init_config_SUITE,test_group,'$proplist','_',[]]}},
+ {?eh,tc_done,{ct_no_init_config_SUITE,{init_per_group,test_group,[]},
+ {failed,{error,{undef,'_'}}}}},
+ {?eh,cth,{empty_cth,on_tc_fail,[ct_no_init_config_SUITE,
+ {init_per_group,test_group},
+ {undef,'_'},[]]}},
+ {?eh,tc_auto_skip,{ct_no_init_config_SUITE,{test_case_2,test_group},
+ {failed,{ct_no_init_config_SUITE,init_per_group,
+ {'EXIT',{undef,'_'}}}}}},
+ {?eh,cth,{empty_cth,on_tc_skip,[ct_no_init_config_SUITE,
+ {test_case_2,test_group},
+ {tc_auto_skip,
+ {failed,
+ {ct_no_init_config_SUITE,init_per_group,
+ {'EXIT',{undef,'_'}}}}},
+ []]}},
+ {?eh,test_stats,{1,0,{0,1}}},
+ {?eh,tc_auto_skip,{ct_no_init_config_SUITE,{end_per_group,test_group},
+ {failed,{ct_no_init_config_SUITE,init_per_group,
+ {'EXIT',{undef,'_'}}}}}},
+ {?eh,cth,{empty_cth,on_tc_skip,[ct_no_init_config_SUITE,
+ {end_per_group,test_group},
+ {tc_auto_skip,
+ {failed,
+ {ct_no_init_config_SUITE,init_per_group,
+ {'EXIT',{undef,'_'}}}}},
+ []]}}],
+ {?eh,tc_start,{ct_no_init_config_SUITE,end_per_suite}},
+ {?eh,cth,{empty_cth,pre_end_per_suite,
+ [ct_no_init_config_SUITE,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_end_per_suite,
+ [ct_no_init_config_SUITE,'$proplist',ok,[]]}},
+ {?eh,tc_done,{ct_no_init_config_SUITE,end_per_suite,ok}},
+ {?eh,test_done,{'DEF','STOP_TIME'}},
+ {?eh,cth,{empty_cth,terminate,[[]]}},
+ {?eh,stop_logging,[]}
+ ];
+
+test_events(no_end_config) ->
+ [
+ {?eh,start_logging,{'DEF','RUNDIR'}},
+ {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
+ {?eh,cth,{empty_cth,init,[{'_','_','_'},[]]}},
+ {?eh,start_info,{1,1,2}},
+ {?eh,tc_start,{ct_no_end_config_SUITE,init_per_suite}},
+ {?eh,cth,{empty_cth,pre_init_per_suite,
+ [ct_no_end_config_SUITE,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_init_per_suite,
+ [ct_no_end_config_SUITE,'$proplist','$proplist',[]]}},
+ {?eh,tc_done,{ct_no_end_config_SUITE,init_per_suite,ok}},
+ {?eh,tc_start,{ct_no_end_config_SUITE,test_case_1}},
+ {?eh,cth,{empty_cth,pre_init_per_testcase,
+ [ct_no_end_config_SUITE,test_case_1,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_end_per_testcase,
+ [ct_no_end_config_SUITE,test_case_1,'$proplist',ok,[]]}},
+ {?eh,tc_done,{ct_no_end_config_SUITE,test_case_1,ok}},
+ {?eh,test_stats,{1,0,{0,0}}},
+ [{?eh,tc_start,{ct_no_end_config_SUITE,
+ {init_per_group,test_group,'$proplist'}}},
+ {?eh,cth,{empty_cth,pre_init_per_group,
+ [ct_no_end_config_SUITE,test_group,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_init_per_group,
+ [ct_no_end_config_SUITE,test_group,'$proplist','$proplist',[]]}},
+ {?eh,tc_done,{ct_no_end_config_SUITE,
+ {init_per_group,test_group,'$proplist'},ok}},
+ {?eh,tc_start,{ct_no_end_config_SUITE,test_case_2}},
+ {?eh,cth,{empty_cth,pre_init_per_testcase,
+ [ct_no_end_config_SUITE,test_case_2,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_end_per_testcase,
+ [ct_no_end_config_SUITE,test_case_2,'$proplist',ok,[]]}},
+ {?eh,tc_done,{ct_no_end_config_SUITE,test_case_2,ok}},
+ {?eh,test_stats,{2,0,{0,0}}},
+ {?eh,tc_start,{ct_no_end_config_SUITE,
+ {end_per_group,test_group,'$proplist'}}},
+ {?eh,cth,{empty_cth,pre_end_per_group,
+ [ct_no_end_config_SUITE,test_group,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_end_per_group,
+ [ct_no_end_config_SUITE,test_group,'$proplist','_',[]]}},
+ {?eh,tc_done,{ct_no_end_config_SUITE,{end_per_group,test_group,[]},
+ {failed,{error,{undef,'_'}}}}},
+ {?eh,cth,{empty_cth,on_tc_fail,[ct_no_end_config_SUITE,
+ {end_per_group,test_group},
+ {undef,'_'},[]]}}],
+ {?eh,tc_start,{ct_no_end_config_SUITE,end_per_suite}},
+ {?eh,cth,{empty_cth,pre_end_per_suite,
+ [ct_no_end_config_SUITE,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_end_per_suite,
+ [ct_no_end_config_SUITE,'$proplist','_',[]]}},
+ {?eh,tc_done,{ct_no_end_config_SUITE,end_per_suite,
+ {failed,{error,{undef,'_'}}}}},
+ {?eh,test_done,{'DEF','STOP_TIME'}},
+ {?eh,cth,{empty_cth,terminate,[[]]}},
+ {?eh,stop_logging,[]}
+ ];
+
test_events(data_dir) ->
[
{?eh,start_logging,{'DEF','RUNDIR'}},
@@ -1250,30 +1509,30 @@ test_events(data_dir) ->
{?eh,tc_done,{ct_framework,init_per_suite,ok}},
{?eh,tc_start,{ct_data_dir_SUITE,test_case_1}},
{?eh,cth,{empty_cth,pre_init_per_testcase,
- [test_case_1,'$proplist',[{data_dir_name,"ct_data_dir_SUITE_data"}]]}},
+ [ct_data_dir_SUITE,test_case_1,'$proplist',[{data_dir_name,"ct_data_dir_SUITE_data"}]]}},
{?eh,cth,{empty_cth,post_end_per_testcase,
- [test_case_1,'$proplist',ok,[{data_dir_name,"ct_data_dir_SUITE_data"}]]}},
+ [ct_data_dir_SUITE,test_case_1,'$proplist',ok,[{data_dir_name,"ct_data_dir_SUITE_data"}]]}},
{?eh,tc_done,{ct_data_dir_SUITE,test_case_1,ok}},
{?eh,test_stats,{1,0,{0,0}}},
[{?eh,tc_start,{ct_framework,{init_per_group,test_group,'$proplist'}}},
{?eh,cth,{empty_cth,pre_init_per_group,
- [test_group,'$proplist',[{data_dir_name,"ct_data_dir_SUITE_data"}]]}},
+ [ct_data_dir_SUITE,test_group,'$proplist',[{data_dir_name,"ct_data_dir_SUITE_data"}]]}},
{?eh,cth,{empty_cth,post_init_per_group,
- [test_group,'$proplist','$proplist',[{data_dir_name,"ct_data_dir_SUITE_data"}]]}},
+ [ct_data_dir_SUITE,test_group,'$proplist','$proplist',[{data_dir_name,"ct_data_dir_SUITE_data"}]]}},
{?eh,tc_done,{ct_framework,
{init_per_group,test_group,'$proplist'},ok}},
{?eh,tc_start,{ct_data_dir_SUITE,test_case_2}},
{?eh,cth,{empty_cth,pre_init_per_testcase,
- [test_case_2,'$proplist',[{data_dir_name,"ct_data_dir_SUITE_data"}]]}},
+ [ct_data_dir_SUITE,test_case_2,'$proplist',[{data_dir_name,"ct_data_dir_SUITE_data"}]]}},
{?eh,cth,{empty_cth,post_end_per_testcase,
- [test_case_2,'$proplist',ok,[{data_dir_name,"ct_data_dir_SUITE_data"}]]}},
+ [ct_data_dir_SUITE,test_case_2,'$proplist',ok,[{data_dir_name,"ct_data_dir_SUITE_data"}]]}},
{?eh,tc_done,{ct_data_dir_SUITE,test_case_2,ok}},
{?eh,test_stats,{2,0,{0,0}}},
{?eh,tc_start,{ct_framework,{end_per_group,test_group,'$proplist'}}},
{?eh,cth,{empty_cth,pre_end_per_group,
- [test_group,'$proplist',[{data_dir_name,"ct_data_dir_SUITE_data"}]]}},
+ [ct_data_dir_SUITE,test_group,'$proplist',[{data_dir_name,"ct_data_dir_SUITE_data"}]]}},
{?eh,cth,{empty_cth,post_end_per_group,
- [test_group,'$proplist',ok,[{data_dir_name,"ct_data_dir_SUITE_data"}]]}},
+ [ct_data_dir_SUITE,test_group,'$proplist',ok,[{data_dir_name,"ct_data_dir_SUITE_data"}]]}},
{?eh,tc_done,{ct_framework,{end_per_group,test_group,'$proplist'},ok}}],
{?eh,tc_start,{ct_framework,end_per_suite}},
{?eh,cth,{empty_cth,pre_end_per_suite,
@@ -1300,16 +1559,654 @@ test_events(cth_log) ->
[{suite,cth_log_SUITE},parallel]}}},
{?eh,tc_done,{ct_framework,{end_per_group,g1,
[{suite,cth_log_SUITE},parallel]},ok}}]},
-
+
{?eh,tc_done,{cth_log_SUITE,end_per_suite,ok}},
{?eh,test_done,{'DEF','STOP_TIME'}},
{?eh,stop_logging,[]}
];
+test_events(fallback) ->
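+%% Expected events when the installed hook (fallback_cth.erl) implements
+%% the group/testcase callbacks without the Suite argument; the hook
+%% then reports the placeholder 'fallback_nosuite' instead of the real
+%% suite name.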
+ [
+ {?eh,start_logging,{'DEF','RUNDIR'}},
+ {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
+ {?eh,cth,{empty_cth,id,[[]]}},
+ {?eh,cth,{empty_cth,init,[{'_','_','_'},[]]}},
+ {?eh,tc_start,{all_hook_callbacks_SUITE,init_per_suite}},
+ {?eh,cth,{empty_cth,pre_init_per_suite,
+ [all_hook_callbacks_SUITE,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_init_per_suite,
+ [all_hook_callbacks_SUITE,'$proplist','$proplist',[]]}},
+ {?eh,tc_done,{all_hook_callbacks_SUITE,init_per_suite,ok}},
+
+ [{?eh,tc_start,{ct_framework,{init_per_group,test_group,'$proplist'}}},
+ {?eh,cth,{empty_cth,pre_init_per_group,
+ [fallback_nosuite,test_group,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_init_per_group,
+ [fallback_nosuite,test_group,'$proplist','$proplist',[]]}},
+ {?eh,tc_done,{ct_framework,
+ {init_per_group,test_group,'$proplist'},ok}},
+ {?eh,tc_start,{all_hook_callbacks_SUITE,test_case}},
+ {?eh,cth,{empty_cth,pre_init_per_testcase,
+ [fallback_nosuite,test_case,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_end_per_testcase,
+ [fallback_nosuite,test_case,'$proplist',ok,[]]}},
+ {?eh,tc_done,{all_hook_callbacks_SUITE,test_case,ok}},
+ {?eh,test_stats,{1,0,{0,0}}},
+ {?eh,tc_start,{ct_framework,{end_per_group,test_group,'$proplist'}}},
+ {?eh,cth,{empty_cth,pre_end_per_group,
+ [fallback_nosuite,test_group,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_end_per_group,
+ [fallback_nosuite,test_group,'$proplist',ok,[]]}},
+ {?eh,tc_done,{ct_framework,{end_per_group,test_group,'$proplist'},ok}}],
+ {?eh,tc_start,{all_hook_callbacks_SUITE,test_case}},
+ {?eh,cth,{empty_cth,pre_init_per_testcase,
+ [fallback_nosuite,test_case,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_init_per_testcase,
+ [fallback_nosuite,test_case,'$proplist','_',[]]}},
+ {?eh,cth,{empty_cth,pre_end_per_testcase,
+ [fallback_nosuite,test_case,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_end_per_testcase,
+ [fallback_nosuite,test_case,'$proplist','_',[]]}},
+ {?eh,tc_done,{all_hook_callbacks_SUITE,test_case,ok}},
+ {?eh,test_stats,{2,0,{0,0}}},
+ {?eh,tc_start,{all_hook_callbacks_SUITE,skip_case}},
+ {?eh,cth,{empty_cth,pre_init_per_testcase,
+ [fallback_nosuite,skip_case,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_init_per_testcase,
+ [fallback_nosuite,skip_case,'$proplist',
+ {skip,"Skipped in init_per_testcase/2"},[]]}},
+ {?eh,tc_done,{all_hook_callbacks_SUITE,skip_case,
+ {skipped,"Skipped in init_per_testcase/2"}}},
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [fallback_nosuite,skip_case,
+ {tc_user_skip,"Skipped in init_per_testcase/2"},
+ []]}},
+ {?eh,test_stats,{2,0,{1,0}}},
+ {?eh,tc_start,{all_hook_callbacks_SUITE,end_per_suite}},
+ {?eh,cth,{empty_cth,pre_end_per_suite,
+ [all_hook_callbacks_SUITE,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_end_per_suite,
+ [all_hook_callbacks_SUITE,'$proplist','_',[]]}},
+ {?eh,tc_done,{all_hook_callbacks_SUITE,end_per_suite,ok}},
+ {?eh,test_done,{'DEF','STOP_TIME'}},
+ {?eh,cth,{empty_cth,terminate,[[]]}},
+ {?eh,stop_logging,[]}
+ ];
+
+test_events(callbacks_on_skip) ->
+ %% skip_cth.erl will send a 'cth_error' event if a hook is
+ %% erroneously called. Therefore, all Events are changed to
+ %% {negative,{?eh,cth_error,'_'},Event}
+ %% at the end of this function.
+ Events =
+ [
+ {?eh,start_logging,{'DEF','RUNDIR'}},
+ {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
+ {?eh,cth,{empty_cth,id,[[]]}},
+ {?eh,cth,{empty_cth,init,[{'_','_','_'},[]]}},
+ {?eh,start_info,{6,6,15}},
+
+ %% all_hook_callbacks_SUITE is skipped in spec
+ %% Only the on_tc_skip callback shall be called
+ {?eh,tc_user_skip,{all_hook_callbacks_SUITE,all,"Skipped in spec"}},
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [all_hook_callbacks_SUITE,all,
+ {tc_user_skip,"Skipped in spec"},
+ []]}},
+ {?eh,test_stats,{0,0,{1,0}}},
+
+ %% skip_init_SUITE is skipped in its init_per_suite function.
+ %% No group or testcase functions shall be called.
+ {?eh,tc_start,{skip_init_SUITE,init_per_suite}},
+ {?eh,cth,{empty_cth,pre_init_per_suite,
+ [skip_init_SUITE,
+ '$proplist',
+ []]}},
+ {?eh,cth,{empty_cth,post_init_per_suite,
+ [skip_init_SUITE,
+ '$proplist',
+ {skip,"Skipped in init_per_suite/1"},
+ []]}},
+ {?eh,tc_done,{skip_init_SUITE,init_per_suite,
+ {skipped,"Skipped in init_per_suite/1"}}},
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [skip_init_SUITE,init_per_suite,
+ {tc_user_skip,"Skipped in init_per_suite/1"},
+ []]}},
+ {?eh,tc_user_skip,{skip_init_SUITE,test_case,"Skipped in init_per_suite/1"}},
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [skip_init_SUITE,test_case,
+ {tc_user_skip,"Skipped in init_per_suite/1"},
+ []]}},
+ {?eh,test_stats,{0,0,{2,0}}},
+ {?eh,tc_user_skip,{skip_init_SUITE,end_per_suite,
+ "Skipped in init_per_suite/1"}},
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [skip_init_SUITE,end_per_suite,
+ {tc_user_skip,"Skipped in init_per_suite/1"},
+ []]}},
+
+ %% skip_req_SUITE is auto-skipped since a 'require' statement
+ %% returned by suite/0 is not fulfilled.
+ %% No group or testcase functions shall be called.
+ {?eh,tc_start,{skip_req_SUITE,init_per_suite}},
+ {?eh,tc_done,{skip_req_SUITE,init_per_suite,
+ {auto_skipped,{require_failed_in_suite0,
+ {not_available,whatever}}}}},
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [skip_req_SUITE,init_per_suite,
+ {tc_auto_skip,{require_failed_in_suite0,
+ {not_available,whatever}}},
+ []]}},
+ {?eh,tc_auto_skip,{skip_req_SUITE,test_case,{require_failed_in_suite0,
+ {not_available,whatever}}}},
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [skip_req_SUITE,test_case,
+ {tc_auto_skip,{require_failed_in_suite0,
+ {not_available,whatever}}},
+ []]}},
+ {?eh,test_stats,{0,0,{2,1}}},
+ {?eh,tc_auto_skip,{skip_req_SUITE,end_per_suite,
+ {require_failed_in_suite0,
+ {not_available,whatever}}}},
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [skip_req_SUITE,end_per_suite,
+ {tc_auto_skip,{require_failed_in_suite0,
+ {not_available,whatever}}},
+ []]}},
+
+ %% skip_fail_SUITE is auto-skipped since the suite/0 function
+ %% returns a faulty value.
+ %% No group or testcase functions shall be called.
+ {?eh,tc_start,{skip_fail_SUITE,init_per_suite}},
+ {?eh,tc_done,{skip_fail_SUITE,init_per_suite,
+ {failed,{error,{suite0_failed,bad_return_value}}}}},
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [skip_fail_SUITE,init_per_suite,
+ {tc_auto_skip,
+ {failed,{error,{suite0_failed,bad_return_value}}}},
+ []]}},
+ {?eh,tc_auto_skip,{skip_fail_SUITE,test_case,
+ {failed,{error,{suite0_failed,bad_return_value}}}}},
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [skip_fail_SUITE,test_case,
+ {tc_auto_skip,
+ {failed,{error,{suite0_failed,bad_return_value}}}},
+ []]}},
+ {?eh,test_stats,{0,0,{2,2}}},
+ {?eh,tc_auto_skip,{skip_fail_SUITE,end_per_suite,
+ {failed,{error,{suite0_failed,bad_return_value}}}}},
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [skip_fail_SUITE,end_per_suite,
+ {tc_auto_skip,
+ {failed,{error,{suite0_failed,bad_return_value}}}},
+ []]}},
+
+ %% skip_group_SUITE
+ {?eh,tc_start,{skip_group_SUITE,init_per_suite}},
+ {?eh,cth,{empty_cth,pre_init_per_suite,
+ [skip_group_SUITE,
+ '$proplist',
+ []]}},
+ {?eh,cth,{empty_cth,post_init_per_suite,
+ [skip_group_SUITE,
+ '$proplist',
+ '_',
+ []]}},
+ {?eh,tc_done,{skip_group_SUITE,init_per_suite,ok}},
+
+ %% test_group_1 - auto_skip due to require failed
+ [{?eh,tc_start,{skip_group_SUITE,{init_per_group,test_group_1,[]}}},
+ {?eh,tc_done,
+ {skip_group_SUITE,{init_per_group,test_group_1,[]},
+ {auto_skipped,{require_failed,{not_available,whatever}}}}},
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [skip_group_SUITE,
+ {init_per_group,test_group_1},
+ {tc_auto_skip,{require_failed,{not_available,whatever}}},
+ []]}},
+ {?eh,tc_auto_skip,{skip_group_SUITE,{test_case,test_group_1},
+ {require_failed,{not_available,whatever}}}},
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [skip_group_SUITE,
+ {test_case,test_group_1},
+ {tc_auto_skip,{require_failed,{not_available,whatever}}},
+ []]}},
+ {?eh,test_stats,{0,0,{2,3}}},
+ {?eh,tc_auto_skip,{skip_group_SUITE,{end_per_group,test_group_1},
+ {require_failed,{not_available,whatever}}}}],
+ %% The following appears to be outside of the group, but
+ %% that is only an implementation detail in
+ %% ct_test_support.erl - it does not know about events from
+ %% suite-specific hooks and regards the group as ended with
+ %% the tc_auto_skip event for end_per_group above.
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [skip_group_SUITE,
+ {end_per_group,test_group_1},
+ {tc_auto_skip,{require_failed,{not_available,whatever}}},
+ []]}},
+
+ %% test_group_2 - auto_skip due to failed return from group/1
+ [{?eh,tc_start,{skip_group_SUITE,{init_per_group,test_group_2,[]}}},
+ {?eh,tc_done,
+ {skip_group_SUITE,{init_per_group,test_group_2,[]},
+ {auto_skipped,{group0_failed,bad_return_value}}}},
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [skip_group_SUITE,
+ {init_per_group,test_group_2},
+ {tc_auto_skip,{group0_failed,bad_return_value}},
+ []]}},
+ {?eh,tc_auto_skip,{skip_group_SUITE,{test_case,test_group_2},
+ {group0_failed,bad_return_value}}},
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [skip_group_SUITE,
+ {test_case,test_group_2},
+ {tc_auto_skip,{group0_failed,bad_return_value}},
+ []]}},
+ {?eh,test_stats,{0,0,{2,4}}},
+ {?eh,tc_auto_skip,{skip_group_SUITE,{end_per_group,test_group_2},
+ {group0_failed,bad_return_value}}}],
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [skip_group_SUITE,
+ {end_per_group,test_group_2},
+ {tc_auto_skip,{group0_failed,bad_return_value}},
+ []]}},
+ %% test_group_3 - user_skip in init_per_group/2
+ [{?eh,tc_start,
+ {skip_group_SUITE,{init_per_group,test_group_3,[]}}},
+ {?eh,cth,{empty_cth,pre_init_per_group,
+ [skip_group_SUITE,test_group_3,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_init_per_group,
+ [skip_group_SUITE,test_group_3,'$proplist',
+ {skip,"Skipped in init_per_group/2"},
+ []]}},
+ {?eh,tc_done,{skip_group_SUITE,
+ {init_per_group,test_group_3,[]},
+ {skipped,"Skipped in init_per_group/2"}}},
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [skip_group_SUITE,
+ {init_per_group,test_group_3},
+ {tc_user_skip,"Skipped in init_per_group/2"},
+ []]}},
+ {?eh,tc_user_skip,{skip_group_SUITE,
+ {test_case,test_group_3},
+ "Skipped in init_per_group/2"}},
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [skip_group_SUITE,
+ {test_case,test_group_3},
+ {tc_user_skip,"Skipped in init_per_group/2"},
+ []]}},
+ {?eh,test_stats,{0,0,{3,4}}},
+ {?eh,tc_user_skip,{skip_group_SUITE,
+ {end_per_group,test_group_3},
+ "Skipped in init_per_group/2"}}],
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [skip_group_SUITE,
+ {end_per_group,test_group_3},
+ {tc_user_skip,"Skipped in init_per_group/2"},
+ []]}},
+
+ {?eh,tc_start,{skip_group_SUITE,end_per_suite}},
+ {?eh,cth,{empty_cth,pre_end_per_suite,
+ [skip_group_SUITE,
+ '$proplist',
+ []]}},
+ {?eh,cth,{empty_cth,post_end_per_suite,
+ [skip_group_SUITE,
+ '$proplist',
+ ok,[]]}},
+ {?eh,tc_done,{skip_group_SUITE,end_per_suite,ok}},
+
+
+ %% skip_case_SUITE has 4 test cases which are all skipped in
+ %% different ways
+ {?eh,tc_start,{skip_case_SUITE,init_per_suite}},
+ {?eh,cth,{empty_cth,pre_init_per_suite,
+ [skip_case_SUITE,
+ '$proplist',
+ []]}},
+ {?eh,cth,{empty_cth,post_init_per_suite,
+ [skip_case_SUITE,
+ '$proplist',
+ '_',
+ []]}},
+ {?eh,tc_done,{skip_case_SUITE,init_per_suite,ok}},
+
+ %% Skip in spec -> only on_tc_skip shall be called
+ {?eh,tc_user_skip,{skip_case_SUITE,skip_in_spec,"Skipped in spec"}},
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [skip_case_SUITE,skip_in_spec,
+ {tc_user_skip,"Skipped in spec"},
+ []]}},
+ {?eh,test_stats,{0,0,{4,4}}},
+
+ %% Skip in init_per_testcase -> pre/post_end_per_testcase
+ %% shall not be called
+ {?eh,tc_start,{skip_case_SUITE,skip_in_init}},
+ {?eh,cth,{empty_cth,pre_init_per_testcase,
+ [skip_case_SUITE,skip_in_init,
+ '$proplist',
+ []]}},
+ {?eh,cth,{empty_cth,post_init_per_testcase,
+ [skip_case_SUITE,skip_in_init,
+ '$proplist',
+ {skip,"Skipped in init_per_testcase/2"},
+ []]}},
+ {?eh,tc_done,{skip_case_SUITE,skip_in_init,
+ {skipped,"Skipped in init_per_testcase/2"}}},
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [skip_case_SUITE,skip_in_init,
+ {tc_user_skip,"Skipped in init_per_testcase/2"},
+ []]}},
+ {?eh,test_stats,{0,0,{5,4}}},
+
+ %% Fail in init_per_testcase -> pre/post_end_per_testcase
+ %% shall not be called
+ {?eh,tc_start,{skip_case_SUITE,fail_in_init}},
+ {?eh,cth,{empty_cth,pre_init_per_testcase,
+ [skip_case_SUITE,fail_in_init,
+ '$proplist',
+ []]}},
+ {?eh,cth,{empty_cth,post_init_per_testcase,
+ [skip_case_SUITE,fail_in_init,
+ '$proplist',
+ {skip,{failed,'_'}},
+ []]}},
+ {?eh,tc_done,{skip_case_SUITE,fail_in_init,
+ {auto_skipped,{failed,'_'}}}},
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [skip_case_SUITE,fail_in_init,
+ {tc_auto_skip,{failed,'_'}},
+ []]}},
+ {?eh,test_stats,{0,0,{5,5}}},
+
+ %% Exit in init_per_testcase -> pre/post_end_per_testcase
+ %% shall not be called
+ {?eh,tc_start,{skip_case_SUITE,exit_in_init}},
+ {?eh,cth,{empty_cth,pre_init_per_testcase,
+ [skip_case_SUITE,exit_in_init,
+ '$proplist',
+ []]}},
+ {?eh,cth,{empty_cth,post_init_per_testcase,
+ [skip_case_SUITE,exit_in_init,
+ '$proplist',
+ {skip,{failed,'_'}},
+ []]}},
+ {?eh,tc_done,{skip_case_SUITE,exit_in_init,
+ {auto_skipped,{failed,'_'}}}},
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [skip_case_SUITE,exit_in_init,
+ {tc_auto_skip,{failed,'_'}},
+ []]}},
+ {?eh,test_stats,{0,0,{5,6}}},
+
+ %% Fail in end_per_testcase -> all hooks shall be called and
+ %% test shall succeed.
+ {?eh,tc_start,{skip_case_SUITE,fail_in_end}},
+ {?eh,cth,{empty_cth,pre_init_per_testcase,
+ [skip_case_SUITE,fail_in_end,
+ '$proplist',
+ []]}},
+ {?eh,cth,{empty_cth,post_init_per_testcase,
+ [skip_case_SUITE,fail_in_end,
+ '$proplist',
+ ok,
+ []]}},
+ {?eh,cth,{empty_cth,pre_end_per_testcase,
+ [skip_case_SUITE,fail_in_end,
+ '$proplist',
+ []]}},
+ {?eh,cth,{empty_cth,post_end_per_testcase,
+ [skip_case_SUITE,fail_in_end,
+ '$proplist',
+ {failed,
+ {skip_case_SUITE,end_per_testcase,
+ {'EXIT',
+ {test_case_failed,"Failed in end_per_testcase/2"}}}},
+ []]}},
+ {?eh,tc_done,{skip_case_SUITE,fail_in_end,
+ {failed,
+ {skip_case_SUITE,end_per_testcase,
+ {'EXIT',
+ {test_case_failed,"Failed in end_per_testcase/2"}}}}}},
+ {?eh,test_stats,{1,0,{5,6}}},
+
+ %% Exit in end_per_testcase -> all hooks shall be called and
+ %% test shall succeed.
+ {?eh,tc_start,{skip_case_SUITE,exit_in_end}},
+ {?eh,cth,{empty_cth,pre_init_per_testcase,
+ [skip_case_SUITE,exit_in_end,
+ '$proplist',
+ []]}},
+ {?eh,cth,{empty_cth,post_init_per_testcase,
+ [skip_case_SUITE,exit_in_end,
+ '$proplist',
+ ok,
+ []]}},
+ {?eh,cth,{empty_cth,pre_end_per_testcase,
+ [skip_case_SUITE,exit_in_end,
+ '$proplist',
+ []]}},
+ {?eh,cth,{empty_cth,post_end_per_testcase,
+ [skip_case_SUITE,exit_in_end,
+ '$proplist',
+ {failed,
+ {skip_case_SUITE,end_per_testcase,
+ {'EXIT',"Exit in end_per_testcase/2"}}},
+ []]}},
+ {?eh,tc_done,{skip_case_SUITE,exit_in_end,
+ {failed,
+ {skip_case_SUITE,end_per_testcase,
+ {'EXIT',"Exit in end_per_testcase/2"}}}}},
+ {?eh,test_stats,{2,0,{5,6}}},
+
+ %% Skip in testcase function -> all callbacks shall be called
+ {?eh,tc_start,{skip_case_SUITE,skip_in_case}},
+ {?eh,cth,{empty_cth,pre_init_per_testcase,
+ [skip_case_SUITE,skip_in_case,
+ '$proplist',
+ []]}},
+ {?eh,cth,{empty_cth,post_init_per_testcase,
+ [skip_case_SUITE,skip_in_case,
+ '$proplist',
+ ok,[]]}},
+ {?eh,cth,{empty_cth,pre_end_per_testcase,
+ [skip_case_SUITE,skip_in_case,
+ '$proplist',
+ []]}},
+ {?eh,cth,{empty_cth,post_end_per_testcase,
+ [skip_case_SUITE,skip_in_case,
+ '$proplist',
+ {skip,"Skipped in test case function"},
+ []]}},
+ {?eh,tc_done,{skip_case_SUITE,skip_in_case,
+ {skipped,"Skipped in test case function"}}},
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [skip_case_SUITE,skip_in_case,
+ {tc_user_skip,"Skipped in test case function"},
+ []]}},
+ {?eh,test_stats,{2,0,{6,6}}},
+
+ %% Auto skip due to failed 'require' -> only the on_tc_skip
+ %% callback shall be called
+ {?eh,tc_start,{skip_case_SUITE,req_auto_skip}},
+ {?eh,tc_done,{skip_case_SUITE,req_auto_skip,
+ {auto_skipped,{require_failed,{not_available,whatever}}}}},
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [skip_case_SUITE,req_auto_skip,
+ {tc_auto_skip,{require_failed,{not_available,whatever}}},
+ []]}},
+ {?eh,test_stats,{2,0,{6,7}}},
+
+ %% Auto skip due to failed testcase/0 function -> only the
+ %% on_tc_skip callback shall be called
+ {?eh,tc_start,{skip_case_SUITE,fail_auto_skip}},
+ {?eh,tc_done,{skip_case_SUITE,fail_auto_skip,
+ {auto_skipped,{testcase0_failed,bad_return_value}}}},
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [skip_case_SUITE,fail_auto_skip,
+ {tc_auto_skip,{testcase0_failed,bad_return_value}},
+ []]}},
+ {?eh,test_stats,{2,0,{6,8}}},
+
+ {?eh,tc_start,{skip_case_SUITE,end_per_suite}},
+ {?eh,cth,{empty_cth,pre_end_per_suite,
+ [skip_case_SUITE,
+ '$proplist',
+ []]}},
+ {?eh,cth,{empty_cth,post_end_per_suite,
+ [skip_case_SUITE,
+ '$proplist',
+ ok,[]]}},
+ {?eh,tc_done,{skip_case_SUITE,end_per_suite,ok}},
+ {?eh,test_done,{'DEF','STOP_TIME'}},
+ {?eh,cth,{empty_cth,terminate,[[]]}},
+ {?eh,stop_logging,[]}
+ ],
+ %% Make sure no 'cth_error' events are received!
+ [{negative,{?eh,cth_error,'_'},E} || E <- Events];
+
+test_events(failed_sequence) ->
+ %% skip_cth.erl will send a 'cth_error' event if a hook is
+ %% erroneously called. Therefore, all Events are changed to
+ %% {negative,{?eh,cth_error,'_'},Event}
+ %% at the end of this function.
+ Events =
+ [
+ {?eh,start_logging,{'DEF','RUNDIR'}},
+ {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
+ {?eh,cth,{empty_cth,id,[[]]}},
+ {?eh,cth,{empty_cth,init,[{'_','_','_'},[]]}},
+ {?eh,start_info,{1,1,2}},
+ {?eh,tc_start,{ct_framework,init_per_suite}},
+ {?eh,cth,{empty_cth,pre_init_per_suite,[seq_SUITE,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_init_per_suite,
+ [seq_SUITE,'$proplist','$proplist',[]]}},
+ {?eh,tc_done,{ct_framework,init_per_suite,ok}},
+ {?eh,tc_start,{seq_SUITE,test_case_1}},
+ {?eh,cth,{empty_cth,pre_init_per_testcase,
+ [seq_SUITE,test_case_1,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_init_per_testcase,
+ [seq_SUITE,test_case_1,'$proplist',ok,[]]}},
+ {?eh,cth,{empty_cth,pre_end_per_testcase,
+ [seq_SUITE,test_case_1,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_end_per_testcase,
+ [seq_SUITE,test_case_1,'$proplist',
+ {error,failed_on_purpose},[]]}},
+ {?eh,tc_done,{seq_SUITE,test_case_1,{failed,{error,failed_on_purpose}}}},
+ {?eh,cth,{empty_cth,on_tc_fail,
+ [seq_SUITE,test_case_1,failed_on_purpose,[]]}},
+ {?eh,test_stats,{0,1,{0,0}}},
+ {?eh,tc_start,{seq_SUITE,test_case_2}},
+ {?eh,tc_done,{seq_SUITE,test_case_2,
+ {auto_skipped,{sequence_failed,seq1,test_case_1}}}},
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [seq_SUITE,test_case_2,
+ {tc_auto_skip,{sequence_failed,seq1,test_case_1}},
+ []]}},
+ {?eh,test_stats,{0,1,{0,1}}},
+ {?eh,tc_start,{ct_framework,end_per_suite}},
+ {?eh,cth,{empty_cth,pre_end_per_suite,[seq_SUITE,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_end_per_suite,[seq_SUITE,'$proplist',ok,[]]}},
+ {?eh,tc_done,{ct_framework,end_per_suite,ok}},
+ {?eh,test_done,{'DEF','STOP_TIME'}},
+ {?eh,cth,{empty_cth,terminate,[[]]}},
+ {?eh,stop_logging,[]}
+ ],
+ %% Make sure no 'cth_error' events are received!
+ [{negative,{?eh,cth_error,'_'},E} || E <- Events];
+
+test_events(repeat_force_stop) ->
+ %% skip_cth.erl will send a 'cth_error' event if a hook is
+ %% erroneously called. Therefore, all Events are changed to
+ %% {negative,{?eh,cth_error,'_'},Event}
+ %% at the end of this function.
+ Events=
+ [
+ {?eh,start_logging,{'DEF','RUNDIR'}},
+ {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
+ {?eh,cth,{empty_cth,id,[[]]}},
+ {?eh,cth,{empty_cth,init,[{'_','_','_'},[]]}},
+ {?eh,start_info,{1,1,2}},
+ {?eh,tc_start,{ct_framework,init_per_suite}},
+ {?eh,cth,{empty_cth,pre_init_per_suite,[repeat_SUITE,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_init_per_suite,
+ [repeat_SUITE,'$proplist','$proplist',[]]}},
+ {?eh,tc_done,{ct_framework,init_per_suite,ok}},
+ {?eh,tc_start,{repeat_SUITE,test_case_1}},
+ {?eh,cth,{empty_cth,pre_init_per_testcase,
+ [repeat_SUITE,test_case_1,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_init_per_testcase,
+ [repeat_SUITE,test_case_1,'$proplist',ok,[]]}},
+ {?eh,cth,{empty_cth,pre_end_per_testcase,
+ [repeat_SUITE,test_case_1,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_end_per_testcase,
+ [repeat_SUITE,test_case_1,'$proplist',ok,[]]}},
+ {?eh,tc_done,{repeat_SUITE,test_case_1,ok}},
+ {?eh,test_stats,{1,0,{0,0}}},
+ {?eh,tc_start,{repeat_SUITE,test_case_2}},
+ {?eh,tc_done,{repeat_SUITE,test_case_2,
+ {auto_skipped,
+ "Repeated test stopped by force_stop option"}}},
+ {?eh,cth,{empty_cth,on_tc_skip,
+ [repeat_SUITE,test_case_2,
+ {tc_auto_skip,"Repeated test stopped by force_stop option"},
+ []]}},
+ {?eh,test_stats,{1,0,{0,1}}},
+ {?eh,tc_start,{ct_framework,end_per_suite}},
+ {?eh,cth,{empty_cth,pre_end_per_suite,[repeat_SUITE,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_end_per_suite,
+ [repeat_SUITE,'$proplist',ok,[]]}},
+ {?eh,tc_done,{ct_framework,end_per_suite,ok}},
+ {?eh,test_done,{'DEF','STOP_TIME'}},
+ {?eh,cth,{empty_cth,terminate,[[]]}},
+ {?eh,stop_logging,[]}
+ ],
+ %% Make sure no 'cth_error' events are received!
+ [{negative,{?eh,cth_error,'_'},E} || E <- Events];
+
+test_events(config_clash) ->
+ %% skip_cth.erl will send a 'cth_error' event if a hook is
+ %% erroneously called. Therefore, all Events are changed to
+ %% {negative,{?eh,cth_error,'_'},Event}
+ %% at the end of this function.
+ Events =
+ [
+ {?eh,start_logging,{'DEF','RUNDIR'}},
+ {?eh,test_start,{'DEF',{'START_TIME','LOGDIR'}}},
+ {?eh,cth,{empty_cth,id,[[]]}},
+ {?eh,cth,{empty_cth,init,[{'_','_','_'},[]]}},
+ {?eh,start_info,{1,1,1}},
+ {?eh,tc_start,{ct_framework,init_per_suite}},
+ {?eh,cth,{empty_cth,pre_init_per_suite,
+ [config_clash_SUITE,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_init_per_suite,
+ [config_clash_SUITE,'$proplist','$proplist',[]]}},
+ {?eh,tc_done,{ct_framework,init_per_suite,ok}},
+ {?eh,tc_start,{config_clash_SUITE,test_case_1}},
+ {?eh,tc_done,{config_clash_SUITE,test_case_1,
+ {failed,{error,{config_name_already_in_use,[aa]}}}}},
+ {?eh,cth,{empty_cth,on_tc_fail,
+ [config_clash_SUITE,test_case_1,
+ {config_name_already_in_use,[aa]},
+ []]}},
+ {?eh,test_stats,{0,1,{0,0}}},
+ {?eh,tc_start,{ct_framework,end_per_suite}},
+ {?eh,cth,{empty_cth,pre_end_per_suite,
+ [config_clash_SUITE,'$proplist',[]]}},
+ {?eh,cth,{empty_cth,post_end_per_suite,
+ [config_clash_SUITE,'$proplist',ok,[]]}},
+ {?eh,tc_done,{ct_framework,end_per_suite,ok}},
+ {?eh,test_done,{'DEF','STOP_TIME'}},
+ {?eh,cth,{empty_cth,terminate,[[]]}},
+ {?eh,stop_logging,[]}
+ ],
+ %% Make sure no 'cth_error' events are received!
+ [{negative,{?eh,cth_error,'_'},E} || E <- Events];
+
test_events(ok) ->
ok.
-
%% test events help functions
contains(List) ->
fun(Proplist) when is_list(Proplist) ->
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/all_hook_callbacks_SUITE.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/all_hook_callbacks_SUITE.erl
new file mode 100644
index 0000000000..5b50548694
--- /dev/null
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/all_hook_callbacks_SUITE.erl
@@ -0,0 +1,62 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2010-2016. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-module(all_hook_callbacks_SUITE).
+
+-suite_defaults([{timetrap, {minutes, 10}}]).
+
+%% Note: This directive should only be used in test suites.
+-compile(export_all).
+
+-include("ct.hrl").
+
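+%% This suite is used both by the fallback test, where the installed
+%% hook (fallback_cth.erl) lacks the Suite argument in its callbacks,
+%% and by the callbacks_on_skip test, where the whole suite is skipped
+%% in the test specification.
+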
+%% Test server callback functions
+init_per_suite(Config) ->
+ Config.
+
+end_per_suite(_Config) ->
+ ok.
+
+init_per_group(Config) ->
+ Config.
+
+end_per_group(_Config) ->
+ ok.
+
+init_per_testcase(skip_case, Config) ->
+ {skip,"Skipped in init_per_testcase/2"};
+init_per_testcase(_TestCase, Config) ->
+ Config.
+
+end_per_testcase(_TestCase, _Config) ->
+ ok.
+
+all() ->
+ [{group,test_group},test_case,skip_case].
+
+groups() ->
+ [{test_group,[test_case]}].
+
+%% Test cases start here.
+test_case(Config) ->
+ ok.
+
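+%% The body of skip_case/1 is never reached since init_per_testcase/2
+%% above returns {skip,"Skipped in init_per_testcase/2"} for this case.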
+skip_case(Config) ->
+ ok.
diff --git a/lib/percept/src/percept.appup.src b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/config_clash_SUITE.erl
index 3ccdf8db2b..f74c757cc1 100644
--- a/lib/percept/src/percept.appup.src
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/config_clash_SUITE.erl
@@ -1,8 +1,8 @@
-%% -*- erlang -*-
+%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2007-2016. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2010-2016. All Rights Reserved.
+%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
@@ -14,9 +14,30 @@
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-%%
+%%
%% %CopyrightEnd%
-{"%VSN%",
- [{<<".*">>,[{restart_application, percept}]}],
- [{<<".*">>,[{restart_application, percept}]}]
-}.
+%%
+
+-module(config_clash_SUITE).
+
+-compile(export_all).
+
+-include("ct.hrl").
+
+suite() ->
+ [{require,aa,yy},{default_config,yy,"this is a default value"}].
+
+init_per_testcase(_,Config) ->
+ Config.
+
+end_per_testcase(_,_) ->
+ ok.
+
+all() ->
+ [test_case_1].
+
+%% Test cases start here.
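+%% Note that suite/0 above associates the configuration name 'aa' with
+%% 'yy', while test_case_1/0 tries to require 'xx' under the same name.
+%% Running the case is therefore expected to fail with
+%% {config_name_already_in_use,[aa]} (see test_events(config_clash)).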
+test_case_1() ->
+ [{require,aa,xx}].
+test_case_1(_Config) ->
+ ok.
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_no_end_config_SUITE.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_no_end_config_SUITE.erl
new file mode 100644
index 0000000000..7cdaf2024b
--- /dev/null
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_no_end_config_SUITE.erl
@@ -0,0 +1,51 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2010-2016. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-module(ct_no_end_config_SUITE).
+
+-compile(export_all).
+
+-include("ct.hrl").
+
+%%% This suite is used to verify that all pre/post_end_per_* callbacks
+%%% are called with the correct SuiteName even if no end_per_* config
+%%% functions exist in the suite, and that the non-exported config
+%%% functions fail with 'undef'.
+
+init_per_suite(Config) ->
+ Config.
+
+init_per_group(_Group,Config) ->
+ Config.
+
+init_per_testcase(_TC,Config) ->
+ Config.
+
+all() ->
+ [test_case_1, {group,test_group}].
+
+groups() ->
+ [{test_group,[],[test_case_2]}].
+
+test_case_1(Config) ->
+ ok.
+
+test_case_2(Config) ->
+ ok.
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_no_init_config_SUITE.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_no_init_config_SUITE.erl
new file mode 100644
index 0000000000..43c062d66f
--- /dev/null
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_no_init_config_SUITE.erl
@@ -0,0 +1,54 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2010-2016. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-module(ct_no_init_config_SUITE).
+
+-compile(export_all).
+
+-include("ct.hrl").
+
+%%% This suite is used to verify that all
+%%% pre/post_init_per_group/testcase callbacks are called with the
+%%% correct SuiteName even if no init_per_group/testcase functions
+%%% exist in the suite, and that the non-exported config functions
+%%% fail with 'undef'.
+
+init_per_suite(Config) ->
+ Config.
+
+end_per_suite(Config) ->
+ Config.
+
+end_per_group(_Group,Config) ->
+ Config.
+
+end_per_testcase(_TC,Config) ->
+ Config.
+
+all() ->
+ [test_case_1, {group,test_group}].
+
+groups() ->
+ [{test_group,[],[test_case_2]}].
+
+test_case_1(Config) ->
+ ok.
+
+test_case_2(Config) ->
+ ok.
diff --git a/lib/percept/src/egd.hrl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_no_init_suite_config_SUITE.erl
index fc0a7e10ee..85dfe8ca4b 100644
--- a/lib/percept/src/egd.hrl
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/ct_no_init_suite_config_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2016. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -16,30 +16,24 @@
%% limitations under the License.
%%
%% %CopyrightEnd%
+%%
+
+-module(ct_no_init_suite_config_SUITE).
+
+-compile(export_all).
--type rgba_float() :: {float(), float(), float(), float()}.
--type rgba_byte() :: {byte(), byte(), byte(), byte()}.
--type rgb() :: {byte(), byte(), byte()}.
+-include("ct.hrl").
--record(image_object, {
- type,
- points = [],
- span,
- internals,
- intervals,
- color}). % RGBA in float values
+%%% This suite is used to verify that the pre/post_init_per_suite
+%%% callbacks are called with the correct SuiteName even if no
+%%% init_per_suite function exists in the suite, and that the
+%%% non-exported config function fails with 'undef'.
--record(image, {
- width,
- height,
- objects = [],
- background = {1.0,1.0,1.0,1.0},
- image}).
+end_per_suite(Config) ->
+ Config.
--define(debug, void).
+all() ->
+ [test_case].
--ifdef(debug).
--define(dbg(X), io:format("DEBUG: ~p:~p~n",[?MODULE, X])).
--else.
--define(dbg(X), ok).
--endif.
+test_case(Config) ->
+ ok.
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/empty_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/empty_cth.erl
index c00eb5cf93..37742f0d20 100644
--- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/empty_cth.erl
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/empty_cth.erl
@@ -44,18 +44,18 @@
-export([pre_end_per_suite/3]).
-export([post_end_per_suite/4]).
--export([pre_init_per_group/3]).
--export([post_init_per_group/4]).
--export([pre_end_per_group/3]).
--export([post_end_per_group/4]).
+-export([pre_init_per_group/4]).
+-export([post_init_per_group/5]).
+-export([pre_end_per_group/4]).
+-export([post_end_per_group/5]).
--export([pre_init_per_testcase/3]).
--export([post_init_per_testcase/4]).
--export([pre_end_per_testcase/3]).
--export([post_end_per_testcase/4]).
+-export([pre_init_per_testcase/4]).
+-export([post_init_per_testcase/5]).
+-export([pre_end_per_testcase/4]).
+-export([post_end_per_testcase/5]).
--export([on_tc_fail/3]).
--export([on_tc_skip/3]).
+-export([on_tc_fail/4]).
+-export([on_tc_skip/4]).
-export([terminate/1]).
@@ -154,150 +154,160 @@ post_end_per_suite(Suite,Config,Return,State) ->
%% @doc Called before each init_per_group.
%% You can change the config in this function.
--spec pre_init_per_group(Group :: atom(),
- Config :: config(),
- State :: #state{}) ->
+-spec pre_init_per_group(Suite :: atom(),
+ Group :: atom(),
+ Config :: config(),
+ State :: #state{}) ->
{config() | skip_or_fail(), NewState :: #state{}}.
-pre_init_per_group(Group,Config,State) ->
+pre_init_per_group(Suite,Group,Config,State) ->
gen_event:notify(
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, pre_init_per_group,
- [Group,Config,State]}}),
- ct:log("~w:pre_init_per_group(~w) called", [?MODULE,Group]),
+ [Suite,Group,Config,State]}}),
+ ct:log("~w:pre_init_per_group(~w,~w) called", [?MODULE,Suite,Group]),
{Config, State}.
%% @doc Called after each init_per_group.
%% You can change the return value in this function.
--spec post_init_per_group(Group :: atom(),
+-spec post_init_per_group(Suite :: atom(),
+ Group :: atom(),
Config :: config(),
Return :: config() | skip_or_fail(),
State :: #state{}) ->
{config() | skip_or_fail(), NewState :: #state{}}.
-post_init_per_group(Group,Config,Return,State) ->
+post_init_per_group(Suite,Group,Config,Return,State) ->
gen_event:notify(
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, post_init_per_group,
- [Group,Config,Return,State]}}),
- ct:log("~w:post_init_per_group(~w) called", [?MODULE,Group]),
+ [Suite,Group,Config,Return,State]}}),
+ ct:log("~w:post_init_per_group(~w,~w) called", [?MODULE,Suite,Group]),
{Return, State}.
%% @doc Called before each end_per_group. The config/state can be changed here,
%% though it will only affect the *end_per_group functions.
--spec pre_end_per_group(Group :: atom(),
+-spec pre_end_per_group(Suite :: atom(),
+ Group :: atom(),
Config :: config() | skip_or_fail(),
State :: #state{}) ->
{ok | skip_or_fail(), NewState :: #state{}}.
-pre_end_per_group(Group,Config,State) ->
+pre_end_per_group(Suite,Group,Config,State) ->
gen_event:notify(
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, pre_end_per_group,
- [Group,Config,State]}}),
- ct:log("~w:pre_end_per_group(~w) called", [?MODULE,Group]),
+ [Suite,Group,Config,State]}}),
+ ct:log("~w:pre_end_per_group(~w,~w) called", [?MODULE,Suite,Group]),
{Config, State}.
%% @doc Called after each end_per_group. Note that the config cannot be
%% changed here, only the status of the group.
--spec post_end_per_group(Group :: atom(),
+-spec post_end_per_group(Suite :: atom(),
+ Group :: atom(),
Config :: config(),
Return :: term(),
State :: #state{}) ->
{ok | skip_or_fail(), NewState :: #state{}}.
-post_end_per_group(Group,Config,Return,State) ->
+post_end_per_group(Suite,Group,Config,Return,State) ->
gen_event:notify(
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, post_end_per_group,
- [Group,Config,Return,State]}}),
- ct:log("~w:post_end_per_group(~w) called", [?MODULE,Group]),
+ [Suite,Group,Config,Return,State]}}),
+ ct:log("~w:post_end_per_group(~w,~w) called", [?MODULE,Suite,Group]),
{Return, State}.
%% @doc Called before init_per_testcase/2 for each test case.
%% You can change the config in this function.
--spec pre_init_per_testcase(TC :: atom(),
- Config :: config(),
- State :: #state{}) ->
+-spec pre_init_per_testcase(Suite :: atom(),
+ TC :: atom(),
+ Config :: config(),
+ State :: #state{}) ->
{config() | skip_or_fail(), NewState :: #state{}}.
-pre_init_per_testcase(TC,Config,State) ->
+pre_init_per_testcase(Suite,TC,Config,State) ->
gen_event:notify(
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, pre_init_per_testcase,
- [TC,Config,State]}}),
- ct:log("~w:pre_init_per_testcase(~w) called", [?MODULE,TC]),
+ [Suite,TC,Config,State]}}),
+ ct:log("~w:pre_init_per_testcase(~w,~w) called", [?MODULE,Suite,TC]),
{Config, State}.
%% @doc Called after init_per_testcase/2, and before the test case.
--spec post_init_per_testcase(TC :: atom(),
+-spec post_init_per_testcase(Suite :: atom(),
+ TC :: atom(),
Config :: config(),
Return :: config() | skip_or_fail(),
State :: #state{}) ->
{config() | skip_or_fail(), NewState :: #state{}}.
-post_init_per_testcase(TC,Config,Return,State) ->
+post_init_per_testcase(Suite,TC,Config,Return,State) ->
gen_event:notify(
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, post_init_per_testcase,
- [TC,Config,Return,State]}}),
- ct:log("~w:post_init_per_testcase(~w) called", [?MODULE,TC]),
+ [Suite,TC,Config,Return,State]}}),
+ ct:log("~w:post_init_per_testcase(~w,~w) called", [?MODULE,Suite,TC]),
{Return, State}.
%% @doc Called before end_per_testcase/2. No skip or fail allowed here,
%% only config additions.
--spec pre_end_per_testcase(TC :: atom(),
- Config :: config(),
- State :: #state{}) ->
+-spec pre_end_per_testcase(Suite :: atom(),
+ TC :: atom(),
+ Config :: config(),
+ State :: #state{}) ->
{config(), NewState :: #state{}}.
-pre_end_per_testcase(TC,Config,State) ->
+pre_end_per_testcase(Suite,TC,Config,State) ->
gen_event:notify(
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, pre_end_per_testcase,
- [TC,Config,State]}}),
- ct:log("~w:pre_end_per_testcase(~w) called", [?MODULE,TC]),
+ [Suite,TC,Config,State]}}),
+ ct:log("~w:pre_end_per_testcase(~w,~w) called", [?MODULE,Suite,TC]),
{Config, State}.
%% @doc Called after end_per_testcase/2 for each test case. Note that
%% the config cannot be changed here, only the status of the test case.
--spec post_end_per_testcase(TC :: atom(),
+-spec post_end_per_testcase(Suite :: atom(),
+ TC :: atom(),
Config :: config(),
Return :: term(),
State :: #state{}) ->
{ok | skip_or_fail(), NewState :: #state{}}.
-post_end_per_testcase(TC,Config,Return,State) ->
+post_end_per_testcase(Suite,TC,Config,Return,State) ->
gen_event:notify(
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, post_end_per_testcase,
- [TC,Config,Return,State]}}),
- ct:log("~w:post_end_per_testcase(~w) called", [?MODULE,TC]),
+ [Suite,TC,Config,Return,State]}}),
+ ct:log("~w:post_end_per_testcase(~w,~w) called", [?MODULE,Suite,TC]),
{Return, State}.
%% @doc Called after post_init_per_suite, post_end_per_suite, post_init_per_group,
%% post_end_per_group and post_end_per_tc if the suite, group or test case failed.
%% This function should be used for extra cleanup which might be needed.
%% It is not possible to modify the config or the status of the test run.
--spec on_tc_fail(TC :: init_per_suite | end_per_suite |
+-spec on_tc_fail(Suite :: atom(),
+ TC :: init_per_suite | end_per_suite |
init_per_group | end_per_group | atom() |
{Function :: atom(), GroupName :: atom()},
Reason :: term(), State :: #state{}) -> NewState :: #state{}.
-on_tc_fail(TC, Reason, State) ->
+on_tc_fail(Suite, TC, Reason, State) ->
gen_event:notify(
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, on_tc_fail,
- [TC,Reason,State]}}),
- ct:log("~w:on_tc_fail(~w) called", [?MODULE,TC]),
+ [Suite,TC,Reason,State]}}),
+ ct:log("~w:on_tc_fail(~w,~w) called", [?MODULE,Suite,TC]),
State.
%% @doc Called when a test case is skipped by either user action
%% or due to an init function failing. Test case can be
%% end_per_suite, init_per_group, end_per_group and the actual test cases.
--spec on_tc_skip(TC :: end_per_suite |
+-spec on_tc_skip(Suite :: atom(),
+ TC :: end_per_suite |
init_per_group | end_per_group | atom() |
{Function :: atom(), GroupName :: atom()},
{tc_auto_skip, {failed, {Mod :: atom(), Function :: atom(), Reason :: term()}}} |
{tc_user_skip, {skipped, Reason :: term()}},
State :: #state{}) -> NewState :: #state{}.
-on_tc_skip(TC, Reason, State) ->
+on_tc_skip(Suite, TC, Reason, State) ->
gen_event:notify(
?CT_EVMGR_REF, #event{ name = cth, node = node(),
data = {?MODULE, on_tc_skip,
- [TC,Reason,State]}}),
- ct:log("~w:on_tc_skip(~w) called", [?MODULE,TC]),
+ [Suite,TC,Reason,State]}}),
+ ct:log("~w:on_tc_skip(~w,~w) called", [?MODULE,Suite,TC]),
State.
%% @doc Called when the scope of the CTH is done, this depends on
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/fail_post_suite_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/fail_post_suite_cth.erl
index 559b22bc9f..141b933697 100644
--- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/fail_post_suite_cth.erl
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/fail_post_suite_cth.erl
@@ -45,29 +45,29 @@ pre_end_per_suite(Suite,Config,State) ->
post_end_per_suite(Suite,Config,Return,State) ->
empty_cth:post_end_per_suite(Suite,Config,Return,State).
-pre_init_per_group(Group,Config,State) ->
- empty_cth:pre_init_per_group(Group,Config,State).
+pre_init_per_group(Suite,Group,Config,State) ->
+ empty_cth:pre_init_per_group(Suite,Group,Config,State).
-post_init_per_group(Group,Config,Return,State) ->
- empty_cth:post_init_per_group(Group,Config,Return,State).
+post_init_per_group(Suite,Group,Config,Return,State) ->
+ empty_cth:post_init_per_group(Suite,Group,Config,Return,State).
-pre_end_per_group(Group,Config,State) ->
- empty_cth:pre_end_per_group(Group,Config,State).
+pre_end_per_group(Suite,Group,Config,State) ->
+ empty_cth:pre_end_per_group(Suite,Group,Config,State).
-post_end_per_group(Group,Config,Return,State) ->
- empty_cth:post_end_per_group(Group,Config,Return,State).
+post_end_per_group(Suite,Group,Config,Return,State) ->
+ empty_cth:post_end_per_group(Suite,Group,Config,Return,State).
-pre_init_per_testcase(TC,Config,State) ->
- empty_cth:pre_init_per_testcase(TC,Config,State).
+pre_init_per_testcase(Suite,TC,Config,State) ->
+ empty_cth:pre_init_per_testcase(Suite,TC,Config,State).
-post_end_per_testcase(TC,Config,Return,State) ->
- empty_cth:post_end_per_testcase(TC,Config,Return,State).
+post_end_per_testcase(Suite,TC,Config,Return,State) ->
+ empty_cth:post_end_per_testcase(Suite,TC,Config,Return,State).
-on_tc_fail(TC, Reason, State) ->
- empty_cth:on_tc_fail(TC,Reason,State).
+on_tc_fail(Suite,TC, Reason, State) ->
+ empty_cth:on_tc_fail(Suite,TC,Reason,State).
-on_tc_skip(TC, Reason, State) ->
- empty_cth:on_tc_skip(TC,Reason,State).
+on_tc_skip(Suite,TC, Reason, State) ->
+ empty_cth:on_tc_skip(Suite,TC,Reason,State).
terminate(State) ->
empty_cth:terminate(State).
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/fail_pre_suite_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/fail_pre_suite_cth.erl
index 51202443bf..07d7c84ed5 100644
--- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/fail_pre_suite_cth.erl
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/fail_pre_suite_cth.erl
@@ -45,35 +45,35 @@ pre_end_per_suite(Suite,Config,State) ->
post_end_per_suite(Suite,Config,Return,State) ->
empty_cth:post_end_per_suite(Suite,Config,Return,State).
-pre_init_per_group(Group,Config,State) ->
- empty_cth:pre_init_per_group(Group,Config,State).
+pre_init_per_group(Suite,Group,Config,State) ->
+ empty_cth:pre_init_per_group(Suite,Group,Config,State).
-post_init_per_group(Group,Config,Return,State) ->
- empty_cth:post_init_per_group(Group,Config,Return,State).
+post_init_per_group(Suite,Group,Config,Return,State) ->
+ empty_cth:post_init_per_group(Suite,Group,Config,Return,State).
-pre_end_per_group(Group,Config,State) ->
- empty_cth:pre_end_per_group(Group,Config,State).
+pre_end_per_group(Suite,Group,Config,State) ->
+ empty_cth:pre_end_per_group(Suite,Group,Config,State).
-post_end_per_group(Group,Config,Return,State) ->
- empty_cth:post_end_per_group(Group,Config,Return,State).
+post_end_per_group(Suite,Group,Config,Return,State) ->
+ empty_cth:post_end_per_group(Suite,Group,Config,Return,State).
-pre_init_per_testcase(TC,Config,State) ->
- empty_cth:pre_init_per_testcase(TC,Config,State).
+pre_init_per_testcase(Suite,TC,Config,State) ->
+ empty_cth:pre_init_per_testcase(Suite,TC,Config,State).
-post_init_per_testcase(TC,Config,Return,State) ->
- empty_cth:post_init_per_testcase(TC,Config,Return,State).
+post_init_per_testcase(Suite,TC,Config,Return,State) ->
+ empty_cth:post_init_per_testcase(Suite,TC,Config,Return,State).
-pre_end_per_testcase(TC,Config,State) ->
- empty_cth:pre_end_per_testcase(TC,Config,State).
+pre_end_per_testcase(Suite,TC,Config,State) ->
+ empty_cth:pre_end_per_testcase(Suite,TC,Config,State).
-post_end_per_testcase(TC,Config,Return,State) ->
- empty_cth:post_end_per_testcase(TC,Config,Return,State).
+post_end_per_testcase(Suite,TC,Config,Return,State) ->
+ empty_cth:post_end_per_testcase(Suite,TC,Config,Return,State).
-on_tc_fail(TC, Reason, State) ->
- empty_cth:on_tc_fail(TC,Reason,State).
+on_tc_fail(Suite,TC, Reason, State) ->
+ empty_cth:on_tc_fail(Suite,TC,Reason,State).
-on_tc_skip(TC, Reason, State) ->
- empty_cth:on_tc_skip(TC,Reason,State).
+on_tc_skip(Suite,TC, Reason, State) ->
+ empty_cth:on_tc_skip(Suite,TC,Reason,State).
terminate(State) ->
empty_cth:terminate(State).
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/fallback_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/fallback_cth.erl
new file mode 100644
index 0000000000..59a3d5cbf9
--- /dev/null
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/fallback_cth.erl
@@ -0,0 +1,81 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2010-2016. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+
+-module(fallback_cth).
+
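+%% This hook implements the group/testcase callbacks and on_tc_fail/skip
+%% with the old arities, i.e. without the leading Suite argument, and
+%% forwards to empty_cth with the placeholder atom 'fallback_nosuite'
+%% as the suite name (matched by test_events(fallback)).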
+
+-include_lib("common_test/src/ct_util.hrl").
+-include_lib("common_test/include/ct_event.hrl").
+
+
+%% CT Hooks
+-compile(export_all).
+
+id(Opts) ->
+ empty_cth:id(Opts).
+
+init(Id, Opts) ->
+ empty_cth:init(Id, Opts).
+
+pre_init_per_suite(Suite, Config, State) ->
+ empty_cth:pre_init_per_suite(Suite,Config,State).
+
+post_init_per_suite(Suite,Config,Return,State) ->
+ empty_cth:post_init_per_suite(Suite,Config,Return,State).
+
+pre_end_per_suite(Suite,Config,State) ->
+ empty_cth:pre_end_per_suite(Suite,Config,State).
+
+post_end_per_suite(Suite,Config,Return,State) ->
+ empty_cth:post_end_per_suite(Suite,Config,Return,State).
+
+pre_init_per_group(Group,Config,State) ->
+ empty_cth:pre_init_per_group(fallback_nosuite,Group,Config,State).
+
+post_init_per_group(Group,Config,Return,State) ->
+ empty_cth:post_init_per_group(fallback_nosuite,Group,Config,Return,State).
+
+pre_end_per_group(Group,Config,State) ->
+ empty_cth:pre_end_per_group(fallback_nosuite,Group,Config,State).
+
+post_end_per_group(Group,Config,Return,State) ->
+ empty_cth:post_end_per_group(fallback_nosuite,Group,Config,Return,State).
+
+pre_init_per_testcase(TC,Config,State) ->
+ empty_cth:pre_init_per_testcase(fallback_nosuite,TC,Config,State).
+
+post_init_per_testcase(TC,Config,Return,State) ->
+ empty_cth:post_init_per_testcase(fallback_nosuite,TC,Config,Return,State).
+
+pre_end_per_testcase(TC,Config,State) ->
+ empty_cth:pre_end_per_testcase(fallback_nosuite,TC,Config,State).
+
+post_end_per_testcase(TC,Config,Return,State) ->
+ empty_cth:post_end_per_testcase(fallback_nosuite,TC,Config,Return,State).
+
+on_tc_fail(TC, Reason, State) ->
+ empty_cth:on_tc_fail(fallback_nosuite,TC,Reason,State).
+
+on_tc_skip(TC, Reason, State) ->
+ empty_cth:on_tc_skip(fallback_nosuite,TC,Reason,State).
+
+terminate(State) ->
+ empty_cth:terminate(State).
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/minimal_terminate_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/minimal_terminate_cth.erl
index b49cbe7fb4..679f076f3a 100644
--- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/minimal_terminate_cth.erl
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/minimal_terminate_cth.erl
@@ -29,13 +29,13 @@
%% CT Hooks
-export([init/2]).
-export([terminate/1]).
--export([on_tc_skip/3]).
+-export([on_tc_skip/4]).
init(Id, Opts) ->
empty_cth:init(Id, Opts).
-on_tc_skip(TC, Reason, State) ->
- empty_cth:on_tc_skip(TC,Reason,State).
+on_tc_skip(Suite, TC, Reason, State) ->
+ empty_cth:on_tc_skip(Suite,TC,Reason,State).
terminate(State) ->
empty_cth:terminate(State).
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/prio_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/prio_cth.erl
index a687743641..95bb76b4c1 100644
--- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/prio_cth.erl
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/prio_cth.erl
@@ -47,35 +47,35 @@ pre_end_per_suite(Suite,Config,State) ->
post_end_per_suite(Suite,Config,Return,State) ->
empty_cth:post_end_per_suite(Suite,Config,Return,State).
-pre_init_per_group(Group,Config,State) ->
- empty_cth:pre_init_per_group(Group,Config,State).
+pre_init_per_group(Suite,Group,Config,State) ->
+ empty_cth:pre_init_per_group(Suite,Group,Config,State).
-post_init_per_group(Group,Config,Return,State) ->
- empty_cth:post_init_per_group(Group,Config,Return,State).
+post_init_per_group(Suite,Group,Config,Return,State) ->
+ empty_cth:post_init_per_group(Suite,Group,Config,Return,State).
-pre_end_per_group(Group,Config,State) ->
- empty_cth:pre_end_per_group(Group,Config,State).
+pre_end_per_group(Suite,Group,Config,State) ->
+ empty_cth:pre_end_per_group(Suite,Group,Config,State).
-post_end_per_group(Group,Config,Return,State) ->
- empty_cth:post_end_per_group(Group,Config,Return,State).
+post_end_per_group(Suite,Group,Config,Return,State) ->
+ empty_cth:post_end_per_group(Suite,Group,Config,Return,State).
-pre_init_per_testcase(TC,Config,State) ->
- empty_cth:pre_init_per_testcase(TC,Config,State).
+pre_init_per_testcase(Suite,TC,Config,State) ->
+ empty_cth:pre_init_per_testcase(Suite,TC,Config,State).
-post_init_per_testcase(TC,Config,Return,State) ->
- empty_cth:post_init_per_testcase(TC,Config,Return,State).
+post_init_per_testcase(Suite,TC,Config,Return,State) ->
+ empty_cth:post_init_per_testcase(Suite,TC,Config,Return,State).
-pre_end_per_testcase(TC,Config,State) ->
- empty_cth:pre_end_per_testcase(TC,Config,State).
+pre_end_per_testcase(Suite,TC,Config,State) ->
+ empty_cth:pre_end_per_testcase(Suite,TC,Config,State).
-post_end_per_testcase(TC,Config,Return,State) ->
- empty_cth:post_end_per_testcase(TC,Config,Return,State).
+post_end_per_testcase(Suite,TC,Config,Return,State) ->
+ empty_cth:post_end_per_testcase(Suite,TC,Config,Return,State).
-on_tc_fail(TC, Reason, State) ->
- empty_cth:on_tc_fail(TC,Reason,State).
+on_tc_fail(Suite,TC, Reason, State) ->
+ empty_cth:on_tc_fail(Suite,TC,Reason,State).
-on_tc_skip(TC, Reason, State) ->
- empty_cth:on_tc_skip(TC,Reason,State).
+on_tc_skip(Suite,TC, Reason, State) ->
+ empty_cth:on_tc_skip(Suite,TC,Reason,State).
terminate(State) ->
empty_cth:terminate(State).
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/recover_post_suite_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/recover_post_suite_cth.erl
index 4d9c60f1ca..3562d39967 100644
--- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/recover_post_suite_cth.erl
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/recover_post_suite_cth.erl
@@ -47,35 +47,35 @@ pre_end_per_suite(Suite,Config,State) ->
post_end_per_suite(Suite,Config,Return,State) ->
empty_cth:post_end_per_suite(Suite,Config,Return,State).
-pre_init_per_group(Group,Config,State) ->
- empty_cth:pre_init_per_group(Group,Config,State).
+pre_init_per_group(Suite,Group,Config,State) ->
+ empty_cth:pre_init_per_group(Suite,Group,Config,State).
-post_init_per_group(Group,Config,Return,State) ->
- empty_cth:post_init_per_group(Group,Config,Return,State).
+post_init_per_group(Suite,Group,Config,Return,State) ->
+ empty_cth:post_init_per_group(Suite,Group,Config,Return,State).
-pre_end_per_group(Group,Config,State) ->
- empty_cth:pre_end_per_group(Group,Config,State).
+pre_end_per_group(Suite,Group,Config,State) ->
+ empty_cth:pre_end_per_group(Suite,Group,Config,State).
-post_end_per_group(Group,Config,Return,State) ->
- empty_cth:post_end_per_group(Group,Config,Return,State).
+post_end_per_group(Suite,Group,Config,Return,State) ->
+ empty_cth:post_end_per_group(Suite,Group,Config,Return,State).
-pre_init_per_testcase(TC,Config,State) ->
- empty_cth:pre_init_per_testcase(TC,Config,State).
+pre_init_per_testcase(Suite,TC,Config,State) ->
+ empty_cth:pre_init_per_testcase(Suite,TC,Config,State).
-post_init_per_testcase(TC,Config,Return,State) ->
- empty_cth:post_init_per_testcase(TC,Config,Return,State).
+post_init_per_testcase(Suite,TC,Config,Return,State) ->
+ empty_cth:post_init_per_testcase(Suite,TC,Config,Return,State).
-pre_end_per_testcase(TC,Config,State) ->
- empty_cth:pre_end_per_testcase(TC,Config,State).
+pre_end_per_testcase(Suite,TC,Config,State) ->
+ empty_cth:pre_end_per_testcase(Suite,TC,Config,State).
-post_end_per_testcase(TC,Config,Return,State) ->
- empty_cth:post_end_per_testcase(TC,Config,Return,State).
+post_end_per_testcase(Suite,TC,Config,Return,State) ->
+ empty_cth:post_end_per_testcase(Suite,TC,Config,Return,State).
-on_tc_fail(TC, Reason, State) ->
- empty_cth:on_tc_fail(TC,Reason,State).
+on_tc_fail(Suite,TC, Reason, State) ->
+ empty_cth:on_tc_fail(Suite,TC,Reason,State).
-on_tc_skip(TC, Reason, State) ->
- empty_cth:on_tc_skip(TC,Reason,State).
+on_tc_skip(Suite,TC, Reason, State) ->
+ empty_cth:on_tc_skip(Suite,TC,Reason,State).
terminate(State) ->
empty_cth:terminate(State).
diff --git a/lib/typer/src/typer.appup.src b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/repeat_SUITE.erl
index 3b7464a97c..fded4c02ab 100644
--- a/lib/typer/src/typer.appup.src
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/repeat_SUITE.erl
@@ -1,7 +1,7 @@
-%% -*- erlang -*-
+%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2014-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2016. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -16,7 +16,27 @@
%% limitations under the License.
%%
%% %CopyrightEnd%
-{"%VSN%",
- [{<<".*">>,[{restart_application, typer}]}],
- [{<<".*">>,[{restart_application, typer}]}]
-}.
+%%
+
+-module(repeat_SUITE).
+
+-compile(export_all).
+
+-include("ct.hrl").
+
+init_per_testcase(_,Config) ->
+ Config.
+
+end_per_testcase(_,_) ->
+ ok.
+
+all() ->
+ [test_case_1, test_case_2].
+
+%% Test cases start here.
+test_case_1(_Config) ->
+ timer:sleep(10000),
+ ok.
+
+test_case_2(_Config) ->
+ ok.
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/same_id_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/same_id_cth.erl
index 494f398fc1..b9d9d4cec1 100644
--- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/same_id_cth.erl
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/same_id_cth.erl
@@ -48,35 +48,35 @@ pre_end_per_suite(Suite,Config,State) ->
post_end_per_suite(Suite,Config,Return,State) ->
empty_cth:post_end_per_suite(Suite,Config,Return,State).
-pre_init_per_group(Group,Config,State) ->
- empty_cth:pre_init_per_group(Group,Config,State).
+pre_init_per_group(Suite,Group,Config,State) ->
+ empty_cth:pre_init_per_group(Suite,Group,Config,State).
-post_init_per_group(Group,Config,Return,State) ->
- empty_cth:post_init_per_group(Group,Config,Return,State).
+post_init_per_group(Suite,Group,Config,Return,State) ->
+ empty_cth:post_init_per_group(Suite,Group,Config,Return,State).
-pre_end_per_group(Group,Config,State) ->
- empty_cth:pre_end_per_group(Group,Config,State).
+pre_end_per_group(Suite,Group,Config,State) ->
+ empty_cth:pre_end_per_group(Suite,Group,Config,State).
-post_end_per_group(Group,Config,Return,State) ->
- empty_cth:post_end_per_group(Group,Config,Return,State).
+post_end_per_group(Suite,Group,Config,Return,State) ->
+ empty_cth:post_end_per_group(Suite,Group,Config,Return,State).
-pre_init_per_testcase(TC,Config,State) ->
- empty_cth:pre_init_per_testcase(TC,Config,State).
+pre_init_per_testcase(Suite,TC,Config,State) ->
+ empty_cth:pre_init_per_testcase(Suite,TC,Config,State).
-post_init_per_testcase(TC,Config,Return,State) ->
- empty_cth:post_init_per_testcase(TC,Config,Return,State).
+post_init_per_testcase(Suite,TC,Config,Return,State) ->
+ empty_cth:post_init_per_testcase(Suite,TC,Config,Return,State).
-pre_end_per_testcase(TC,Config,State) ->
- empty_cth:pre_end_per_testcase(TC,Config,State).
+pre_end_per_testcase(Suite,TC,Config,State) ->
+ empty_cth:pre_end_per_testcase(Suite,TC,Config,State).
-post_end_per_testcase(TC,Config,Return,State) ->
- empty_cth:post_end_per_testcase(TC,Config,Return,State).
+post_end_per_testcase(Suite,TC,Config,Return,State) ->
+ empty_cth:post_end_per_testcase(Suite,TC,Config,Return,State).
-on_tc_fail(TC, Reason, State) ->
- empty_cth:on_tc_fail(TC,Reason,State).
+on_tc_fail(Suite,TC, Reason, State) ->
+ empty_cth:on_tc_fail(Suite,TC,Reason,State).
-on_tc_skip(TC, Reason, State) ->
- empty_cth:on_tc_skip(TC,Reason,State).
+on_tc_skip(Suite,TC, Reason, State) ->
+ empty_cth:on_tc_skip(Suite,TC,Reason,State).
terminate(State) ->
empty_cth:terminate(State).
diff --git a/lib/percept/src/percept.app.src b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/seq_SUITE.erl
index ab0d9a4d90..6d1302fd35 100644
--- a/lib/percept/src/percept.app.src
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/seq_SUITE.erl
@@ -1,8 +1,8 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2007-2016. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 2010-2016. All Rights Reserved.
+%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
@@ -14,32 +14,32 @@
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-%%
+%%
%% %CopyrightEnd%
%%
-{application,percept, [
- {description, "PERCEPT Erlang Concurrency Profiling Tool"},
- {vsn, "%VSN%"},
- {modules, [
- egd,
- egd_font,
- egd_png,
- egd_primitives,
- egd_render,
- percept,
- percept_analyzer,
- percept_db,
- percept_graph,
- percept_html,
- percept_image
- ]},
- {registered, [percept_db,percept_port]},
- {applications, [kernel,stdlib]},
- {env,[]},
- {runtime_dependencies, ["stdlib-2.0","runtime_tools-1.8.14","kernel-3.0",
- "inets-5.10","erts-9.0"]}
-]}.
-
-
-%% vim: syntax=erlang
+-module(seq_SUITE).
+
+-compile(export_all).
+
+-include("ct.hrl").
+
+init_per_testcase(_,Config) ->
+ Config.
+
+end_per_testcase(_,_) ->
+ ok.
+
+all() ->
+ [{sequence,seq1}].
+
+sequences() ->
+ [{seq1,[test_case_1,test_case_2]}].
+
+%% Test cases start here.
+test_case_1(_Config) ->
+ exit(failed_on_purpose).
+
+test_case_2(_Config) ->
+ ct:fail("This test shall never be run since test_case_1 fails "
+ "and they are run in sequence").
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip.spec b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip.spec
new file mode 100644
index 0000000000..a271c5e8b2
--- /dev/null
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip.spec
@@ -0,0 +1,8 @@
+{suites,".",[all_hook_callbacks_SUITE,
+ skip_init_SUITE,
+ skip_req_SUITE,
+ skip_fail_SUITE,
+ skip_group_SUITE,
+ skip_case_SUITE]}.
+{skip_suites,".",all_hook_callbacks_SUITE,"Skipped in spec"}.
+{skip_cases,".",skip_case_SUITE,skip_in_spec,"Skipped in spec"}.
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_case_SUITE.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_case_SUITE.erl
new file mode 100644
index 0000000000..dad80ae914
--- /dev/null
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_case_SUITE.erl
@@ -0,0 +1,106 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2010-2016. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-module(skip_case_SUITE).
+
+-compile(export_all).
+
+-include("ct.hrl").
+
+suite() ->
+ [].
+
+init_per_suite(Config) ->
+ Config.
+
+end_per_suite(Config) ->
+ ok.
+
+init_per_group(_,Config) ->
+ Config.
+
+end_per_group(_,_) ->
+ ok.
+
+init_per_testcase(skip_in_init,Config) ->
+ {skip,"Skipped in init_per_testcase/2"};
+init_per_testcase(fail_in_init,Config) ->
+ ct:fail("Failed in init_per_testcase/2");
+init_per_testcase(exit_in_init,Config) ->
+ exit(self(),"Exit in init_per_testcase/2");
+init_per_testcase(_,Config) ->
+ Config.
+
+end_per_testcase(fail_in_end,_) ->
+ ct:fail("Failed in end_per_testcase/2");
+end_per_testcase(exit_in_end,_) ->
+ exit(self(),"Exit in end_per_testcase/2");
+end_per_testcase(_,_) ->
+ ok.
+
+all() ->
+ [skip_in_spec,
+ skip_in_init,
+ fail_in_init,
+ exit_in_init,
+ fail_in_end,
+ exit_in_end,
+ skip_in_case,
+ req_auto_skip,
+ fail_auto_skip
+ ].
+
+%% Test cases start here.
+skip_in_spec(Config) ->
+ ct:fail("This test shall never be run. "
+ "It shall be skipped in the test spec.").
+
+skip_in_init(Config) ->
+ ct:fail("This test shall never be run. "
+ "It shall be skipped in init_per_testcase/2.").
+
+fail_in_init(Config) ->
+ ct:fail("This test shall never be run. "
+ "It shall fail in init_per_testcase/2.").
+
+exit_in_init(Config) ->
+ ct:fail("This test shall never be run. "
+ "It shall exit in init_per_testcase/2.").
+
+fail_in_end(Config) ->
+ ok.
+
+exit_in_end(Config) ->
+ ok.
+
+skip_in_case(Config) ->
+ {skip,"Skipped in test case function"}.
+
+req_auto_skip() ->
+ [{require,whatever}].
+req_auto_skip(Config) ->
+ ct:fail("This test shall never be run due to "
+ "failed require").
+
+fail_auto_skip() ->
+ faulty_return_value.
+fail_auto_skip(Config) ->
+ ct:fail("This test shall never be run due to "
+ "faulty return from info function").
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_cth.erl
new file mode 100644
index 0000000000..16f015fe7a
--- /dev/null
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_cth.erl
@@ -0,0 +1,182 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2010-2016. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+
+-module(skip_cth).
+
+
+-include_lib("common_test/src/ct_util.hrl").
+-include_lib("common_test/include/ct_event.hrl").
+
+%% Send a cth_error event if a callback is called with unexpected arguments
+-define(fail(Info),
+ gen_event:notify(
+ ?CT_EVMGR_REF,
+ #event{ name = cth_error,
+ node = node(),
+ data = {illegal_hook_callback,{?MODULE,?FUNCTION_NAME,Info}}})).
+
+%% CT Hooks
+-compile(export_all).
+
+id(Opts) ->
+ empty_cth:id(Opts).
+
+init(Id, Opts) ->
+ empty_cth:init(Id, Opts).
+
+pre_init_per_suite(Suite, Config, State) ->
+ Suite==skip_init_SUITE
+ orelse Suite==skip_group_SUITE
+ orelse Suite==skip_case_SUITE
+ orelse Suite==seq_SUITE
+ orelse Suite==repeat_SUITE
+ orelse Suite==config_clash_SUITE
+ orelse ?fail(Suite),
+ empty_cth:pre_init_per_suite(Suite,Config,State).
+
+post_init_per_suite(Suite,Config,Return,State) ->
+ Suite==skip_init_SUITE
+ orelse Suite==skip_group_SUITE
+ orelse Suite==skip_case_SUITE
+ orelse Suite==seq_SUITE
+ orelse Suite==repeat_SUITE
+ orelse Suite==config_clash_SUITE
+ orelse ?fail(Suite),
+ empty_cth:post_init_per_suite(Suite,Config,Return,State).
+
+pre_end_per_suite(Suite,Config,State) ->
+ Suite==skip_case_SUITE
+ orelse Suite==skip_group_SUITE
+ orelse Suite==seq_SUITE
+ orelse Suite==repeat_SUITE
+ orelse Suite==config_clash_SUITE
+ orelse ?fail(Suite),
+ empty_cth:pre_end_per_suite(Suite,Config,State).
+
+post_end_per_suite(Suite,Config,Return,State) ->
+ Suite==skip_case_SUITE
+ orelse Suite==skip_group_SUITE
+ orelse Suite==seq_SUITE
+ orelse Suite==repeat_SUITE
+ orelse Suite==config_clash_SUITE
+ orelse ?fail(Suite),
+ empty_cth:post_end_per_suite(Suite,Config,Return,State).
+
+pre_init_per_group(Suite,Group,Config,State) ->
+ (Suite==skip_group_SUITE andalso Group==test_group_3)
+ orelse ?fail({Suite,Group}),
+ empty_cth:pre_init_per_group(Suite,Group,Config,State).
+
+post_init_per_group(Suite,Group,Config,Return,State) ->
+ (Suite==skip_group_SUITE andalso Group==test_group_3)
+ orelse ?fail({Suite,Group}),
+ empty_cth:post_init_per_group(Suite,Group,Config,Return,State).
+
+pre_end_per_group(Suite,Group,Config,State) ->
+ ?fail({Suite,Group}),
+ empty_cth:pre_end_per_group(Suite,Group,Config,State).
+
+post_end_per_group(Suite,Group,Config,Return,State) ->
+ ?fail({Suite,Group}),
+ empty_cth:post_end_per_group(Suite,Group,Config,Return,State).
+
+pre_init_per_testcase(Suite,TC,Config,State) ->
+ (Suite==skip_case_SUITE andalso (TC==skip_in_init
+ orelse TC==fail_in_init
+ orelse TC==exit_in_init
+ orelse TC==fail_in_end
+ orelse TC==exit_in_end
+ orelse TC==skip_in_case))
+ orelse (Suite==seq_SUITE andalso TC==test_case_1)
+ orelse (Suite==repeat_SUITE andalso TC==test_case_1)
+ orelse ?fail({Suite,TC}),
+ empty_cth:pre_init_per_testcase(Suite,TC,Config,State).
+
+post_init_per_testcase(Suite,TC,Config,Return,State) ->
+ (Suite==skip_case_SUITE andalso (TC==skip_in_init
+ orelse TC==fail_in_init
+ orelse TC==exit_in_init
+ orelse TC==fail_in_end
+ orelse TC==exit_in_end
+ orelse TC==skip_in_case))
+ orelse (Suite==seq_SUITE andalso TC==test_case_1)
+ orelse (Suite==repeat_SUITE andalso TC==test_case_1)
+ orelse ?fail({Suite,TC}),
+ empty_cth:post_init_per_testcase(Suite,TC,Config,Return,State).
+
+pre_end_per_testcase(Suite,TC,Config,State) ->
+ (Suite==skip_case_SUITE andalso (TC==skip_in_case
+ orelse TC==fail_in_end
+ orelse TC==exit_in_end))
+ orelse (Suite==seq_SUITE andalso TC==test_case_1)
+ orelse (Suite==repeat_SUITE andalso TC==test_case_1)
+ orelse ?fail({Suite,TC}),
+ empty_cth:pre_end_per_testcase(Suite,TC,Config,State).
+
+post_end_per_testcase(Suite,TC,Config,Return,State) ->
+ (Suite==skip_case_SUITE andalso (TC==skip_in_case
+ orelse TC==fail_in_end
+ orelse TC==exit_in_end))
+ orelse (Suite==seq_SUITE andalso TC==test_case_1)
+ orelse (Suite==repeat_SUITE andalso TC==test_case_1)
+ orelse ?fail({Suite,TC}),
+ empty_cth:post_end_per_testcase(Suite,TC,Config,Return,State).
+
+on_tc_fail(Suite,TC,Reason,State) ->
+ (Suite==seq_SUITE andalso TC==test_case_1)
+ orelse (Suite==config_clash_SUITE andalso TC==test_case_1)
+ orelse ?fail({Suite,TC}),
+ empty_cth:on_tc_fail(Suite,TC,Reason,State).
+
+on_tc_skip(all_hook_callbacks_SUITE=Suite,all=TC, Reason, State) ->
+ empty_cth:on_tc_skip(Suite,TC,Reason,State);
+on_tc_skip(Suite,TC,Reason,State)
+ when (Suite==skip_init_SUITE
+ orelse Suite==skip_req_SUITE
+ orelse Suite==skip_fail_SUITE)
+ andalso
+ (TC==init_per_suite
+ orelse TC==test_case
+ orelse TC==end_per_suite) ->
+ empty_cth:on_tc_skip(Suite,TC,Reason,State);
+on_tc_skip(skip_group_SUITE=Suite,TC={C,G},Reason,State)
+ when (C==init_per_group orelse C==test_case orelse C==end_per_group) andalso
+ (G==test_group_1 orelse G==test_group_2 orelse G==test_group_3) ->
+ empty_cth:on_tc_skip(Suite,TC,Reason,State);
+on_tc_skip(skip_case_SUITE=Suite,TC,Reason,State)
+ when TC==skip_in_spec;
+ TC==skip_in_init;
+ TC==fail_in_init;
+ TC==exit_in_init;
+ TC==skip_in_case;
+ TC==req_auto_skip;
+ TC==fail_auto_skip ->
+ empty_cth:on_tc_skip(Suite,TC,Reason,State);
+on_tc_skip(Suite,TC,Reason,State)
+ when (Suite==seq_SUITE andalso TC==test_case_2)
+ orelse (Suite==repeat_SUITE andalso TC==test_case_2) ->
+ empty_cth:on_tc_skip(Suite,TC,Reason,State);
+on_tc_skip(Suite,TC,Reason,State) ->
+ ?fail({Suite,TC}),
+ empty_cth:on_tc_skip(Suite,TC,Reason,State).
+
+terminate(State) ->
+ empty_cth:terminate(State).
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_fail_SUITE.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_fail_SUITE.erl
new file mode 100644
index 0000000000..9f5dfee6b9
--- /dev/null
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_fail_SUITE.erl
@@ -0,0 +1,53 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2010-2016. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-module(skip_fail_SUITE).
+
+-compile(export_all).
+
+-include("ct.hrl").
+
+suite() ->
+ faulty_return_value.
+
+init_per_suite(Config) ->
+ Config.
+
+end_per_suite(Config) ->
+ ok.
+
+init_per_group(_,Config) ->
+ Config.
+
+end_per_group(_,_) ->
+ ok.
+
+init_per_testcase(_,Config) ->
+ Config.
+
+end_per_testcase(_,_) ->
+ ok.
+
+all() ->
+ [test_case].
+
+%% Test cases start here.
+test_case(Config) ->
+ ok.
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_group_SUITE.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_group_SUITE.erl
new file mode 100644
index 0000000000..d3b848bfbd
--- /dev/null
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_group_SUITE.erl
@@ -0,0 +1,64 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2010-2016. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-module(skip_group_SUITE).
+
+-compile(export_all).
+
+-include("ct.hrl").
+
+suite() ->
+ [].
+
+init_per_suite(Config) ->
+ Config.
+
+end_per_suite(Config) ->
+ ok.
+
+group(test_group_1) ->
+ [{require,whatever}];
+group(test_group_2) ->
+ faulty_return_value;
+group(_) ->
+ [].
+
+init_per_group(test_group_3,Config) ->
+ {skip,"Skipped in init_per_group/2"};
+init_per_group(_,Config) ->
+ ct:fail("This shall never be run due to auto_skip from group/1").
+
+end_per_group(_,_) ->
+ ct:fail("This shall never be run").
+
+all() ->
+ [{group,test_group_1},
+ {group,test_group_2},
+ {group,test_group_3}].
+
+groups() ->
+ [{test_group_1,[test_case]},
+ {test_group_2,[test_case]},
+ {test_group_3,[test_case]}].
+
+%% Test cases start here.
+test_case(_Config) ->
+ ct:fail("This test case shall never be run due to skip on group level").
+
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_init_SUITE.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_init_SUITE.erl
new file mode 100644
index 0000000000..70305421ac
--- /dev/null
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_init_SUITE.erl
@@ -0,0 +1,53 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2010-2016. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-module(skip_init_SUITE).
+
+-compile(export_all).
+
+-include("ct.hrl").
+
+suite() ->
+ [].
+
+init_per_suite(Config) ->
+ {skip,"Skipped in init_per_suite/1"}.
+
+end_per_suite(Config) ->
+ ok.
+
+init_per_group(_,Config) ->
+ Config.
+
+end_per_group(_,_) ->
+ ok.
+
+init_per_testcase(_,Config) ->
+ Config.
+
+end_per_testcase(_,_) ->
+ ok.
+
+all() ->
+ [test_case].
+
+%% Test cases start here.
+test_case(Config) ->
+ ok.
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_post_suite_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_post_suite_cth.erl
index d5b347e723..48a2d70e22 100644
--- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_post_suite_cth.erl
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_post_suite_cth.erl
@@ -45,35 +45,35 @@ pre_end_per_suite(Suite,Config,State) ->
post_end_per_suite(Suite,Config,Return,State) ->
empty_cth:post_end_per_suite(Suite,Config,Return,State).
-pre_init_per_group(Group,Config,State) ->
- empty_cth:pre_init_per_group(Group,Config,State).
+pre_init_per_group(Suite,Group,Config,State) ->
+ empty_cth:pre_init_per_group(Suite,Group,Config,State).
-post_init_per_group(Group,Config,Return,State) ->
- empty_cth:post_init_per_group(Group,Config,Return,State).
+post_init_per_group(Suite,Group,Config,Return,State) ->
+ empty_cth:post_init_per_group(Suite,Group,Config,Return,State).
-pre_end_per_group(Group,Config,State) ->
- empty_cth:pre_end_per_group(Group,Config,State).
+pre_end_per_group(Suite,Group,Config,State) ->
+ empty_cth:pre_end_per_group(Suite,Group,Config,State).
-post_end_per_group(Group,Config,Return,State) ->
- empty_cth:post_end_per_group(Group,Config,Return,State).
+post_end_per_group(Suite,Group,Config,Return,State) ->
+ empty_cth:post_end_per_group(Suite,Group,Config,Return,State).
-pre_init_per_testcase(TC,Config,State) ->
- empty_cth:pre_init_per_testcase(TC,Config,State).
+pre_init_per_testcase(Suite,TC,Config,State) ->
+ empty_cth:pre_init_per_testcase(Suite,TC,Config,State).
-post_init_per_testcase(TC,Config,Return,State) ->
- empty_cth:post_init_per_testcase(TC,Config,Return,State).
+post_init_per_testcase(Suite,TC,Config,Return,State) ->
+ empty_cth:post_init_per_testcase(Suite,TC,Config,Return,State).
-pre_end_per_testcase(TC,Config,State) ->
- empty_cth:pre_end_per_testcase(TC,Config,State).
+pre_end_per_testcase(Suite,TC,Config,State) ->
+ empty_cth:pre_end_per_testcase(Suite,TC,Config,State).
-post_end_per_testcase(TC,Config,Return,State) ->
- empty_cth:post_end_per_testcase(TC,Config,Return,State).
+post_end_per_testcase(Suite,TC,Config,Return,State) ->
+ empty_cth:post_end_per_testcase(Suite,TC,Config,Return,State).
-on_tc_fail(TC, Reason, State) ->
- empty_cth:on_tc_fail(TC,Reason,State).
+on_tc_fail(Suite,TC, Reason, State) ->
+ empty_cth:on_tc_fail(Suite,TC,Reason,State).
-on_tc_skip(TC, Reason, State) ->
- empty_cth:on_tc_skip(TC,Reason,State).
+on_tc_skip(Suite,TC, Reason, State) ->
+ empty_cth:on_tc_skip(Suite,TC,Reason,State).
terminate(State) ->
empty_cth:terminate(State).
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_pre_end_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_pre_end_cth.erl
index 36abac0bf8..d638954d3c 100644
--- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_pre_end_cth.erl
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_pre_end_cth.erl
@@ -46,36 +46,36 @@ pre_end_per_suite(Suite,Config,State) ->
post_end_per_suite(Suite,Config,Return,State) ->
empty_cth:post_end_per_suite(Suite,Config,Return,State).
-pre_init_per_group(Group,Config,State) ->
- empty_cth:pre_init_per_group(Group,Config,State).
+pre_init_per_group(Suite,Group,Config,State) ->
+ empty_cth:pre_init_per_group(Suite,Group,Config,State).
-post_init_per_group(Group,Config,Return,State) ->
- empty_cth:post_init_per_group(Group,Config,Return,State).
+post_init_per_group(Suite,Group,Config,Return,State) ->
+ empty_cth:post_init_per_group(Suite,Group,Config,Return,State).
-pre_end_per_group(Group,Config,State) ->
- empty_cth:pre_end_per_group(Group,Config,State),
+pre_end_per_group(Suite,Group,Config,State) ->
+ empty_cth:pre_end_per_group(Suite,Group,Config,State),
{{skip, "Test skip"}, State}.
-post_end_per_group(Group,Config,Return,State) ->
- empty_cth:post_end_per_group(Group,Config,Return,State).
+post_end_per_group(Suite,Group,Config,Return,State) ->
+ empty_cth:post_end_per_group(Suite,Group,Config,Return,State).
-pre_init_per_testcase(TC,Config,State) ->
- empty_cth:pre_init_per_testcase(TC,Config,State).
+pre_init_per_testcase(Suite,TC,Config,State) ->
+ empty_cth:pre_init_per_testcase(Suite,TC,Config,State).
-post_init_per_testcase(TC,Config,Return,State) ->
- empty_cth:post_init_per_testcase(TC,Config,Return,State).
+post_init_per_testcase(Suite,TC,Config,Return,State) ->
+ empty_cth:post_init_per_testcase(Suite,TC,Config,Return,State).
-pre_end_per_testcase(TC,Config,State) ->
- empty_cth:pre_end_per_testcase(TC,Config,State).
+pre_end_per_testcase(Suite,TC,Config,State) ->
+ empty_cth:pre_end_per_testcase(Suite,TC,Config,State).
-post_end_per_testcase(TC,Config,Return,State) ->
- empty_cth:post_end_per_testcase(TC,Config,Return,State).
+post_end_per_testcase(Suite,TC,Config,Return,State) ->
+ empty_cth:post_end_per_testcase(Suite,TC,Config,Return,State).
-on_tc_fail(TC, Reason, State) ->
- empty_cth:on_tc_fail(TC,Reason,State).
+on_tc_fail(Suite,TC, Reason, State) ->
+ empty_cth:on_tc_fail(Suite,TC,Reason,State).
-on_tc_skip(TC, Reason, State) ->
- empty_cth:on_tc_skip(TC,Reason,State).
+on_tc_skip(Suite,TC, Reason, State) ->
+ empty_cth:on_tc_skip(Suite,TC,Reason,State).
terminate(State) ->
empty_cth:terminate(State).
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_pre_init_tc_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_pre_init_tc_cth.erl
new file mode 100644
index 0000000000..e1d261d59a
--- /dev/null
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_pre_init_tc_cth.erl
@@ -0,0 +1,79 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2010-2016. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+
+-module(skip_pre_init_tc_cth).
+
+
+-include_lib("common_test/src/ct_util.hrl").
+-include_lib("common_test/include/ct_event.hrl").
+
+
+%% CT Hooks
+-compile(export_all).
+
+init(Id, Opts) ->
+ empty_cth:init(Id, Opts).
+
+pre_init_per_suite(Suite, Config, State) ->
+ empty_cth:pre_init_per_suite(Suite,Config,State).
+
+post_init_per_suite(Suite,Config,Return,State) ->
+ empty_cth:post_init_per_suite(Suite,Config,Return,State).
+
+pre_end_per_suite(Suite,Config,State) ->
+ empty_cth:pre_end_per_suite(Suite,Config,State).
+
+post_end_per_suite(Suite,Config,Return,State) ->
+ empty_cth:post_end_per_suite(Suite,Config,Return,State).
+
+pre_init_per_group(Suite,Group,Config,State) ->
+ empty_cth:pre_init_per_group(Suite,Group,Config,State).
+
+post_init_per_group(Suite,Group,Config,Return,State) ->
+ empty_cth:post_init_per_group(Suite,Group,Config,Return,State).
+
+pre_end_per_group(Suite,Group,Config,State) ->
+ empty_cth:pre_end_per_group(Suite,Group,Config,State).
+
+post_end_per_group(Suite,Group,Config,Return,State) ->
+ empty_cth:post_end_per_group(Suite,Group,Config,Return,State).
+
+pre_init_per_testcase(Suite,TC,Config,State) ->
+ empty_cth:pre_init_per_testcase(Suite,TC,Config,State),
+ {{skip, "Skipped in pre_init_per_testcase"}, State}.
+
+post_init_per_testcase(Suite,TC,Config,Return,State) ->
+ empty_cth:post_init_per_testcase(Suite,TC,Config,Return,State).
+
+pre_end_per_testcase(Suite,TC,Config,State) ->
+ empty_cth:pre_end_per_testcase(Suite,TC,Config,State).
+
+post_end_per_testcase(Suite,TC,Config,Return,State) ->
+ empty_cth:post_end_per_testcase(Suite,TC,Config,Return,State).
+
+on_tc_fail(Suite,TC, Reason, State) ->
+ empty_cth:on_tc_fail(Suite,TC,Reason,State).
+
+on_tc_skip(Suite,TC, Reason, State) ->
+ empty_cth:on_tc_skip(Suite,TC,Reason,State).
+
+terminate(State) ->
+ empty_cth:terminate(State).
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_pre_suite_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_pre_suite_cth.erl
index fa510b2d54..d7b07ee33c 100644
--- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_pre_suite_cth.erl
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_pre_suite_cth.erl
@@ -46,35 +46,35 @@ pre_end_per_suite(Suite,Config,State) ->
post_end_per_suite(Suite,Config,Return,State) ->
empty_cth:post_end_per_suite(Suite,Config,Return,State).
-pre_init_per_group(Group,Config,State) ->
- empty_cth:pre_init_per_group(Group,Config,State).
+pre_init_per_group(Suite,Group,Config,State) ->
+ empty_cth:pre_init_per_group(Suite,Group,Config,State).
-post_init_per_group(Group,Config,Return,State) ->
- empty_cth:post_init_per_group(Group,Config,Return,State).
+post_init_per_group(Suite,Group,Config,Return,State) ->
+ empty_cth:post_init_per_group(Suite,Group,Config,Return,State).
-pre_end_per_group(Group,Config,State) ->
- empty_cth:pre_end_per_group(Group,Config,State).
+pre_end_per_group(Suite,Group,Config,State) ->
+ empty_cth:pre_end_per_group(Suite,Group,Config,State).
-post_end_per_group(Group,Config,Return,State) ->
- empty_cth:post_end_per_group(Group,Config,Return,State).
+post_end_per_group(Suite,Group,Config,Return,State) ->
+ empty_cth:post_end_per_group(Suite,Group,Config,Return,State).
-pre_init_per_testcase(TC,Config,State) ->
- empty_cth:pre_init_per_testcase(TC,Config,State).
+pre_init_per_testcase(Suite,TC,Config,State) ->
+ empty_cth:pre_init_per_testcase(Suite,TC,Config,State).
-post_init_per_testcase(TC,Config,Return,State) ->
- empty_cth:post_init_per_testcase(TC,Config,Return,State).
+post_init_per_testcase(Suite,TC,Config,Return,State) ->
+ empty_cth:post_init_per_testcase(Suite,TC,Config,Return,State).
-pre_end_per_testcase(TC,Config,State) ->
- empty_cth:pre_end_per_testcase(TC,Config,State).
+pre_end_per_testcase(Suite,TC,Config,State) ->
+ empty_cth:pre_end_per_testcase(Suite,TC,Config,State).
-post_end_per_testcase(TC,Config,Return,State) ->
- empty_cth:post_end_per_testcase(TC,Config,Return,State).
+post_end_per_testcase(Suite,TC,Config,Return,State) ->
+ empty_cth:post_end_per_testcase(Suite,TC,Config,Return,State).
-on_tc_fail(TC, Reason, State) ->
- empty_cth:on_tc_fail(TC,Reason,State).
+on_tc_fail(Suite,TC, Reason, State) ->
+ empty_cth:on_tc_fail(Suite,TC,Reason,State).
-on_tc_skip(TC, Reason, State) ->
- empty_cth:on_tc_skip(TC,Reason,State).
+on_tc_skip(Suite,TC, Reason, State) ->
+ empty_cth:on_tc_skip(Suite,TC,Reason,State).
terminate(State) ->
empty_cth:terminate(State).
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_req_SUITE.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_req_SUITE.erl
new file mode 100644
index 0000000000..bc69dd5ea4
--- /dev/null
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/skip_req_SUITE.erl
@@ -0,0 +1,53 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2010-2016. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-module(skip_req_SUITE).
+
+-compile(export_all).
+
+-include("ct.hrl").
+
+suite() ->
+ [{require,whatever}].
+
+init_per_suite(Config) ->
+ Config.
+
+end_per_suite(Config) ->
+ ok.
+
+init_per_group(_,Config) ->
+ Config.
+
+end_per_group(_,_) ->
+ ok.
+
+init_per_testcase(_,Config) ->
+ Config.
+
+end_per_testcase(_,_) ->
+ ok.
+
+all() ->
+ [test_case].
+
+%% Test cases start here.
+test_case(Config) ->
+ ok.
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/state_update_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/state_update_cth.erl
index 7ec0d458b6..c6e0419c50 100644
--- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/state_update_cth.erl
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/state_update_cth.erl
@@ -48,44 +48,44 @@ post_end_per_suite(Suite,Config,Return,State) ->
empty_cth:post_end_per_suite(Suite,Config,Return,State),
{Return, [post_end_per_suite|State]}.
-pre_init_per_group(Group,Config,State) ->
- empty_cth:pre_init_per_group(Group,Config,State),
+pre_init_per_group(Suite,Group,Config,State) ->
+ empty_cth:pre_init_per_group(Suite,Group,Config,State),
{Config, [pre_init_per_group|State]}.
-post_init_per_group(Group,Config,Return,State) ->
- empty_cth:post_init_per_group(Group,Config,Return,State),
+post_init_per_group(Suite,Group,Config,Return,State) ->
+ empty_cth:post_init_per_group(Suite,Group,Config,Return,State),
{Return, [post_init_per_group|State]}.
-pre_end_per_group(Group,Config,State) ->
- empty_cth:pre_end_per_group(Group,Config,State),
+pre_end_per_group(Suite,Group,Config,State) ->
+ empty_cth:pre_end_per_group(Suite,Group,Config,State),
{Config, [pre_end_per_group|State]}.
-post_end_per_group(Group,Config,Return,State) ->
- empty_cth:post_end_per_group(Group,Config,Return,State),
+post_end_per_group(Suite,Group,Config,Return,State) ->
+ empty_cth:post_end_per_group(Suite,Group,Config,Return,State),
{Return, [post_end_per_group|State]}.
-pre_init_per_testcase(TC,Config,State) ->
- empty_cth:pre_init_per_testcase(TC,Config,State),
+pre_init_per_testcase(Suite,TC,Config,State) ->
+ empty_cth:pre_init_per_testcase(Suite,TC,Config,State),
{Config, [pre_init_per_testcase|State]}.
-post_init_per_testcase(TC,Config,Return,State) ->
- empty_cth:post_init_per_testcase(TC,Config,Return,State),
+post_init_per_testcase(Suite,TC,Config,Return,State) ->
+ empty_cth:post_init_per_testcase(Suite,TC,Config,Return,State),
{Return, [post_init_per_testcase|State]}.
-pre_end_per_testcase(TC,Config,State) ->
- empty_cth:pre_end_per_testcase(TC,Config,State),
+pre_end_per_testcase(Suite,TC,Config,State) ->
+ empty_cth:pre_end_per_testcase(Suite,TC,Config,State),
{Config, [pre_end_per_testcase|State]}.
-post_end_per_testcase(TC,Config,Return,State) ->
- empty_cth:post_end_per_testcase(TC,Config,Return,State),
+post_end_per_testcase(Suite,TC,Config,Return,State) ->
+ empty_cth:post_end_per_testcase(Suite,TC,Config,Return,State),
{Return, [post_end_per_testcase|State]}.
-on_tc_fail(TC, Reason, State) ->
- empty_cth:on_tc_fail(TC,Reason,State),
+on_tc_fail(Suite,TC, Reason, State) ->
+ empty_cth:on_tc_fail(Suite,TC,Reason,State),
[on_tc_fail|State].
-on_tc_skip(TC, Reason, State) ->
- empty_cth:on_tc_skip(TC,Reason,State),
+on_tc_skip(Suite,TC, Reason, State) ->
+ empty_cth:on_tc_skip(Suite,TC,Reason,State),
[on_tc_skip|State].
terminate(State) ->
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/undef_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/undef_cth.erl
index 2b9e726819..10a7047899 100644
--- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/undef_cth.erl
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/undef_cth.erl
@@ -44,35 +44,35 @@ pre_end_per_suite(Suite,Config,State) ->
post_end_per_suite(Suite,Config,Return,State) ->
empty_cth:post_end_per_suite(Suite,Config,Return,State).
-pre_init_per_group(Group,Config,State) ->
- empty_cth:pre_init_per_group(Group,Config,State).
+pre_init_per_group(Suite,Group,Config,State) ->
+ empty_cth:pre_init_per_group(Suite,Group,Config,State).
-post_init_per_group(Group,Config,Return,State) ->
- empty_cth:post_init_per_group(Group,Config,Return,State).
+post_init_per_group(Suite,Group,Config,Return,State) ->
+ empty_cth:post_init_per_group(Suite,Group,Config,Return,State).
-pre_end_per_group(Group,Config,State) ->
- empty_cth:pre_end_per_group(Group,Config,State).
+pre_end_per_group(Suite,Group,Config,State) ->
+ empty_cth:pre_end_per_group(Suite,Group,Config,State).
-post_end_per_group(Group,Config,Return,State) ->
- empty_cth:post_end_per_group(Group,Config,Return,State).
+post_end_per_group(Suite,Group,Config,Return,State) ->
+ empty_cth:post_end_per_group(Suite,Group,Config,Return,State).
-pre_init_per_testcase(TC,Config,State) ->
- empty_cth:pre_init_per_testcase(TC,Config,State).
+pre_init_per_testcase(Suite,TC,Config,State) ->
+ empty_cth:pre_init_per_testcase(Suite,TC,Config,State).
-post_init_per_testcase(TC,Config,Return,State) ->
- empty_cth:post_init_per_testcase(TC,Config,Return,State).
+post_init_per_testcase(Suite,TC,Config,Return,State) ->
+ empty_cth:post_init_per_testcase(Suite,TC,Config,Return,State).
-pre_end_per_testcase(TC,Config,State) ->
- empty_cth:pre_end_per_testcase(TC,Config,State).
+pre_end_per_testcase(Suite,TC,Config,State) ->
+ empty_cth:pre_end_per_testcase(Suite,TC,Config,State).
-post_end_per_testcase(TC,Config,Return,State) ->
- empty_cth:post_end_per_testcase(TC,Config,Return,State).
+post_end_per_testcase(Suite,TC,Config,Return,State) ->
+ empty_cth:post_end_per_testcase(Suite,TC,Config,Return,State).
-on_tc_fail(TC, Reason, State) ->
- empty_cth:on_tc_fail(TC,Reason,State).
+on_tc_fail(Suite,TC, Reason, State) ->
+ empty_cth:on_tc_fail(Suite,TC,Reason,State).
-on_tc_skip(TC, Reason, State) ->
- empty_cth:on_tc_skip(TC,Reason,State).
+on_tc_skip(Suite,TC, Reason, State) ->
+ empty_cth:on_tc_skip(Suite,TC,Reason,State).
terminate(State) ->
empty_cth:terminate(State).
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/update_config_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/update_config_cth.erl
index d48981f667..f933c7702e 100644
--- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/update_config_cth.erl
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/update_config_cth.erl
@@ -50,43 +50,43 @@ post_end_per_suite(Suite,Config,Return,State) ->
NewConfig = [{post_end_per_suite,?now}|Config],
{NewConfig,NewConfig}.
-pre_init_per_group(Group,Config,State) ->
- empty_cth:pre_init_per_group(Group,Config,State),
+pre_init_per_group(Suite, Group,Config,State) ->
+ empty_cth:pre_init_per_group(Suite,Group,Config,State),
{[{pre_init_per_group,?now}|Config],State}.
-post_init_per_group(Group,Config,Return,State) ->
- empty_cth:post_init_per_group(Group,Config,Return,State),
+post_init_per_group(Suite,Group,Config,Return,State) ->
+ empty_cth:post_init_per_group(Suite,Group,Config,Return,State),
{[{post_init_per_group,?now}|Return],State}.
-pre_end_per_group(Group,Config,State) ->
- empty_cth:pre_end_per_group(Group,Config,State),
+pre_end_per_group(Suite,Group,Config,State) ->
+ empty_cth:pre_end_per_group(Suite,Group,Config,State),
{[{pre_end_per_group,?now}|Config],State}.
-post_end_per_group(Group,Config,Return,State) ->
- empty_cth:post_end_per_group(Group,Config,Return,State),
+post_end_per_group(Suite,Group,Config,Return,State) ->
+ empty_cth:post_end_per_group(Suite,Group,Config,Return,State),
{[{post_end_per_group,?now}|Config],State}.
-pre_init_per_testcase(TC,Config,State) ->
- empty_cth:pre_init_per_testcase(TC,Config,State),
+pre_init_per_testcase(Suite,TC,Config,State) ->
+ empty_cth:pre_init_per_testcase(Suite,TC,Config,State),
{[{pre_init_per_testcase,?now}|Config],State}.
-post_init_per_testcase(TC,Config,Return,State) ->
- empty_cth:post_init_per_testcase(TC,Config,Return,State),
+post_init_per_testcase(Suite,TC,Config,Return,State) ->
+ empty_cth:post_init_per_testcase(Suite,TC,Config,Return,State),
{[{post_init_per_testcase,?now}|Config],State}.
-pre_end_per_testcase(TC,Config,State) ->
- empty_cth:pre_end_per_testcase(TC,Config,State),
+pre_end_per_testcase(Suite,TC,Config,State) ->
+ empty_cth:pre_end_per_testcase(Suite,TC,Config,State),
{[{pre_end_per_testcase,?now}|Config],State}.
-post_end_per_testcase(TC,Config,Return,State) ->
- empty_cth:post_end_per_testcase(TC,Config,Return,State),
+post_end_per_testcase(Suite,TC,Config,Return,State) ->
+ empty_cth:post_end_per_testcase(Suite,TC,Config,Return,State),
{[{post_end_per_testcase,?now}|Config],State}.
-on_tc_fail(TC, Reason, State) ->
- empty_cth:on_tc_fail(TC,Reason,State).
+on_tc_fail(Suite,TC, Reason, State) ->
+ empty_cth:on_tc_fail(Suite,TC,Reason,State).
-on_tc_skip(TC, Reason, State) ->
- empty_cth:on_tc_skip(TC,Reason,State).
+on_tc_skip(Suite,TC, Reason, State) ->
+ empty_cth:on_tc_skip(Suite,TC,Reason,State).
terminate(State) ->
empty_cth:terminate(State).
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/verify_config_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/verify_config_cth.erl
index 71d84781e0..b29256a77e 100644
--- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/verify_config_cth.erl
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/verify_config_cth.erl
@@ -60,37 +60,37 @@ post_end_per_suite(Suite,Config,Return,State) ->
ct_no_config_SUITE = ct:get_config(suite_cfg),
empty_cth:post_end_per_suite(Suite,Config,Return,State).
-pre_init_per_group(Group,Config,State) ->
+pre_init_per_group(Suite,Group,Config,State) ->
true = ?val(post_init_per_suite, Config),
ct_no_config_SUITE = ct:get_config(suite_cfg),
test_group = ct:get_config(group_cfg),
- empty_cth:pre_init_per_group(Group,
+ empty_cth:pre_init_per_group(Suite,Group,
[{pre_init_per_group,true} | Config],
State).
-post_init_per_group(Group,Config,Return,State) ->
+post_init_per_group(Suite,Group,Config,Return,State) ->
true = ?val(pre_init_per_group, Return),
test_group = ct:get_config(group_cfg),
- empty_cth:post_init_per_group(Group,
+ empty_cth:post_init_per_group(Suite,Group,
Config,
[{post_init_per_group,true} | Return],
State).
-pre_end_per_group(Group,Config,State) ->
+pre_end_per_group(Suite,Group,Config,State) ->
true = ?val(post_init_per_group, Config),
ct_no_config_SUITE = ct:get_config(suite_cfg),
test_group = ct:get_config(group_cfg),
- empty_cth:pre_end_per_group(Group,
+ empty_cth:pre_end_per_group(Suite,Group,
[{pre_end_per_group,true} | Config],
State).
-post_end_per_group(Group,Config,Return,State) ->
+post_end_per_group(Suite,Group,Config,Return,State) ->
true = ?val(pre_end_per_group, Config),
ct_no_config_SUITE = ct:get_config(suite_cfg),
test_group = ct:get_config(group_cfg),
- empty_cth:post_end_per_group(Group,Config,Return,State).
+ empty_cth:post_end_per_group(Suite,Group,Config,Return,State).
-pre_init_per_testcase(TC,Config,State) ->
+pre_init_per_testcase(Suite,TC,Config,State) ->
true = ?val(post_init_per_suite, Config),
case ?val(name, ?val(tc_group_properties, Config)) of
undefined ->
@@ -102,19 +102,19 @@ pre_init_per_testcase(TC,Config,State) ->
ct_no_config_SUITE = ct:get_config(suite_cfg),
CfgKey = list_to_atom(atom_to_list(TC) ++ "_cfg"),
TC = ct:get_config(CfgKey),
- empty_cth:pre_init_per_testcase(TC,
+ empty_cth:pre_init_per_testcase(Suite,TC,
[{pre_init_per_testcase,true} | Config],
State).
%%! TODO: Verify Config also in post_init and pre_end!
-post_init_per_testcase(TC,Config,Return,State) ->
- empty_cth:post_init_per_testcase(TC,Config,Return,State).
+post_init_per_testcase(Suite,TC,Config,Return,State) ->
+ empty_cth:post_init_per_testcase(Suite,TC,Config,Return,State).
-pre_end_per_testcase(TC,Config,State) ->
- empty_cth:pre_end_per_testcase(TC,Config,State).
+pre_end_per_testcase(Suite,TC,Config,State) ->
+ empty_cth:pre_end_per_testcase(Suite,TC,Config,State).
-post_end_per_testcase(TC,Config,Return,State) ->
+post_end_per_testcase(Suite,TC,Config,Return,State) ->
true = ?val(post_init_per_suite, Config),
true = ?val(pre_init_per_testcase, Config),
case ?val(name, ?val(tc_group_properties, Config)) of
@@ -127,13 +127,13 @@ post_end_per_testcase(TC,Config,Return,State) ->
ct_no_config_SUITE = ct:get_config(suite_cfg),
CfgKey = list_to_atom(atom_to_list(TC) ++ "_cfg"),
TC = ct:get_config(CfgKey),
- empty_cth:post_end_per_testcase(TC,Config,Return,State).
+ empty_cth:post_end_per_testcase(Suite,TC,Config,Return,State).
-on_tc_fail(TC, Reason, State) ->
- empty_cth:on_tc_fail(TC,Reason,State).
+on_tc_fail(Suite,TC, Reason, State) ->
+ empty_cth:on_tc_fail(Suite,TC,Reason,State).
-on_tc_skip(TC, Reason, State) ->
- empty_cth:on_tc_skip(TC,Reason,State).
+on_tc_skip(Suite,TC, Reason, State) ->
+ empty_cth:on_tc_skip(Suite,TC,Reason,State).
terminate(State) ->
empty_cth:terminate(State).
diff --git a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/verify_data_dir_cth.erl b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/verify_data_dir_cth.erl
index 9abd2e5e83..42e086b96e 100644
--- a/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/verify_data_dir_cth.erl
+++ b/lib/common_test/test/ct_hooks_SUITE_data/cth/tests/verify_data_dir_cth.erl
@@ -62,43 +62,43 @@ post_end_per_suite(Suite,Config,Return,State) ->
check_dirs(State,Config),
empty_cth:post_end_per_suite(Suite,Config,Return,State).
-pre_init_per_group(Group,Config,State) ->
+pre_init_per_group(Suite,Group,Config,State) ->
check_dirs(State,Config),
- empty_cth:pre_init_per_group(Group,Config,State).
+ empty_cth:pre_init_per_group(Suite,Group,Config,State).
-post_init_per_group(Group,Config,Return,State) ->
+post_init_per_group(Suite,Group,Config,Return,State) ->
check_dirs(State,Return),
- empty_cth:post_init_per_group(Group,Config,Return,State).
+ empty_cth:post_init_per_group(Suite,Group,Config,Return,State).
-pre_end_per_group(Group,Config,State) ->
+pre_end_per_group(Suite,Group,Config,State) ->
check_dirs(State,Config),
- empty_cth:pre_end_per_group(Group,Config,State).
+ empty_cth:pre_end_per_group(Suite,Group,Config,State).
-post_end_per_group(Group,Config,Return,State) ->
+post_end_per_group(Suite,Group,Config,Return,State) ->
check_dirs(State,Config),
- empty_cth:post_end_per_group(Group,Config,Return,State).
+ empty_cth:post_end_per_group(Suite,Group,Config,Return,State).
-pre_init_per_testcase(TC,Config,State) ->
+pre_init_per_testcase(Suite,TC,Config,State) ->
check_dirs(State,Config),
- empty_cth:pre_init_per_testcase(TC,Config,State).
+ empty_cth:pre_init_per_testcase(Suite,TC,Config,State).
-post_init_per_testcase(TC,Config,Return,State) ->
+post_init_per_testcase(Suite,TC,Config,Return,State) ->
check_dirs(State,Config),
- empty_cth:post_init_per_testcase(TC,Config,Return,State).
+ empty_cth:post_init_per_testcase(Suite,TC,Config,Return,State).
-pre_end_per_testcase(TC,Config,State) ->
+pre_end_per_testcase(Suite,TC,Config,State) ->
check_dirs(State,Config),
- empty_cth:pre_end_per_testcase(TC,Config,State).
+ empty_cth:pre_end_per_testcase(Suite,TC,Config,State).
-post_end_per_testcase(TC,Config,Return,State) ->
+post_end_per_testcase(Suite,TC,Config,Return,State) ->
check_dirs(State,Config),
- empty_cth:post_end_per_testcase(TC,Config,Return,State).
+ empty_cth:post_end_per_testcase(Suite,TC,Config,Return,State).
-on_tc_fail(TC, Reason, State) ->
- empty_cth:on_tc_fail(TC,Reason,State).
+on_tc_fail(Suite,TC, Reason, State) ->
+ empty_cth:on_tc_fail(Suite,TC,Reason,State).
-on_tc_skip(TC, Reason, State) ->
- empty_cth:on_tc_skip(TC,Reason,State).
+on_tc_skip(Suite,TC, Reason, State) ->
+ empty_cth:on_tc_skip(Suite,TC,Reason,State).
terminate(State) ->
empty_cth:terminate(State).
diff --git a/lib/common_test/test/ct_repeat_testrun_SUITE.erl b/lib/common_test/test/ct_repeat_testrun_SUITE.erl
index f8b6a379f6..76611a2db3 100644
--- a/lib/common_test/test/ct_repeat_testrun_SUITE.erl
+++ b/lib/common_test/test/ct_repeat_testrun_SUITE.erl
@@ -363,14 +363,17 @@ skip_first_tc1(Suite) ->
{?eh,tc_start,{Suite,tc1}},
{?eh,tc_done,{Suite,tc1,ok}},
{?eh,test_stats,{'_',0,{0,0}}},
+ {?eh,tc_start,{Suite,tc2}},
{?eh,tc_done,{Suite,tc2,?skipped}},
{?eh,test_stats,{'_',0,{0,1}}},
+ {?eh,tc_start,{Suite,{init_per_group,g,[]}}},
{?eh,tc_done,{Suite,{init_per_group,g,[]},?skipped}},
{?eh,tc_auto_skip,{Suite,{tc1,g},?skip_reason}},
{?eh,test_stats,{'_',0,{0,2}}},
{?eh,tc_auto_skip,{Suite,{tc2,g},?skip_reason}},
{?eh,test_stats,{'_',0,{0,3}}},
{?eh,tc_auto_skip,{Suite,{end_per_group,g},?skip_reason}},
+ {?eh,tc_start,{Suite,tc2}},
{?eh,tc_done,{Suite,tc2,?skipped}},
{?eh,test_stats,{'_',0,{0,4}}},
{?eh,tc_start,{Suite,end_per_suite}},
@@ -390,10 +393,12 @@ skip_tc1_in_group(Suite) ->
{?eh,tc_start,{Suite,tc1}},
{?eh,tc_done,{Suite,tc1,ok}},
{?eh,test_stats,{'_',0,{0,0}}},
+ {?eh,tc_start,{Suite,tc2}},
{?eh,tc_done,{Suite,tc2,?skipped}},
{?eh,test_stats,{'_',0,{0,1}}},
{?eh,tc_start,{Suite,{end_per_group,g,[]}}},
{?eh,tc_done,{Suite,{end_per_group,g,[]},ok}}],
+ {?eh,tc_start,{Suite,tc2}},
{?eh,tc_done,{Suite,tc2,?skipped}},
{?eh,test_stats,{'_',0,{0,2}}},
{?eh,tc_start,{Suite,end_per_suite}},
diff --git a/lib/common_test/test/ct_surefire_SUITE.erl b/lib/common_test/test/ct_surefire_SUITE.erl
index 42ec685c16..884217afc2 100644
--- a/lib/common_test/test/ct_surefire_SUITE.erl
+++ b/lib/common_test/test/ct_surefire_SUITE.erl
@@ -73,7 +73,9 @@ all() ->
relative_path,
url,
logdir,
- fail_pre_init_per_suite
+ fail_pre_init_per_suite,
+ skip_case_in_spec,
+ skip_suite_in_spec
].
%%--------------------------------------------------------------------
@@ -119,6 +121,18 @@ fail_pre_init_per_suite(Config) when is_list(Config) ->
run(fail_pre_init_per_suite,[fail_pre_init_per_suite,
{cth_surefire,[{path,Path}]}],Path,Config,[],Suites).
+skip_case_in_spec(Config) ->
+ DataDir = ?config(data_dir,Config),
+ Spec = filename:join(DataDir,"skip_one_case.spec"),
+ Path = "skip_case_in_spec.xml",
+ run_spec(skip_case_in_spec,[{cth_surefire,[{path,Path}]}],Path,Config,Spec).
+
+skip_suite_in_spec(Config) ->
+ DataDir = ?config(data_dir,Config),
+ Spec = filename:join(DataDir,"skip_one_suite.spec"),
+ Path = "skip_suite_in_spec.xml",
+ run_spec(skip_suite_in_spec,[{cth_surefire,[{path,Path}]}],Path,Config,Spec).
+
%%%-----------------------------------------------------------------
%%% HELP FUNCTIONS
%%%-----------------------------------------------------------------
@@ -129,8 +143,15 @@ run(Case,CTHs,Report,Config,ExtraOpts) ->
Suite = filename:join(DataDir, "surefire_SUITE"),
run(Case,CTHs,Report,Config,ExtraOpts,Suite).
run(Case,CTHs,Report,Config,ExtraOpts,Suite) ->
- {Opts,ERPid} = setup([{suite,Suite},{ct_hooks,CTHs},{label,Case}|ExtraOpts],
- Config),
+ Test = [{suite,Suite},{ct_hooks,CTHs},{label,Case}|ExtraOpts],
+ do_run(Case, Report, Test, Config).
+
+run_spec(Case,CTHs,Report,Config,Spec) ->
+ Test = [{spec,Spec},{ct_hooks,CTHs},{label,Case}],
+ do_run(Case, Report, Test, Config).
+
+do_run(Case, Report, Test, Config) ->
+ {Opts,ERPid} = setup(Test, Config),
ok = execute(Case, Opts, ERPid, Config),
LogDir =
case lists:keyfind(logdir,1,Opts) of
@@ -201,7 +222,10 @@ test_suite_events(pass_SUITE) ->
{?eh,test_stats,{1,0,{0,0}}},
{?eh,tc_start,{ct_framework,end_per_suite}},
{?eh,tc_done,{ct_framework,end_per_suite,ok}}];
-test_suite_events(_) ->
+test_suite_events(skip_all_surefire_SUITE) ->
+ [{?eh,tc_user_skip,{skip_all_surefire_SUITE,all,"skipped in spec"}},
+ {?eh,test_stats,{0,0,{1,0}}}];
+test_suite_events(Test) ->
[{?eh,tc_start,{surefire_SUITE,init_per_suite}},
{?eh,tc_done,{surefire_SUITE,init_per_suite,ok}},
{?eh,tc_start,{surefire_SUITE,tc_ok}},
@@ -210,46 +234,55 @@ test_suite_events(_) ->
{?eh,tc_start,{surefire_SUITE,tc_fail}},
{?eh,tc_done,{surefire_SUITE,tc_fail,
{failed,{error,{test_case_failed,"this test should fail"}}}}},
- {?eh,test_stats,{1,1,{0,0}}},
- {?eh,tc_start,{surefire_SUITE,tc_skip}},
- {?eh,tc_done,{surefire_SUITE,tc_skip,{skipped,"this test is skipped"}}},
- {?eh,test_stats,{1,1,{1,0}}},
- {?eh,tc_start,{surefire_SUITE,tc_autoskip_require}},
- {?eh,tc_done,{surefire_SUITE,tc_autoskip_require,
- {auto_skipped,{require_failed,'_'}}}},
- {?eh,test_stats,{1,1,{1,1}}},
- [{?eh,tc_start,{surefire_SUITE,{init_per_group,g,[]}}},
- {?eh,tc_done,{surefire_SUITE,{init_per_group,g,[]},ok}},
- {?eh,tc_start,{surefire_SUITE,tc_ok}},
- {?eh,tc_done,{surefire_SUITE,tc_ok,ok}},
- {?eh,test_stats,{2,1,{1,1}}},
- {?eh,tc_start,{surefire_SUITE,tc_fail}},
- {?eh,tc_done,{surefire_SUITE,tc_fail,
- {failed,{error,{test_case_failed,"this test should fail"}}}}},
- {?eh,test_stats,{2,2,{1,1}}},
- {?eh,tc_start,{surefire_SUITE,tc_skip}},
- {?eh,tc_done,{surefire_SUITE,tc_skip,{skipped,"this test is skipped"}}},
- {?eh,test_stats,{2,2,{2,1}}},
- {?eh,tc_start,{surefire_SUITE,tc_autoskip_require}},
- {?eh,tc_done,{surefire_SUITE,tc_autoskip_require,
- {auto_skipped,{require_failed,'_'}}}},
- {?eh,test_stats,{2,2,{2,2}}},
- {?eh,tc_start,{surefire_SUITE,{end_per_group,g,[]}}},
- {?eh,tc_done,{surefire_SUITE,{end_per_group,g,[]},ok}}],
- [{?eh,tc_start,{surefire_SUITE,{init_per_group,g_fail,[]}}},
- {?eh,tc_done,{surefire_SUITE,{init_per_group,g_fail,[]},
- {failed,{error,all_cases_should_be_skipped}}}},
- {?eh,tc_auto_skip,{surefire_SUITE,{tc_ok,g_fail},
- {failed,
- {surefire_SUITE,init_per_group,
- {'EXIT',all_cases_should_be_skipped}}}}},
- {?eh,test_stats,{2,2,{2,3}}},
- {?eh,tc_auto_skip,{surefire_SUITE,{end_per_group,g_fail},
- {failed,
- {surefire_SUITE,init_per_group,
- {'EXIT',all_cases_should_be_skipped}}}}}],
- {?eh,tc_start,{surefire_SUITE,end_per_suite}},
- {?eh,tc_done,{surefire_SUITE,end_per_suite,ok}}].
+ {?eh,test_stats,{1,1,{0,0}}}] ++
+ tc_skip_events(Test,undefined) ++
+ [{?eh,test_stats,{1,1,{1,0}}},
+ {?eh,tc_start,{surefire_SUITE,tc_autoskip_require}},
+ {?eh,tc_done,{surefire_SUITE,tc_autoskip_require,
+ {auto_skipped,{require_failed,'_'}}}},
+ {?eh,test_stats,{1,1,{1,1}}},
+ [{?eh,tc_start,{surefire_SUITE,{init_per_group,g,[]}}},
+ {?eh,tc_done,{surefire_SUITE,{init_per_group,g,[]},ok}},
+ {?eh,tc_start,{surefire_SUITE,tc_ok}},
+ {?eh,tc_done,{surefire_SUITE,tc_ok,ok}},
+ {?eh,test_stats,{2,1,{1,1}}},
+ {?eh,tc_start,{surefire_SUITE,tc_fail}},
+ {?eh,tc_done,{surefire_SUITE,tc_fail,
+ {failed,{error,{test_case_failed,"this test should fail"}}}}},
+ {?eh,test_stats,{2,2,{1,1}}}] ++
+ tc_skip_events(Test,g) ++
+ [{?eh,test_stats,{2,2,{2,1}}},
+ {?eh,tc_start,{surefire_SUITE,tc_autoskip_require}},
+ {?eh,tc_done,{surefire_SUITE,tc_autoskip_require,
+ {auto_skipped,{require_failed,'_'}}}},
+ {?eh,test_stats,{2,2,{2,2}}},
+ {?eh,tc_start,{surefire_SUITE,{end_per_group,g,[]}}},
+ {?eh,tc_done,{surefire_SUITE,{end_per_group,g,[]},ok}}],
+ [{?eh,tc_start,{surefire_SUITE,{init_per_group,g_fail,[]}}},
+ {?eh,tc_done,{surefire_SUITE,{init_per_group,g_fail,[]},
+ {failed,{error,all_cases_should_be_skipped}}}},
+ {?eh,tc_auto_skip,{surefire_SUITE,{tc_ok,g_fail},
+ {failed,
+ {surefire_SUITE,init_per_group,
+ {'EXIT',all_cases_should_be_skipped}}}}},
+ {?eh,test_stats,{2,2,{2,3}}},
+ {?eh,tc_auto_skip,{surefire_SUITE,{end_per_group,g_fail},
+ {failed,
+ {surefire_SUITE,init_per_group,
+ {'EXIT',all_cases_should_be_skipped}}}}}],
+ {?eh,tc_start,{surefire_SUITE,end_per_suite}},
+ {?eh,tc_done,{surefire_SUITE,end_per_suite,ok}}].
+
+tc_skip_events(skip_case_in_spec,Group) ->
+ [{?eh,tc_user_skip,{surefire_SUITE,tc_skip_name(Group),"skipped in spec"}}];
+tc_skip_events(_Test,_Group) ->
+ [{?eh,tc_start,{surefire_SUITE,tc_skip}},
+ {?eh,tc_done,{surefire_SUITE,tc_skip,{skipped,"this test is skipped"}}}].
+
+tc_skip_name(undefined) ->
+ tc_skip;
+tc_skip_name(Group) ->
+ {tc_skip,Group}.
test_events(fail_pre_init_per_suite) ->
[{?eh,start_logging,{'DEF','RUNDIR'}},
@@ -257,6 +290,10 @@ test_events(fail_pre_init_per_suite) ->
test_suite_events(pass_SUITE) ++
test_suite_events(fail_SUITE, {1,0,{0,1}}) ++
[{?eh,stop_logging,[]}];
+test_events(skip_suite_in_spec) ->
+ [{?eh,start_logging,'_'},{?eh,start_info,{1,1,0}}] ++
+ test_suite_events(skip_all_surefire_SUITE) ++
+ [{?eh,stop_logging,[]}];
test_events(Test) ->
[{?eh,start_logging,'_'}, {?eh,start_info,{1,1,9}}] ++
test_suite_events(Test) ++
@@ -364,6 +401,8 @@ failed_or_skipped([]) ->
events_to_result(E) ->
events_to_result(E, []).
+events_to_result([{?eh,tc_user_skip,{_Suite,all,_}}|E], Result) ->
+ events_to_result(E, [[[s]]|Result]);
events_to_result([{?eh,tc_auto_skip,{_Suite,init_per_suite,_}}|E], Result) ->
{Suite,Rest} = events_to_result1(E),
events_to_result(Rest, [[[s]|Suite]|Result]);
@@ -382,7 +421,7 @@ events_to_result1([{?eh,tc_done,{_Suite, end_per_suite,R}}|E]) ->
events_to_result1([{?eh,tc_done,{_Suite,_Case,R}}|E]) ->
{Suite,Rest} = events_to_result1(E),
{[result(R)|Suite],Rest};
-events_to_result1([{?eh,tc_auto_skip,_}|E]) ->
+events_to_result1([{?eh,Skip,_}|E]) when Skip==tc_auto_skip; Skip==tc_user_skip ->
{Suite,Rest} = events_to_result1(E),
{[[s]|Suite],Rest};
events_to_result1([_|E]) ->
diff --git a/lib/common_test/test/ct_surefire_SUITE_data/skip_one_case.spec b/lib/common_test/test/ct_surefire_SUITE_data/skip_one_case.spec
new file mode 100644
index 0000000000..42df8a7d1a
--- /dev/null
+++ b/lib/common_test/test/ct_surefire_SUITE_data/skip_one_case.spec
@@ -0,0 +1,2 @@
+{suites,".",surefire_SUITE}.
+{skip_cases,".",surefire_SUITE,tc_skip,"skipped in spec"}.
diff --git a/lib/common_test/test/ct_surefire_SUITE_data/skip_one_suite.spec b/lib/common_test/test/ct_surefire_SUITE_data/skip_one_suite.spec
new file mode 100644
index 0000000000..57966328ab
--- /dev/null
+++ b/lib/common_test/test/ct_surefire_SUITE_data/skip_one_suite.spec
@@ -0,0 +1,2 @@
+{suites,".",[skip_all_surefire_SUITE]}.
+{skip_suites,".",skip_all_surefire_SUITE,"skipped in spec"}.
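The two new spec files exercise the skip_cases and skip_suites test specification terms. As a minimal sketch (assuming the spec file sits in the current working directory), such a spec can be run from the Erlang shell; the matching case or suite is then reported as user-skipped with the given comment:

    %% Run Common Test with a test specification; the skip_cases/skip_suites
    %% terms make the matching test case or suite show up as user-skipped.
    ct:run_test([{spec, "skip_one_case.spec"}]).
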
diff --git a/lib/common_test/test/ct_test_server_if_1_SUITE.erl b/lib/common_test/test/ct_test_server_if_1_SUITE.erl
index 228d900545..ea8a1a5662 100644
--- a/lib/common_test/test/ct_test_server_if_1_SUITE.erl
+++ b/lib/common_test/test/ct_test_server_if_1_SUITE.erl
@@ -161,6 +161,7 @@ test_events(ts_if_1) ->
{?eh,tc_start,{ts_if_1_SUITE,tc4}},
{?eh,tc_done,{ts_if_1_SUITE,tc4,{failed,{error,failed_on_purpose}}}},
{?eh,test_stats,{1,2,{0,1}}},
+ {?eh,tc_start,{ts_if_1_SUITE,tc5}},
{?eh,tc_done,{ts_if_1_SUITE,tc5,{auto_skipped,{sequence_failed,seq1,tc4}}}},
{?eh,test_stats,{1,2,{0,2}}},
diff --git a/lib/common_test/test/ct_test_support.erl b/lib/common_test/test/ct_test_support.erl
index e926abd885..05a452b99d 100644
--- a/lib/common_test/test/ct_test_support.erl
+++ b/lib/common_test/test/ct_test_support.erl
@@ -765,23 +765,23 @@ locate({parallel,TEvs}, Node, Evs, Config) ->
{Done,RemEvs2,length(RemEvs2)}
end;
%% end_per_group auto- or user skipped
- (TEv={TEH,AutoOrUserSkip,{M,end_per_group,R}}, {Done,RemEvs,_RemSize})
+ (TEv={TEH,AutoOrUserSkip,{M,{end_per_group,G},R}}, {Done,RemEvs,_RemSize})
when AutoOrUserSkip == tc_auto_skip;
AutoOrUserSkip == tc_user_skip ->
RemEvs1 =
lists:dropwhile(
fun({EH,#event{name=tc_auto_skip,
node=EvNode,
- data={Mod,end_per_group,Reason}}}) when
- EH == TEH, EvNode == Node, Mod == M ->
+ data={Mod,{end_per_group,EvGroupName},Reason}}}) when
+ EH == TEH, EvNode == Node, Mod == M, EvGroupName == G ->
case match_data(R, Reason) of
match -> false;
_ -> true
end;
({EH,#event{name=tc_user_skip,
node=EvNode,
- data={Mod,end_per_group,Reason}}}) when
- EH == TEH, EvNode == Node, Mod == M ->
+ data={Mod,{end_per_group,EvGroupName},Reason}}}) when
+ EH == TEH, EvNode == Node, Mod == M, EvGroupName == G ->
case match_data(R, Reason) of
match -> false;
_ -> true
@@ -1008,20 +1008,20 @@ locate({shuffle,TEvs}, Node, Evs, Config) ->
{Done,RemEvs2,length(RemEvs2)}
end;
%% end_per_group auto-or user skipped
- (TEv={TEH,AutoOrUserSkip,{M,end_per_group,R}}, {Done,RemEvs,_RemSize})
+ (TEv={TEH,AutoOrUserSkip,{M,{end_per_group,G},R}}, {Done,RemEvs,_RemSize})
when AutoOrUserSkip == tc_auto_skip;
AutoOrUserSkip == tc_user_skip ->
RemEvs1 =
lists:dropwhile(
fun({EH,#event{name=tc_auto_skip,
node=EvNode,
- data={Mod,end_per_group,Reason}}}) when
- EH == TEH, EvNode == Node, Mod == M, Reason == R ->
+ data={Mod,{end_per_group,EvGroupName},Reason}}}) when
+ EH == TEH, EvNode == Node, Mod == M, EvGroupName == G, Reason == R ->
false;
({EH,#event{name=tc_user_skip,
node=EvNode,
- data={Mod,end_per_group,Reason}}}) when
- EH == TEH, EvNode == Node, Mod == M, Reason == R ->
+ data={Mod,{end_per_group,EvGroupName},Reason}}}) when
+ EH == TEH, EvNode == Node, Mod == M, EvGroupName == G, Reason == R ->
false;
({EH,#event{name=stop_logging,
node=EvNode,data=_}}) when
@@ -1264,10 +1264,10 @@ log_events1([E={_EH,tc_done,{_M,{end_per_group,_GrName,Props},_R}} | Evs], Dev,
io:format(Dev, "~s~p]},~n", [Ind,E]),
log_events1(Evs, Dev, Ind--" ")
end;
-log_events1([E={_EH,tc_auto_skip,{_M,end_per_group,_Reason}} | Evs], Dev, Ind) ->
+log_events1([E={_EH,tc_auto_skip,{_M,{end_per_group,_GrName},_Reason}} | Evs], Dev, Ind) ->
io:format(Dev, "~s~p],~n", [Ind,E]),
log_events1(Evs, Dev, Ind--" ");
-log_events1([E={_EH,tc_user_skip,{_M,end_per_group,_Reason}} | Evs], Dev, Ind) ->
+log_events1([E={_EH,tc_user_skip,{_M,{end_per_group,_GrName},_Reason}} | Evs], Dev, Ind) ->
io:format(Dev, "~s~p],~n", [Ind,E]),
log_events1(Evs, Dev, Ind--" ");
log_events1([E], Dev, Ind) ->
diff --git a/lib/common_test/test/ct_testspec_2_SUITE.erl b/lib/common_test/test/ct_testspec_2_SUITE.erl
index 1a941df185..1bab80942a 100644
--- a/lib/common_test/test/ct_testspec_2_SUITE.erl
+++ b/lib/common_test/test/ct_testspec_2_SUITE.erl
@@ -220,7 +220,24 @@ basic_compatible_no_nodes(_Config) ->
{tc2,{skip,"skipped"}}]}]}],
merge_tests = true},
- verify_result(Verify,ListResult,FileResult).
+ verify_result(Verify,ListResult,FileResult),
+
+ {ok,Tests} = ct_testspec:get_tests([SpecFile]),
+ ct:pal("ct_testspec:get_tests/1:~n~p~n", [Tests]),
+ [{[SpecFile],[{Node,Run,Skip}]}] = Tests,
+ [{Alias1V,x_SUITE,all},
+ {Alias1V,y_SUITE,[{g1,all},{g2,all},tc1,tc2]},
+ {Alias1V,z_SUITE,all},
+ {Alias2V,x_SUITE,all},
+ {Alias2V,y_SUITE,all}] = lists:sort(Run),
+ [{Alias1V,z_SUITE,"skipped"},
+ {Alias2V,x_SUITE,{g1,all},"skipped"},
+ {Alias2V,x_SUITE,{g2,all},"skipped"},
+ {Alias2V,y_SUITE,tc1,"skipped"},
+ {Alias2V,y_SUITE,tc2,"skipped"}] = lists:sort(Skip),
+
+ ok.
+
%%%-----------------------------------------------------------------
%%%
@@ -346,7 +363,25 @@ basic_compatible_nodes(_Config) ->
{tc2,{skip,"skipped"}}]}]}],
merge_tests = true},
- verify_result(Verify,ListResult,FileResult).
+ verify_result(Verify,ListResult,FileResult),
+
+ {ok,Tests} = ct_testspec:get_tests([SpecFile]),
+ ct:pal("ct_testspec:get_tests/1:~n~p~n", [Tests]),
+ [{[SpecFile],[{Node,[],[]},
+ {Node1,Run1,Skip1},
+ {Node2,Run2,Skip2}]}] = Tests,
+ [{TO1V,x_SUITE,all},
+ {TO1V,y_SUITE,[{g1,all},{g2,all},tc1,tc2]},
+ {TO1V,z_SUITE,all}] = lists:sort(Run1),
+ [{TO2V,x_SUITE,all},
+ {TO2V,y_SUITE,all}] = lists:sort(Run2),
+ [{TO1V,z_SUITE,"skipped"}] = lists:sort(Skip1),
+ [{TO2V,x_SUITE,{g1,all},"skipped"},
+ {TO2V,x_SUITE,{g2,all},"skipped"},
+ {TO2V,y_SUITE,tc1,"skipped"},
+ {TO2V,y_SUITE,tc2,"skipped"}] = lists:sort(Skip2),
+
+ ok.
%%%-----------------------------------------------------------------
%%%
@@ -439,7 +474,28 @@ no_merging(_Config) ->
[{y_SUITE,[{tc1,{skip,"skipped"}},
{tc2,{skip,"skipped"}}]}]}]},
- verify_result(Verify,ListResult,FileResult).
+ verify_result(Verify,ListResult,FileResult),
+
+ {ok,Tests} = ct_testspec:get_tests([SpecFile]),
+ ct:pal("ct_testspec:get_tests/1:~n~p~n", [Tests]),
+ [{[SpecFile],[{Node,[],[]},
+ {Node1,Run1,Skip1},
+ {Node2,Run2,Skip2}]}] = Tests,
+ [{TO1V,x_SUITE,all},
+ {TO1V,y_SUITE,[tc1,tc2]},
+ {TO1V,y_SUITE,[{g1,all},{g2,all}]},
+ {TO1V,z_SUITE,all}] = lists:sort(Run1),
+ [{TO2V,x_SUITE,all},
+ {TO2V,x_SUITE,[{skipped,g1,all},{skipped,g2,all}]},
+ {TO2V,y_SUITE,all},
+ {TO2V,y_SUITE,[{skipped,tc1},{skipped,tc2}]}] = lists:sort(Run2),
+ [{TO1V,z_SUITE,"skipped"}] = lists:sort(Skip1),
+ [{TO2V,x_SUITE,{g1,all},"skipped"},
+ {TO2V,x_SUITE,{g2,all},"skipped"},
+ {TO2V,y_SUITE,tc1,"skipped"},
+ {TO2V,y_SUITE,tc2,"skipped"}] = lists:sort(Skip2),
+
+ ok.
%%%-----------------------------------------------------------------
%%%
@@ -510,7 +566,25 @@ multiple_specs(_Config) ->
{y_SUITE,[all,{tc1,{skip,"skipped"}},
{tc2,{skip,"skipped"}}]}]}]},
- verify_result(Verify,FileResult,FileResult).
+ verify_result(Verify,FileResult,FileResult),
+
+ {ok,Tests} = ct_testspec:get_tests([[SpecFile1,SpecFile2]]),
+ ct:pal("ct_testspec:get_tests/1:~n~p~n", [Tests]),
+ [{[SpecFile1,SpecFile2],[{Node,[],[]},
+ {Node1,Run1,Skip1},
+ {Node2,Run2,Skip2}]}] = Tests,
+ [{TO1V,x_SUITE,all},
+ {TO1V,y_SUITE,[{g1,all},{g2,all},tc1,tc2]},
+ {TO1V,z_SUITE,all}] = lists:sort(Run1),
+ [{TO2V,x_SUITE,all},
+ {TO2V,y_SUITE,all}] = lists:sort(Run2),
+ [{TO1V,z_SUITE,"skipped"}] = lists:sort(Skip1),
+ [{TO2V,x_SUITE,{g1,all},"skipped"},
+ {TO2V,x_SUITE,{g2,all},"skipped"},
+ {TO2V,y_SUITE,tc1,"skipped"},
+ {TO2V,y_SUITE,tc2,"skipped"}] = lists:sort(Skip2),
+
+ ok.
%%%-----------------------------------------------------------------
%%%
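The assertions added above call ct_testspec:get_tests/1, which takes a list of spec files (or, as in multiple_specs, a list of spec-file lists to be merged) and returns the resolved tests per spec and node. A rough sketch of the shape being matched, with illustrative values only:

    %% Resolve a spec without running it; Run holds {DirRef,Suite,Cases}
    %% entries and Skip holds the suites/groups/cases to be skipped
    %% together with their skip comments.
    {ok, Tests} = ct_testspec:get_tests(["my.spec"]),
    [{["my.spec"], [{Node, Run, Skip}]}] = Tests.
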
diff --git a/lib/compiler/doc/src/compile.xml b/lib/compiler/doc/src/compile.xml
index bd488a39a5..ed04dac1c0 100644
--- a/lib/compiler/doc/src/compile.xml
+++ b/lib/compiler/doc/src/compile.xml
@@ -418,7 +418,7 @@ module.beam: module.erl \
without module prefix to local or imported functions before
trying with auto-imported BIFs. If the BIF is to be
called, use the <c>erlang</c> module prefix in the call, not
- <c>{ no_auto_import,[{F,A}, ...]}</c>.</p>
+ <c>{no_auto_import,[{F,A}, ...]}</c>.</p>
</note>
<p>If this option is written in the source code, as a
<c>-compile</c> directive, the syntax <c>F/A</c> can be used instead
@@ -439,6 +439,15 @@ module.beam: module.erl \
</p>
</item>
+ <tag><c>{extra_chunks, [{binary(), binary()}]}</c></tag>
+ <item>
+ <p>Pass extra chunks to be stored in the <c>.beam</c> file.
+       The extra chunks must be a list of tuples with a four-byte
+       binary as the chunk name followed by a binary with the chunk contents.
+ See <seealso marker="stdlib:beam_lib">beam_lib</seealso> for
+ more information.
+ </p>
+ </item>
</taglist>
<p>If warnings are turned on (option <c>report_warnings</c>
@@ -679,7 +688,7 @@ module.beam: module.erl \
<fsummary>Compiles a list of forms.</fsummary>
<desc>
<p>Is the same as
- <c>forms(File, [verbose,report_errors,report_warnings])</c>.
+ <c>forms(Forms, [verbose,report_errors,report_warnings])</c>.
</p>
</desc>
</func>
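A minimal sketch of the new extra_chunks option in use (mirroring the test case added to compile_SUITE further down): the custom chunk ends up in the generated .beam binary and can be read back with beam_lib:chunks/2.

    Anno = erl_anno:new(1),
    Forms = [{attribute,Anno,module,extra_chunks}],
    {ok,extra_chunks,Bin} =
        compile:forms(Forms, [binary, {extra_chunks, [{<<"ExCh">>, <<"Contents">>}]}]),
    {ok,{extra_chunks,[{"ExCh",<<"Contents">>}]}} =
        beam_lib:chunks(Bin, ["ExCh"]).
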
diff --git a/lib/compiler/src/Makefile b/lib/compiler/src/Makefile
index c37f731d8c..cf60355a40 100644
--- a/lib/compiler/src/Makefile
+++ b/lib/compiler/src/Makefile
@@ -126,7 +126,7 @@ ERL_COMPILE_FLAGS += +native
endif
ERL_COMPILE_FLAGS += +inline +warn_unused_import \
-Werror \
- -I../../stdlib/include -I$(EGEN) -W
+ -I../../stdlib/include -I$(EGEN) -W +warn_missing_spec
# ----------------------------------------------------
# Targets
diff --git a/lib/compiler/src/beam_a.erl b/lib/compiler/src/beam_a.erl
index 91e6d80da3..cdb32d5d55 100644
--- a/lib/compiler/src/beam_a.erl
+++ b/lib/compiler/src/beam_a.erl
@@ -25,6 +25,9 @@
-export([module/2]).
+-spec module(beam_asm:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs0,Lc}, _Opt) ->
Fs = [function(F) || F <- Fs0],
{ok,{Mod,Exp,Attr,Fs,Lc}}.
diff --git a/lib/compiler/src/beam_asm.erl b/lib/compiler/src/beam_asm.erl
index 9c8ed2277f..1bda185acd 100644
--- a/lib/compiler/src/beam_asm.erl
+++ b/lib/compiler/src/beam_asm.erl
@@ -21,23 +21,54 @@
-module(beam_asm).
--export([module/4]).
+-export([module/5]).
-export([encode/2]).
+-export_type([fail/0,label/0,reg/0,src/0,module_code/0,function_name/0]).
+
-import(lists, [map/2,member/2,keymember/3,duplicate/2,splitwith/2]).
-include("beam_opcodes.hrl").
-module(Code, Abst, SourceFile, Opts) ->
- {ok,assemble(Code, Abst, SourceFile, Opts)}.
+%% Common types for describing operands for BEAM instructions.
+-type reg_num() :: 0..1023.
+-type reg() :: {'x',reg_num()} | {'y',reg_num()}.
+-type src() :: reg() |
+ {'literal',term()} |
+ {'atom',atom()} |
+ {'integer',integer()} |
+ 'nil' |
+ {'float',float()}.
+-type label() :: pos_integer().
+-type fail() :: {'f',label() | 0}.
+
+%% asm_instruction() describes only the instructions that
+%% are used in BEAM files (as opposed to internal instructions
+%% used only during optimization).
+
+-type asm_instruction() :: atom() | tuple().
+
+-type function_name() :: atom().
+
+-type asm_function() ::
+ {'function',function_name(),arity(),label(),[asm_instruction()]}.
+
+-type module_code() ::
+ {module(),[_],[_],[asm_function()],pos_integer()}.
+
+-spec module(module_code(), [{binary(), binary()}], [_], [compile:option()], [compile:option()]) ->
+ {'ok',binary()}.
-assemble({Mod,Exp0,Attr0,Asm0,NumLabels}, Abst, SourceFile, Opts) ->
+module(Code, ExtraChunks, SourceFile, Opts, CompilerOpts) ->
+ {ok,assemble(Code, ExtraChunks, SourceFile, Opts, CompilerOpts)}.
+
+assemble({Mod,Exp0,Attr0,Asm0,NumLabels}, ExtraChunks, SourceFile, Opts, CompilerOpts) ->
{1,Dict0} = beam_dict:atom(Mod, beam_dict:new()),
{0,Dict1} = beam_dict:fname(atom_to_list(Mod) ++ ".erl", Dict0),
NumFuncs = length(Asm0),
{Asm,Attr} = on_load(Asm0, Attr0),
Exp = cerl_sets:from_list(Exp0),
{Code,Dict2} = assemble_1(Asm, Exp, Dict1, []),
- build_file(Code, Attr, Dict2, NumLabels, NumFuncs, Abst, SourceFile, Opts).
+ build_file(Code, Attr, Dict2, NumLabels, NumFuncs, ExtraChunks, SourceFile, Opts, CompilerOpts).
on_load(Fs0, Attr0) ->
case proplists:get_value(on_load, Attr0) of
@@ -80,7 +111,7 @@ assemble_function([H|T], Acc, Dict0) ->
assemble_function([], Code, Dict) ->
{Code, Dict}.
-build_file(Code, Attr, Dict, NumLabels, NumFuncs, Abst, SourceFile, Opts) ->
+build_file(Code, Attr, Dict, NumLabels, NumFuncs, ExtraChunks, SourceFile, Opts, CompilerOpts) ->
%% Create the code chunk.
CodeChunk = chunk(<<"Code">>,
@@ -92,9 +123,9 @@ build_file(Code, Attr, Dict, NumLabels, NumFuncs, Abst, SourceFile, Opts) ->
Code),
%% Create the atom table chunk.
-
- {NumAtoms, AtomTab} = beam_dict:atom_table(Dict),
- AtomChunk = chunk(<<"Atom">>, <<NumAtoms:32>>, AtomTab),
+ AtomEncoding = atom_encoding(CompilerOpts),
+ {NumAtoms, AtomTab} = beam_dict:atom_table(Dict, AtomEncoding),
+ AtomChunk = chunk(atom_chunk_name(AtomEncoding), <<NumAtoms:32>>, AtomTab),
%% Create the import table chunk.
@@ -155,21 +186,30 @@ build_file(Code, Attr, Dict, NumLabels, NumFuncs, Abst, SourceFile, Opts) ->
AttrChunk = chunk(<<"Attr">>, Attributes),
CompileChunk = chunk(<<"CInf">>, Compile),
- %% Create the abstract code chunk.
+ %% Compile all extra chunks.
- AbstChunk = chunk(<<"Abst">>, Abst),
+ CheckedChunks = [chunk(Key, Value) || {Key, Value} <- ExtraChunks],
%% Create IFF chunk.
Chunks = case member(slim, Opts) of
true ->
- [Essentials,AttrChunk,AbstChunk];
+ [Essentials,AttrChunk,CheckedChunks];
false ->
[Essentials,LocChunk,AttrChunk,
- CompileChunk,AbstChunk,LineChunk]
+ CompileChunk,CheckedChunks,LineChunk]
end,
build_form(<<"BEAM">>, Chunks).
+atom_encoding(Opts) ->
+ case proplists:get_bool(no_utf8_atoms, Opts) of
+ false -> utf8;
+ true -> latin1
+ end.
+
+atom_chunk_name(utf8) -> <<"AtU8">>;
+atom_chunk_name(latin1) -> <<"Atom">>.
+
%% finalize_fun_table(Essentials, MD5) -> FinalizedEssentials
%% Update the 'old_uniq' field in the entry for each fun in the
%% 'FunT' chunk. We'll use part of the MD5 for the module as a
@@ -439,6 +479,8 @@ encode_alloc_list_1([{floats,Floats}|T], Dict, Acc0) ->
encode_alloc_list_1([], Dict, Acc) ->
{iolist_to_binary(Acc),Dict}.
+-spec encode(non_neg_integer(), pos_integer()) -> iodata().
+
encode(Tag, N) when N < 0 ->
encode1(Tag, negative_to_bytes(N));
encode(Tag, N) when N < 16 ->
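With this change, atoms are written in a UTF-8 encoded "AtU8" chunk by default, while the no_utf8_atoms option (implied by the r15–r19 aliases added to compile.erl below) falls back to the Latin-1 "Atom" chunk. A minimal sketch, assuming no_utf8_atoms may also be passed directly as a compiler option:

    Anno = erl_anno:new(1),
    Forms = [{attribute,Anno,module,m}],
    {ok,m,New} = compile:forms(Forms, [binary]),
    {ok,{m,[{"AtU8",_}]}} = beam_lib:chunks(New, ["AtU8"]),
    {ok,m,Old} = compile:forms(Forms, [binary, no_utf8_atoms]),
    {ok,{m,[{"Atom",_}]}} = beam_lib:chunks(Old, ["Atom"]).
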
diff --git a/lib/compiler/src/beam_block.erl b/lib/compiler/src/beam_block.erl
index 6a35191f6e..6543e05e20 100644
--- a/lib/compiler/src/beam_block.erl
+++ b/lib/compiler/src/beam_block.erl
@@ -25,6 +25,9 @@
-export([module/2]).
-import(lists, [reverse/1,reverse/2,foldl/3,member/2]).
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs0,Lc}, _Opt) ->
Fs = [function(F) || F <- Fs0],
{ok,{Mod,Exp,Attr,Fs,Lc}}.
diff --git a/lib/compiler/src/beam_bs.erl b/lib/compiler/src/beam_bs.erl
index 2aed98d4e7..beb055b23d 100644
--- a/lib/compiler/src/beam_bs.erl
+++ b/lib/compiler/src/beam_bs.erl
@@ -25,6 +25,9 @@
-export([module/2]).
-import(lists, [mapfoldl/3,reverse/1]).
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs0,Lc0}, _Opt) ->
{Fs,Lc} = mapfoldl(fun function/2, Lc0, Fs0),
{ok,{Mod,Exp,Attr,Fs,Lc}}.
diff --git a/lib/compiler/src/beam_bsm.erl b/lib/compiler/src/beam_bsm.erl
index ae1b34ba49..9a4e7fb133 100644
--- a/lib/compiler/src/beam_bsm.erl
+++ b/lib/compiler/src/beam_bsm.erl
@@ -60,19 +60,26 @@
%%% data structures or passed to BIFs.
%%%
+-type label() :: beam_asm:label().
+-type func_info() :: {beam_asm:reg(),boolean()}.
+
-record(btb,
- {f, %Gbtrees for all functions.
- index, %{Label,Code} index (for liveness).
- ok_br, %Labels that are OK.
- must_not_save, %Must not save position when
- % optimizing (reaches
- % bs_context_to_binary).
- must_save %Must save position when optimizing.
+ {f :: gb_trees:tree(label(), func_info()),
+ index :: beam_utils:code_index(), %{Label,Code} index (for liveness).
+ ok_br=gb_sets:empty() :: gb_sets:set(label()), %Labels that are OK.
+ must_not_save=false :: boolean(), %Must not save position when
+ % optimizing (reaches
+ % bs_context_to_binary).
+ must_save=false :: boolean() %Must save position when optimizing.
}).
+
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs0,Lc}, Opts) ->
- D = #btb{f=btb_index(Fs0)},
- Fs = [function(F, D) || F <- Fs0],
+ FIndex = btb_index(Fs0),
+ Fs = [function(F, FIndex) || F <- Fs0],
Code = {Mod,Exp,Attr,Fs,Lc},
case proplists:get_bool(bin_opt_info, Opts) of
true ->
@@ -92,10 +99,10 @@ format_error({no_bin_opt,Reason}) ->
%%% Local functions.
%%%
-function({function,Name,Arity,Entry,Is}, D0) ->
+function({function,Name,Arity,Entry,Is}, FIndex) ->
try
Index = beam_utils:index_labels(Is),
- D = D0#btb{index=Index},
+ D = #btb{f=FIndex,index=Index},
{function,Name,Arity,Entry,btb_opt_1(Is, D, [])}
catch
Class:Error ->
@@ -179,15 +186,14 @@ btb_gen_save(false, _, Acc) -> Acc.
%% a bs_context_to_binary instruction.
%%
-btb_reaches_match(Is, RegList, D0) ->
+btb_reaches_match(Is, RegList, D) ->
try
Regs = btb_regs_from_list(RegList),
- D = D0#btb{ok_br=gb_sets:empty(),must_not_save=false,must_save=false},
#btb{must_not_save=MustNotSave,must_save=MustSave} =
- btb_reaches_match_1(Is, Regs, D),
- case MustNotSave and MustSave of
+ btb_reaches_match_1(Is, Regs, D),
+ case MustNotSave andalso MustSave of
true -> btb_error(must_and_must_not_save);
- _ -> {ok,MustSave}
+ false -> {ok,MustSave}
end
catch
throw:{error,_}=Error -> Error
diff --git a/lib/compiler/src/beam_clean.erl b/lib/compiler/src/beam_clean.erl
index 10805a3c36..b736d39f9c 100644
--- a/lib/compiler/src/beam_clean.erl
+++ b/lib/compiler/src/beam_clean.erl
@@ -26,6 +26,9 @@
-export([clean_labels/1]).
-import(lists, [map/2,foldl/3,reverse/1,filter/2]).
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs0,_}, Opts) ->
Order = [Lbl || {function,_,_,Lbl,_} <- Fs0],
All = foldl(fun({function,_,_,Lbl,_}=Func,D) -> dict:store(Lbl, Func, D) end,
@@ -39,6 +42,10 @@ module({Mod,Exp,Attr,Fs0,_}, Opts) ->
{ok,{Mod,Exp,Attr,Fs,Lc}}.
%% Remove all bs_save2/2 instructions not referenced by a bs_restore2/2.
+
+-spec bs_clean_saves([beam_utils:instruction()]) ->
+ [beam_utils:instruction()].
+
bs_clean_saves(Is) ->
Needed = bs_restores(Is, []),
bs_clean_saves_1(Is, gb_sets:from_list(Needed), []).
@@ -98,13 +105,18 @@ add_to_work_list(F, {Fs,Used}=Sets) ->
%%% want to see the expanded code in a .S file.
%%%
--record(st, {lmap, %Translation tables for labels.
- entry, %Number of entry label.
- lc %Label counter
+-type label() :: beam_asm:label().
+
+-record(st, {lmap :: [{label(),label()}], %Translation tables for labels.
+ entry :: beam_asm:label(), %Number of entry label.
+ lc :: non_neg_integer() %Label counter
}).
+-spec clean_labels([beam_utils:instruction()]) ->
+ {[beam_utils:instruction()],pos_integer()}.
+
clean_labels(Fs0) ->
- St0 = #st{lmap=[],lc=1},
+ St0 = #st{lmap=[],entry=1,lc=1},
{Fs1,#st{lmap=Lmap0,lc=Lc}} = function_renumber(Fs0, St0, []),
Lmap = gb_trees:from_orddict(ordsets:from_list(Lmap0)),
Fs = function_replace(Fs1, Lmap, []),
diff --git a/lib/compiler/src/beam_dead.erl b/lib/compiler/src/beam_dead.erl
index 9087586b58..d379fdc4eb 100644
--- a/lib/compiler/src/beam_dead.erl
+++ b/lib/compiler/src/beam_dead.erl
@@ -29,6 +29,10 @@
-import(lists, [mapfoldl/3,reverse/1]).
+
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs0,_}, _Opts) ->
{Fs1,Lc1} = beam_clean:clean_labels(Fs0),
{Fs,Lc} = mapfoldl(fun function/2, Lc1, Fs1),
diff --git a/lib/compiler/src/beam_dict.erl b/lib/compiler/src/beam_dict.erl
index 9565ab74c4..990e86062a 100644
--- a/lib/compiler/src/beam_dict.erl
+++ b/lib/compiler/src/beam_dict.erl
@@ -24,11 +24,11 @@
-export([new/0,opcode/2,highest_opcode/1,
atom/2,local/4,export/4,import/4,
string/2,lambda/3,literal/2,line/2,fname/2,
- atom_table/1,local_table/1,export_table/1,import_table/1,
+ atom_table/2,local_table/1,export_table/1,import_table/1,
string_table/1,lambda_table/1,literal_table/1,
line_table/1]).
--type label() :: non_neg_integer().
+-type label() :: beam_asm:label().
-type index() :: non_neg_integer().
@@ -38,13 +38,16 @@
-type line_tab() :: #{{Fname :: index(), Line :: term()} => index()}.
-type literal_tab() :: dict:dict(Literal :: term(), index()).
+-type lambda_info() :: {label(),{index(),label(),non_neg_integer()}}.
+-type lambda_tab() :: {non_neg_integer(),[lambda_info()]}.
+
-record(asm,
{atoms = #{} :: atom_tab(),
exports = [] :: [{label(), arity(), label()}],
locals = [] :: [{label(), arity(), label()}],
imports = gb_trees:empty() :: import_tab(),
strings = <<>> :: binary(), %String pool
- lambdas = {0,[]}, %[{...}]
+ lambdas = {0,[]} :: lambda_tab(),
literals = dict:new() :: literal_tab(),
fnames = #{} :: fname_tab(),
lines = #{} :: line_tab(),
@@ -148,10 +151,7 @@ string(Str, Dict) when is_list(Str) ->
lambda(Lbl, NumFree, #asm{lambdas={OldIndex,Lambdas0}}=Dict) ->
%% Set Index the same as OldIndex.
Index = OldIndex,
- %% Initialize OldUniq to 0. It will be set to an unique value
- %% based on the MD5 checksum of the BEAM code for the module.
- OldUniq = 0,
- Lambdas = [{Lbl,{OldIndex,Lbl,Index,NumFree,OldUniq}}|Lambdas0],
+ Lambdas = [{Lbl,{Index,Lbl,NumFree}}|Lambdas0],
{OldIndex,Dict#asm{lambdas={OldIndex+1,Lambdas}}}.
%% Returns the index for a literal (adding it to the literal table if necessary).
@@ -185,6 +185,9 @@ line([{location,Name,Line}], #asm{lines=Lines,num_lines=N}=Dict0) ->
{Index, Dict1#asm{lines=Lines#{Key=>Index},num_lines=N+1}}
end.
+-spec fname(nonempty_string(), bdict()) ->
+ {non_neg_integer(), bdict()}.
+
fname(Name, #asm{fnames=Fnames}=Dict) ->
case Fnames of
#{Name := Index} -> {Index,Dict};
@@ -194,15 +197,15 @@ fname(Name, #asm{fnames=Fnames}=Dict) ->
end.
%% Returns the atom table.
-%% atom_table(Dict) -> {LastIndex,[Length,AtomString...]}
--spec atom_table(bdict()) -> {non_neg_integer(), [[non_neg_integer(),...]]}.
+%% atom_table(Dict, Encoding) -> {LastIndex,[Length,AtomString...]}
+-spec atom_table(bdict(), latin1 | utf8) -> {non_neg_integer(), [[non_neg_integer(),...]]}.
-atom_table(#asm{atoms=Atoms}) ->
+atom_table(#asm{atoms=Atoms}, Encoding) ->
NumAtoms = maps:size(Atoms),
Sorted = lists:keysort(2, maps:to_list(Atoms)),
{NumAtoms,[begin
- L = atom_to_list(A),
- [length(L)|L]
+ L = atom_to_binary(A, Encoding),
+ [byte_size(L),L]
end || {A,_} <- Sorted]}.
%% Returns the table of local functions.
@@ -239,8 +242,11 @@ lambda_table(#asm{locals=Loc0,lambdas={NumLambdas,Lambdas0}}) ->
Lambdas1 = sofs:relation(Lambdas0),
Loc = sofs:relation([{Lbl,{F,A}} || {F,A,Lbl} <- Loc0]),
Lambdas2 = sofs:relative_product1(Lambdas1, Loc),
+    %% Initialize OldUniq to 0. It will be set to a unique value
+    %% based on the MD5 checksum of the BEAM code for the module.
+ OldUniq = 0,
Lambdas = [<<F:32,A:32,Lbl:32,Index:32,NumFree:32,OldUniq:32>> ||
- {{_,Lbl,Index,NumFree,OldUniq},{F,A}} <- sofs:to_external(Lambdas2)],
+ {{Index,Lbl,NumFree},{F,A}} <- sofs:to_external(Lambdas2)],
{NumLambdas,Lambdas}.
%% Returns the literal table.
diff --git a/lib/compiler/src/beam_except.erl b/lib/compiler/src/beam_except.erl
index 4a181c1923..9801c68ee2 100644
--- a/lib/compiler/src/beam_except.erl
+++ b/lib/compiler/src/beam_except.erl
@@ -33,6 +33,9 @@
-import(lists, [reverse/1]).
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs0,Lc}, _Opt) ->
Fs = [function(F) || F <- Fs0],
{ok,{Mod,Exp,Attr,Fs,Lc}}.
@@ -49,9 +52,9 @@ function({function,Name,Arity,CLabel,Is0}) ->
end.
-record(st,
- {lbl, %func_info label
- loc, %location for func_info
- arity %arity for function
+ {lbl :: beam_asm:label(), %func_info label
+ loc :: [_], %location for func_info
+ arity :: arity() %arity for function
}).
function_1(Is0) ->
diff --git a/lib/compiler/src/beam_flatten.erl b/lib/compiler/src/beam_flatten.erl
index c9ff07b496..a4d45a4ca6 100644
--- a/lib/compiler/src/beam_flatten.erl
+++ b/lib/compiler/src/beam_flatten.erl
@@ -25,6 +25,9 @@
-import(lists, [reverse/1,reverse/2]).
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs,Lc}, _Opt) ->
{ok,{Mod,Exp,Attr,[function(F) || F <- Fs],Lc}}.
diff --git a/lib/compiler/src/beam_jump.erl b/lib/compiler/src/beam_jump.erl
index e096270d8c..4365451356 100644
--- a/lib/compiler/src/beam_jump.erl
+++ b/lib/compiler/src/beam_jump.erl
@@ -130,6 +130,11 @@
-import(lists, [reverse/1,reverse/2,foldl/3]).
+-type instruction() :: beam_utils:instruction().
+
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs0,Lc}, _Opt) ->
Fs = [function(F) || F <- Fs0],
{ok,{Mod,Exp,Attr,Fs,Lc}}.
@@ -269,9 +274,9 @@ extract_seq_1(_, _) -> no.
-record(st,
{
- entry, %Entry label (must not be moved).
- mlbl, %Moved labels.
- labels :: cerl_sets:set() %Set of referenced labels.
+ entry :: beam_asm:label(), %Entry label (must not be moved).
+ mlbl :: #{beam_asm:label() := [beam_asm:label()]}, %Moved labels.
+ labels :: cerl_sets:set() %Set of referenced labels.
}).
opt(Is0, CLabel) ->
@@ -453,6 +458,8 @@ is_label_used(L, St) ->
%% is_unreachable_after(Instruction) -> boolean()
%% Test whether the code after Instruction is unreachable.
+-spec is_unreachable_after(instruction()) -> boolean().
+
is_unreachable_after({func_info,_M,_F,_A}) -> true;
is_unreachable_after(return) -> true;
is_unreachable_after({jump,_Lbl}) -> true;
@@ -465,6 +472,8 @@ is_unreachable_after(I) -> is_exit_instruction(I).
%% Test whether the instruction Instruction always
%% causes an exit/failure.
+-spec is_exit_instruction(instruction()) -> boolean().
+
is_exit_instruction({call_ext,_,{extfunc,M,F,A}}) ->
erl_bifs:is_exit_bif(M, F, A);
is_exit_instruction(if_end) -> true;
@@ -477,6 +486,8 @@ is_exit_instruction(_) -> false.
%% Remove all unused labels. Also remove unreachable
%% instructions following labels that are removed.
+-spec remove_unused_labels([instruction()]) -> [instruction()].
+
remove_unused_labels(Is) ->
Used0 = initial_labels(Is),
Used = foldl(fun ulbl/2, Used0, Is),
diff --git a/lib/compiler/src/beam_listing.erl b/lib/compiler/src/beam_listing.erl
index d82ed8639d..94b47cf568 100644
--- a/lib/compiler/src/beam_listing.erl
+++ b/lib/compiler/src/beam_listing.erl
@@ -21,14 +21,24 @@
-export([module/2]).
+-include("core_parse.hrl").
+-include("v3_kernel.hrl").
-include("v3_life.hrl").
-import(lists, [foreach/2]).
-module(File, Core) when element(1, Core) == c_module ->
+-type code() :: cerl:c_module()
+ | beam_utils:module_code()
+ | #k_mdef{}
+ | {module(),_,_,_} %v3_life
+ | [_]. %form-based format
+
+-spec module(file:io_device(), code()) -> 'ok'.
+
+module(File, #c_module{}=Core) ->
%% This is a core module.
io:put_chars(File, core_pp:format(Core));
-module(File, Kern) when element(1, Kern) == k_mdef ->
+module(File, #k_mdef{}=Kern) ->
%% This is a kernel module.
io:put_chars(File, v3_kernel_pp:format(Kern));
%%io:put_chars(File, io_lib:format("~p~n", [Kern]));
diff --git a/lib/compiler/src/beam_peep.erl b/lib/compiler/src/beam_peep.erl
index c8bef31824..6df5c02334 100644
--- a/lib/compiler/src/beam_peep.erl
+++ b/lib/compiler/src/beam_peep.erl
@@ -24,6 +24,9 @@
-import(lists, [reverse/1,member/2]).
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs0,_}, _Opts) ->
%% First coalesce adjacent labels.
{Fs1,Lc} = beam_clean:clean_labels(Fs0),
diff --git a/lib/compiler/src/beam_receive.erl b/lib/compiler/src/beam_receive.erl
index 89cafe27ce..1403e1e05e 100644
--- a/lib/compiler/src/beam_receive.erl
+++ b/lib/compiler/src/beam_receive.erl
@@ -65,6 +65,9 @@
%%% as the SomeUniqInteger.
%%%
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs0,Lc}, _Opts) ->
Fs = [function(F) || F <- Fs0],
Code = {Mod,Exp,Attr,Fs,Lc},
diff --git a/lib/compiler/src/beam_reorder.erl b/lib/compiler/src/beam_reorder.erl
index 6a7c033ec6..910b7f6b0a 100644
--- a/lib/compiler/src/beam_reorder.erl
+++ b/lib/compiler/src/beam_reorder.erl
@@ -23,6 +23,9 @@
-export([module/2]).
-import(lists, [member/2,reverse/1]).
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs0,Lc}, _Opt) ->
Fs = [function(F) || F <- Fs0],
{ok,{Mod,Exp,Attr,Fs,Lc}}.
diff --git a/lib/compiler/src/beam_split.erl b/lib/compiler/src/beam_split.erl
index feeab0af50..d041f18806 100644
--- a/lib/compiler/src/beam_split.erl
+++ b/lib/compiler/src/beam_split.erl
@@ -23,6 +23,9 @@
-import(lists, [reverse/1]).
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs0,Lc}, _Opts) ->
Fs = [split_blocks(F) || F <- Fs0],
{ok,{Mod,Exp,Attr,Fs,Lc}}.
diff --git a/lib/compiler/src/beam_trim.erl b/lib/compiler/src/beam_trim.erl
index d40669083e..4da0985085 100644
--- a/lib/compiler/src/beam_trim.erl
+++ b/lib/compiler/src/beam_trim.erl
@@ -24,10 +24,13 @@
-import(lists, [reverse/1,reverse/2,splitwith/2,sort/1]).
-record(st,
- {safe, %Safe labels.
- lbl %Code at each label.
+ {safe :: gb_sets:set(beam_asm:label()), %Safe labels.
+ lbl :: beam_utils:code_index() %Code at each label.
}).
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs0,Lc}, _Opts) ->
Fs = [function(F) || F <- Fs0],
{ok,{Mod,Exp,Attr,Fs,Lc}}.
diff --git a/lib/compiler/src/beam_type.erl b/lib/compiler/src/beam_type.erl
index 9866dcd070..050c599d6b 100644
--- a/lib/compiler/src/beam_type.erl
+++ b/lib/compiler/src/beam_type.erl
@@ -26,6 +26,9 @@
-import(lists, [filter/2,foldl/3,keyfind/3,member/2,
reverse/1,reverse/2,sort/1]).
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs0,Lc}, _Opts) ->
Fs = [function(F) || F <- Fs0],
{ok,{Mod,Exp,Attr,Fs,Lc}}.
diff --git a/lib/compiler/src/beam_utils.erl b/lib/compiler/src/beam_utils.erl
index ffeff9ea81..cc6e54ca16 100644
--- a/lib/compiler/src/beam_utils.erl
+++ b/lib/compiler/src/beam_utils.erl
@@ -28,11 +28,31 @@
live_opt/1,delete_live_annos/1,combine_heap_needs/2,
split_even/1]).
+-export_type([code_index/0,module_code/0,instruction/0]).
+
-import(lists, [member/2,sort/1,reverse/1,splitwith/2]).
+%% instruction() describes all instructions that are used during optimization
+%% (from beam_a to beam_z).
+-type instruction() :: atom() | tuple().
+
+-type code_index() :: gb_trees:tree(beam_asm:label(), [instruction()]).
+
+-type int_function() :: {'function',beam_asm:function_name(),arity(),
+ beam_asm:label(),[instruction()]}.
+
+-type module_code() ::
+ {module(),[_],[_],[int_function()],pos_integer()}.
+
+%% Internal types.
+-type fail() :: beam_asm:fail() | 'fail'.
+-type test() :: {'test',atom(),fail(),[beam_asm:src()]} |
+ {'test',atom(),fail(),integer(),list(),beam_asm:reg()}.
+-type result_cache() :: gb_trees:tree(beam_asm:label(), 'killed' | 'used').
+
-record(live,
- {lbl, %Label to code index.
- res}). %Result cache for each label.
+ {lbl :: code_index(), %Label to code index.
+ res :: result_cache()}). %Result cache for each label.
%% is_killed_block(Register, [Instruction]) -> true|false
@@ -44,6 +64,8 @@
%% i.e. it is OK to enter the instruction sequence with Register
%% containing garbage.
+-spec is_killed_block(beam_asm:reg(), [instruction()]) -> boolean().
+
is_killed_block({x,X}, [{set,_,_,{alloc,Live,_}}|_]) ->
X >= Live;
is_killed_block(R, [{set,Ds,Ss,_Op}|Is]) ->
@@ -65,6 +87,8 @@ is_killed_block(_, []) -> false.
%% The state (constructed by index_instructions/1) is used to allow us
%% to determine the kill state across branches.
+-spec is_killed(beam_asm:reg(), [instruction()], code_index()) -> boolean().
+
is_killed(R, Is, D) ->
St = #live{lbl=D,res=gb_trees:empty()},
case check_liveness(R, Is, St) of
@@ -75,6 +99,8 @@ is_killed(R, Is, D) ->
%% is_killed_at(Reg, Lbl, State) -> true|false
%% Determine whether Reg is killed at label Lbl.
+-spec is_killed_at(beam_asm:reg(), beam_asm:label(), code_index()) -> boolean().
+
is_killed_at(R, Lbl, D) when is_integer(Lbl) ->
St0 = #live{lbl=D,res=gb_trees:empty()},
case check_liveness_at(R, Lbl, St0) of
@@ -89,6 +115,8 @@ is_killed_at(R, Lbl, D) when is_integer(Lbl) ->
%% The state is used to allow us to determine the usage state
%% across branches.
+-spec is_not_used(beam_asm:reg(), [instruction()], code_index()) -> boolean().
+
is_not_used(R, Is, D) ->
St = #live{lbl=D,res=gb_trees:empty()},
case check_liveness(R, Is, St) of
@@ -100,18 +128,25 @@ is_not_used(R, Is, D) ->
%% Index the instruction sequence so that we can quickly
%% look up the instruction following a specific label.
+-spec index_labels([instruction()]) -> code_index().
+
index_labels(Is) ->
index_labels_1(Is, []).
%% empty_label_index() -> State
%% Create an empty label index.
+-spec empty_label_index() -> code_index().
+
empty_label_index() ->
gb_trees:empty().
%% index_label(Label, [Instruction], State) -> State
%% Add an index for a label.
+-spec index_label(beam_asm:label(), [instruction()], code_index()) ->
+ code_index().
+
index_label(Lbl, Is0, Acc) ->
Is = drop_labels(Is0),
gb_trees:enter(Lbl, Is, Acc).
@@ -120,12 +155,16 @@ index_label(Lbl, Is0, Acc) ->
%% code_at(Label, State) -> [I].
%% Retrieve the code at the given label.
+-spec code_at(beam_asm:label(), code_index()) -> [instruction()].
+
code_at(L, Ll) ->
gb_trees:get(L, Ll).
%% bif_to_test(Bif, [Op], Fail) -> {test,Test,Fail,[Op]}
%% Convert a BIF to a test. Fail if not possible.
+-spec bif_to_test(atom(), list(), fail()) -> test().
+
bif_to_test(is_atom, [_]=Ops, Fail) -> {test,is_atom,Fail,Ops};
bif_to_test(is_boolean, [_]=Ops, Fail) -> {test,is_boolean,Fail,Ops};
bif_to_test(is_binary, [_]=Ops, Fail) -> {test,is_binary,Fail,Ops};
@@ -158,6 +197,9 @@ bif_to_test(is_record, [_,_,_]=Ops, Fail) -> {test,is_record,Fail,Ops}.
%% Return 'true' if the test instruction does not modify any
%% registers and/or bit syntax matching state.
%%
+
+-spec is_pure_test(test()) -> boolean().
+
is_pure_test({test,is_eq,_,[_,_]}) -> true;
is_pure_test({test,is_ne,_,[_,_]}) -> true;
is_pure_test({test,is_eq_exact,_,[_,_]}) -> true;
@@ -180,7 +222,9 @@ is_pure_test({test,Op,_,Ops}) ->
%% whose destination is a register that will not be used.
%% Also insert {'%live',Live,Regs} annotations at the beginning
%% and end of each block.
-%%
+
+-spec live_opt([instruction()]) -> [instruction()].
+
live_opt(Is0) ->
{[{label,Fail}|_]=Bef,[Fi|Is]} =
splitwith(fun({func_info,_,_,_}) -> false;
@@ -193,7 +237,9 @@ live_opt(Is0) ->
%% delete_live_annos([Instruction]) -> [Instruction].
%% Delete all live annotations.
-%%
+
+-spec delete_live_annos([instruction()]) -> [instruction()].
+
delete_live_annos([{block,Bl0}|Is]) ->
case delete_live_annos(Bl0) of
[] -> delete_live_annos(Is);
@@ -208,6 +254,8 @@ delete_live_annos([]) -> [].
%% combine_heap_needs(HeapNeed1, HeapNeed2) -> HeapNeed
%% Combine the heap need for two allocation instructions.
+-spec combine_heap_needs(term(), term()) -> term().
+
combine_heap_needs({alloc,Alloc1}, {alloc,Alloc2}) ->
{alloc,combine_alloc_lists(Alloc1, Alloc2)};
combine_heap_needs({alloc,Alloc}, Words) when is_integer(Words) ->
@@ -220,6 +268,8 @@ combine_heap_needs(H1, H2) when is_integer(H1), is_integer(H2) ->
%% split_even/1
%% [1,2,3,4,5,6] -> {[1,3,5],[2,4,6]}
+-spec split_even(list()) -> {list(),list()}.
+
split_even(Rs) -> split_even(Rs, [], []).
diff --git a/lib/compiler/src/beam_validator.erl b/lib/compiler/src/beam_validator.erl
index 5659077c5d..bf33ae0aeb 100644
--- a/lib/compiler/src/beam_validator.erl
+++ b/lib/compiler/src/beam_validator.erl
@@ -32,6 +32,10 @@
-import(lists, [reverse/1,foldl/3,foreach/2,dropwhile/2]).
%% To be called by the compiler.
+
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_utils:module_code()}.
+
module({Mod,Exp,Attr,Fs,Lc}=Code, _Opts)
when is_atom(Mod), is_list(Exp), is_list(Attr), is_integer(Lc) ->
case validate(Mod, Fs) of
diff --git a/lib/compiler/src/beam_z.erl b/lib/compiler/src/beam_z.erl
index 6c7f8543c2..787e33c142 100644
--- a/lib/compiler/src/beam_z.erl
+++ b/lib/compiler/src/beam_z.erl
@@ -26,6 +26,9 @@
-import(lists, [dropwhile/2]).
+-spec module(beam_utils:module_code(), [compile:option()]) ->
+ {'ok',beam_asm:module_code()}.
+
module({Mod,Exp,Attr,Fs0,Lc}, _Opt) ->
Fs = [function(F) || F <- Fs0],
{ok,{Mod,Exp,Attr,Fs,Lc}}.
diff --git a/lib/compiler/src/cerl.erl b/lib/compiler/src/cerl.erl
index b1be6ffc6d..6b936a7687 100644
--- a/lib/compiler/src/cerl.erl
+++ b/lib/compiler/src/cerl.erl
@@ -1584,6 +1584,8 @@ ann_make_list(_, [], Node) ->
%% @doc Returns <code>true</code> if <code>Node</code> is an abstract
%% map constructor, otherwise <code>false</code>.
+-type map_op() :: #c_literal{val::'assoc'} | #c_literal{val::'exact'}.
+
-spec is_c_map(cerl()) -> boolean().
is_c_map(#c_map{}) ->
@@ -1679,8 +1681,16 @@ update_c_map(#c_map{is_pat=true}=Old, M, Es) ->
update_c_map(#c_map{is_pat=false}=Old, M, Es) ->
ann_c_map(get_ann(Old), M, Es).
+-spec map_pair_key(c_map_pair()) -> cerl().
+
map_pair_key(#c_map_pair{key=K}) -> K.
+
+-spec map_pair_val(c_map_pair()) -> cerl().
+
map_pair_val(#c_map_pair{val=V}) -> V.
+
+-spec map_pair_op(c_map_pair()) -> map_op().
+
map_pair_op(#c_map_pair{op=Op}) -> Op.
-spec c_map_pair(cerl(), cerl()) -> c_map_pair().
@@ -1699,6 +1709,8 @@ c_map_pair_exact(Key,Val) ->
ann_c_map_pair(As,Op,K,V) ->
#c_map_pair{op=Op, key = K, val=V, anno = As}.
+-spec update_c_map_pair(c_map_pair(), map_op(), cerl(), cerl()) -> c_map_pair().
+
update_c_map_pair(Old,Op,K,V) ->
#c_map_pair{op=Op, key=K, val=V, anno = get_ann(Old)}.
diff --git a/lib/compiler/src/compile.erl b/lib/compiler/src/compile.erl
index 1df6c1d316..c849306c0d 100644
--- a/lib/compiler/src/compile.erl
+++ b/lib/compiler/src/compile.erl
@@ -173,17 +173,25 @@ env_default_opts() ->
do_compile(Input, Opts0) ->
Opts = expand_opts(Opts0),
- {Pid,Ref} =
- spawn_monitor(fun() ->
- exit(try
- internal(Input, Opts)
- catch
- error:Reason ->
- {error,Reason}
- end)
- end),
- receive
- {'DOWN',Ref,process,Pid,Rep} -> Rep
+ IntFun = fun() -> try
+ internal(Input, Opts)
+ catch
+ error:Reason ->
+ {error,Reason}
+ end
+ end,
+ %% Dialyzer has already spawned workers.
+ case lists:member(dialyzer, Opts) of
+ true ->
+ IntFun();
+ false ->
+ {Pid,Ref} =
+ spawn_monitor(fun() ->
+ exit(IntFun())
+ end),
+ receive
+ {'DOWN',Ref,process,Pid,Rep} -> Rep
+ end
end.
expand_opts(Opts0) ->
@@ -206,11 +214,21 @@ expand_opt(report, Os) ->
expand_opt(return, Os) ->
[return_errors,return_warnings|Os];
expand_opt(r12, Os) ->
- [no_recv_opt,no_line_info|Os];
+ [no_recv_opt,no_line_info,no_utf8_atoms|Os];
expand_opt(r13, Os) ->
- [no_recv_opt,no_line_info|Os];
+ [no_recv_opt,no_line_info,no_utf8_atoms|Os];
expand_opt(r14, Os) ->
- [no_line_info|Os];
+ [no_line_info,no_utf8_atoms|Os];
+expand_opt(r15, Os) ->
+ [no_utf8_atoms|Os];
+expand_opt(r16, Os) ->
+ [no_utf8_atoms|Os];
+expand_opt(r17, Os) ->
+ [no_utf8_atoms|Os];
+expand_opt(r18, Os) ->
+ [no_utf8_atoms|Os];
+expand_opt(r19, Os) ->
+ [no_utf8_atoms|Os];
expand_opt({debug_info_key,_}=O, Os) ->
[encrypt_debug_info,O|Os];
expand_opt(no_float_opt, Os) ->
@@ -220,6 +238,8 @@ expand_opt(O, Os) -> [O|Os].
%% format_error(ErrorDescriptor) -> string()
+-spec format_error(term()) -> iolist().
+
format_error(no_native_support) ->
"this system is not configured for native-code compilation.";
format_error(no_crypto) ->
@@ -280,32 +300,40 @@ format_error_reason({Reason, Stack}) when is_list(Stack) ->
format_error_reason(Reason) ->
io_lib:format("~tp", [Reason]).
+-type err_warn_info() :: tuple().
+
%% The compile state record.
-record(compile, {filename="" :: file:filename(),
dir="" :: file:filename(),
base="" :: file:filename(),
ifile="" :: file:filename(),
ofile="" :: file:filename(),
- module=[],
- core_code=[],
- abstract_code=[], %Abstract code for debugger.
- options=[] :: [option()], %Options for compilation
+ module=[] :: module() | [],
+ core_code=[] :: cerl:c_module() | [],
+ abstract_code=[] :: binary() | [], %Abstract code for debugger.
+ options=[] :: [option()], %Options for compilation
mod_options=[] :: [option()], %Options for module_info
encoding=none :: none | epp:source_encoding(),
- errors=[],
- warnings=[]}).
+ errors=[] :: [err_warn_info()],
+ warnings=[] :: [err_warn_info()],
+ extra_chunks=[] :: [{binary(), binary()}]}).
internal({forms,Forms}, Opts0) ->
{_,Ps} = passes(forms, Opts0),
Source = proplists:get_value(source, Opts0, ""),
Opts1 = proplists:delete(source, Opts0),
- Compile = #compile{options=Opts1,mod_options=Opts1},
+ Compile = build_compile(Opts1),
internal_comp(Ps, Forms, Source, "", Compile);
internal({file,File}, Opts) ->
{Ext,Ps} = passes(file, Opts),
- Compile = #compile{options=Opts,mod_options=Opts},
+ Compile = build_compile(Opts),
internal_comp(Ps, none, File, Ext, Compile).
+build_compile(Opts0) ->
+ ExtraChunks = proplists:get_value(extra_chunks, Opts0, []),
+ Opts1 = proplists:delete(extra_chunks, Opts0),
+ #compile{options=Opts1,mod_options=Opts1,extra_chunks=ExtraChunks}.
+
internal_comp(Passes, Code0, File, Suffix, St0) ->
Dir = filename:dirname(File),
Base = filename:basename(File, Suffix),
@@ -1364,13 +1392,15 @@ encrypt({des3_cbc=Type,Key,IVec,BlockSize}, Bin0) ->
save_core_code(Code, St) ->
{ok,Code,St#compile{core_code=cerl:from_records(Code)}}.
-beam_asm(Code0, #compile{ifile=File,abstract_code=Abst,mod_options=Opts0}=St) ->
+beam_asm(Code0, #compile{ifile=File,abstract_code=Abst,extra_chunks=ExtraChunks,
+ options=CompilerOpts,mod_options=Opts0}=St) ->
Source = paranoid_absname(File),
Opts1 = lists:map(fun({debug_info_key,_}) -> {debug_info_key,'********'};
(Other) -> Other
end, Opts0),
Opts2 = [O || O <- Opts1, effects_code_generation(O)],
- case beam_asm:module(Code0, Abst, Source, Opts2) of
+ Chunks = [{<<"Abst">>, Abst} | ExtraChunks],
+ case beam_asm:module(Code0, Chunks, Source, Opts2, CompilerOpts) of
{ok,Code} -> {ok,Code,St#compile{abstract_code=[]}}
end.
@@ -1592,6 +1622,9 @@ list_errors(_F, []) -> ok.
%% tmpfile(ObjFile) -> TmpFile
%% Work out the correct input and output file names.
+-spec iofile(atom() | file:filename_all()) ->
+ {file:name_all(),file:name_all()}.
+
iofile(File) when is_atom(File) ->
iofile(atom_to_list(File));
iofile(File) ->
@@ -1726,6 +1759,8 @@ help(_) ->
%% compile(AbsFileName, Outfilename, Options)
%% Compile entry point for erl_compile.
+-spec compile(file:filename(), _, #options{}) -> 'ok' | 'error'.
+
compile(File0, _OutFile, Options) ->
pre_load(),
File = shorten_filename(File0),
@@ -1734,6 +1769,8 @@ compile(File0, _OutFile, Options) ->
Other -> Other
end.
+-spec compile_beam(file:filename(), _, #options{}) -> 'ok' | 'error'.
+
compile_beam(File0, _OutFile, Opts) ->
File = shorten_filename(File0),
case file(File, [from_beam|make_erl_options(Opts)]) of
@@ -1741,6 +1778,8 @@ compile_beam(File0, _OutFile, Opts) ->
Other -> Other
end.
+-spec compile_asm(file:filename(), _, #options{}) -> 'ok' | 'error'.
+
compile_asm(File0, _OutFile, Opts) ->
File = shorten_filename(File0),
case file(File, [from_asm|make_erl_options(Opts)]) of
@@ -1748,6 +1787,8 @@ compile_asm(File0, _OutFile, Opts) ->
Other -> Other
end.
+-spec compile_core(file:filename(), _, #options{}) -> 'ok' | 'error'.
+
compile_core(File0, _OutFile, Opts) ->
File = shorten_filename(File0),
case file(File, [from_core|make_erl_options(Opts)]) of
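Since the r15–r19 aliases now expand to no_utf8_atoms, compiling forms that contain atoms outside the Latin-1 range fails under those options, as the utf8_atoms test case below verifies. A short sketch of that behaviour:

    Anno = erl_anno:new(1),
    Atom = binary_to_atom(<<"こんにちは"/utf8>>, utf8),
    Forms = [{attribute,Anno,module,m},
             {attribute,Anno,compile,[export_all]},
             {function,Anno,atom,0,[{clause,Anno,[],[],[{atom,Anno,Atom}]}]}],
    {ok,m,_Beam} = compile:forms(Forms, [binary]),  %% UTF-8 atoms are allowed
    error = compile:forms(Forms, [binary, r19]).    %% r19 implies no_utf8_atoms
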
diff --git a/lib/compiler/src/core_scan.erl b/lib/compiler/src/core_scan.erl
index 11b52f6c5f..15bfc78c8b 100644
--- a/lib/compiler/src/core_scan.erl
+++ b/lib/compiler/src/core_scan.erl
@@ -49,13 +49,37 @@
-import(lists, [reverse/1]).
+-type location() :: integer().
+-type category() :: atom().
+-type symbol() :: atom() | float() | integer() | string().
+-type token() :: {category(), Anno :: location(), symbol()}
+ | {category(), Anno :: location()}.
+-type tokens() :: [token()].
+-type error_description() :: term().
+-type error_info() :: {erl_anno:location(), module(), error_description()}.
+
%% string([Char]) ->
%% string([Char], StartPos) ->
%% {ok, [Tok], EndPos} |
%% {error, {Pos,core_scan,What}, EndPos}
+-spec string(String) -> Return when
+ String :: string(),
+ Return :: {'ok', Tokens :: tokens(), EndLocation}
+ | {'error', ErrorInfo :: error_info(), ErrorLocation},
+ EndLocation :: location(),
+ ErrorLocation :: location().
+
string(Cs) -> string(Cs, 1).
+-spec string(String, StartLocation) -> Return when
+ String :: string(),
+ Return :: {'ok', Tokens :: tokens(), EndLocation}
+ | {'error', ErrorInfo :: error_info(), ErrorLocation},
+ StartLocation :: location(),
+ EndLocation :: location(),
+ ErrorLocation :: location().
+
string(Cs, Sp) ->
%% Add an 'eof' to always get correct handling.
case string_pre_scan(Cs, [], Sp) of
diff --git a/lib/compiler/src/sys_core_fold.erl b/lib/compiler/src/sys_core_fold.erl
index 50d28c0a5f..3673a339f6 100644
--- a/lib/compiler/src/sys_core_fold.erl
+++ b/lib/compiler/src/sys_core_fold.erl
@@ -1893,10 +1893,10 @@ case_opt_arg_1(E0, Cs0, LitExpr) ->
true ->
E = case_opt_compiler_generated(E0),
Cs = case_opt_nomatch(E, Cs0, LitExpr),
- case cerl:data_type(E) of
- {atomic,_} ->
+ case cerl:is_literal(E) of
+ true ->
case_opt_lit(E, Cs);
- _ ->
+ false ->
case_opt_data(E, Cs)
end
end.
diff --git a/lib/compiler/src/sys_pre_attributes.erl b/lib/compiler/src/sys_pre_attributes.erl
index bc93c85989..67adae5acf 100644
--- a/lib/compiler/src/sys_pre_attributes.erl
+++ b/lib/compiler/src/sys_pre_attributes.erl
@@ -25,10 +25,10 @@
-define(OPTION_TAG, attributes).
--record(state, {forms,
- pre_ops = [],
- post_ops = [],
- options}).
+-record(state, {forms :: [form()],
+ pre_ops = [] :: [op()],
+ post_ops = [] :: [op()],
+ options :: [option()]}).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Inserts, deletes and replaces Erlang compiler attributes.
@@ -59,9 +59,23 @@
%% due to that the pre_transform pass did not find the attribute plus
%% all insert operations.
+-type attribute() :: atom().
+-type value() :: term().
+-type form() :: {function, integer(), atom(), arity(), _}
+ | {attribute, integer(), attribute(), _}.
+-type option() :: compile:option()
+ | {'attribute', 'insert', attribute(), value()}
+ | {'attribute', 'replace', attribute(), value()}
+ | {'attribute', 'delete', attribute()}.
+-type op() :: {'insert', attribute(), value()}
+ | {'replace', attribute(), value()}
+ | {'delete', attribute()}.
+
+-spec parse_transform([form()], [option()]) -> [form()].
+
parse_transform(Forms, Options) ->
S = #state{forms = Forms, options = Options},
- S2 = init_transform(S),
+ S2 = init_transform(Options, S),
report_verbose("Pre options: ~p~n", [S2#state.pre_ops], S2),
report_verbose("Post options: ~p~n", [S2#state.post_ops], S2),
S3 = pre_transform(S2),
@@ -71,13 +85,6 @@ parse_transform(Forms, Options) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Computes the lists of pre_ops and post_ops that are
%% used in the real transformation.
-init_transform(S) ->
- case S#state.options of
- Options when is_list(Options) ->
- init_transform(Options, S);
- Option ->
- init_transform([Option], S)
- end.
init_transform([{attribute, insert, Name, Val} | Tail], S) ->
Op = {insert, Name, Val},
@@ -92,12 +99,9 @@ init_transform([{attribute, delete, Name} | Tail], S) ->
Op = {delete, Name},
PreOps = [Op | S#state.pre_ops],
init_transform(Tail, S#state{pre_ops = PreOps});
-init_transform([], S) ->
- S;
init_transform([_ | T], S) ->
init_transform(T, S);
-init_transform(BadOpt, S) ->
- report_error("Illegal option (ignored): ~p~n", [BadOpt], S),
+init_transform([], S) ->
S.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -176,18 +180,9 @@ attrs([], _, _) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Report functions.
%%
-%% Errors messages are controlled with the 'report_errors' compiler option
%% Warning messages are controlled with the 'report_warnings' compiler option
%% Verbose messages are controlled with the 'verbose' compiler option
-report_error(Format, Args, S) ->
- case is_error(S) of
- true ->
- io:format("~p: * ERROR * " ++ Format, [?MODULE | Args]);
- false ->
- ok
- end.
-
report_warning(Format, Args, S) ->
case is_warning(S) of
true ->
@@ -204,9 +199,6 @@ report_verbose(Format, Args, S) ->
ok
end.
-is_error(S) ->
- lists:member(report_errors, S#state.options) or is_verbose(S).
-
is_warning(S) ->
lists:member(report_warnings, S#state.options) or is_verbose(S).
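sys_pre_attributes is a parse transform driven entirely by compiler options; after this change it expects a plain option list (the single-option fallback and the error report are gone). A hedged usage sketch with hypothetical file and attribute names:

    %% Insert, replace or delete module attributes at compile time.
    compile:file("my_mod.erl",
                 [{parse_transform, sys_pre_attributes},
                  {attribute, insert, app_vsn, "my_app-1.2.3"},
                  {attribute, delete, author}]).
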
diff --git a/lib/compiler/src/v3_codegen.erl b/lib/compiler/src/v3_codegen.erl
index 3627cdb7cd..47c1567f10 100644
--- a/lib/compiler/src/v3_codegen.erl
+++ b/lib/compiler/src/v3_codegen.erl
@@ -69,6 +69,10 @@
stk=[], %Stack table
res=[]}). %Reserved regs: [{reserved,I,V}]
+-type life_module() :: {module(),_,_,[_]}.
+
+-spec module(life_module(), [compile:option()]) -> {'ok',beam_asm:module_code()}.
+
module({Mod,Exp,Attr,Forms}, _Options) ->
{Fs,St} = functions(Forms, {atom,Mod}),
{ok,{Mod,Exp,Attr,Fs,St#cg.lcount}}.
diff --git a/lib/compiler/src/v3_core.erl b/lib/compiler/src/v3_core.erl
index 14cd41ae27..8dea7ec03a 100644
--- a/lib/compiler/src/v3_core.erl
+++ b/lib/compiler/src/v3_core.erl
@@ -1059,13 +1059,30 @@ count_bits(Int) ->
count_bits_1(0, Bits) -> Bits;
count_bits_1(Int, Bits) -> count_bits_1(Int bsr 64, Bits+64).
-bin_expand_strings(Es) ->
- foldr(fun ({bin_element,Line,{string,_,S},Sz,Ts}, Es1) ->
- foldr(fun (C, Es2) ->
- [{bin_element,Line,{char,Line,C},Sz,Ts}|Es2]
- end, Es1, S);
- (E, Es1) -> [E|Es1]
- end, [], Es).
+bin_expand_strings(Es0) ->
+ foldr(fun ({bin_element,Line,{string,_,S},{integer,_,8},_}, Es) ->
+ bin_expand_string(S, Line, 0, 0) ++ Es;
+ ({bin_element,Line,{string,_,S},Sz,Ts}, Es1) ->
+ foldr(
+ fun (C, Es) ->
+ [{bin_element,Line,{char,Line,C},Sz,Ts}|Es]
+ end, Es1, S);
+ (E, Es) ->
+ [E|Es]
+ end, [], Es0).
+
+bin_expand_string(S, Line, Val, Size) when Size >= 2048 ->
+ Combined = make_combined(Line, Val, Size),
+ [Combined|bin_expand_string(S, Line, 0, 0)];
+bin_expand_string([H|T], Line, Val, Size) ->
+ bin_expand_string(T, Line, (Val bsl 8) bor H, Size+8);
+bin_expand_string([], Line, Val, Size) ->
+ [make_combined(Line, Val, Size)].
+
+make_combined(Line, Val, Size) ->
+ {bin_element,Line,{integer,Line,Val},
+ {integer,Line,Size},
+ [integer,{unit,1},unsigned,big]}.
expr_bin_1(Es, St) ->
foldr(fun (E, {Ces,Esp,St0}) ->
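The new bin_expand_string/4 packs plain 8-bit string segments into wide integer segments (flushed at 2048 bits) instead of emitting one bin_element per character. A standalone sketch of the packing step, using a hypothetical helper name:

    %% Fold an 8-bit string into {Value,BitSize}, the payload of one combined
    %% big-endian integer segment; the real code starts a new segment at 2048 bits.
    pack(Str) ->
        lists:foldl(fun(C, {Val, Size}) -> {(Val bsl 8) bor C, Size + 8} end,
                    {0, 0}, Str).
    %% pack("ab") -> {24930,16}, and <<24930:16>> is exactly <<"ab">>.
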
diff --git a/lib/compiler/src/v3_kernel_pp.erl b/lib/compiler/src/v3_kernel_pp.erl
index d5f6ee19c9..187e69a22c 100644
--- a/lib/compiler/src/v3_kernel_pp.erl
+++ b/lib/compiler/src/v3_kernel_pp.erl
@@ -47,7 +47,7 @@
canno(Cthing) -> element(2, Cthing).
--spec format(cerl:cerl()) -> iolist().
+-spec format(#k_mdef{}) -> iolist().
format(Node) -> format(Node, #ctxt{}).
diff --git a/lib/compiler/src/v3_life.erl b/lib/compiler/src/v3_life.erl
index 0f2aeda87f..be3ade47ff 100644
--- a/lib/compiler/src/v3_life.erl
+++ b/lib/compiler/src/v3_life.erl
@@ -52,10 +52,15 @@
-include("v3_kernel.hrl").
-include("v3_life.hrl").
+-type fa() :: {atom(),arity()}.
+
%% These are not defined in v3_kernel.hrl.
get_kanno(Kthing) -> element(2, Kthing).
%%set_kanno(Kthing, Anno) -> setelement(2, Kthing, Anno).
+-spec module(#k_mdef{}, [compile:option()]) ->
+ {'ok',{module(),[fa()],[_],[_]}}.
+
module(#k_mdef{name=M,exports=Es,attributes=As,body=Fs0}, _Opts) ->
Fs1 = functions(Fs0, []),
{ok,{M,Es,As,Fs1}}.
@@ -416,6 +421,10 @@ add_var(V, F, L, Vdb) ->
vdb_new(Vs) ->
sort([{V,0,0} || {var,V} <- Vs]).
+-type var() :: atom().
+
+-spec vdb_find(var(), [vdb_entry()]) -> 'error' | vdb_entry().
+
vdb_find(V, Vdb) ->
case lists:keyfind(V, 1, Vdb) of
false -> error;
diff --git a/lib/compiler/src/v3_life.hrl b/lib/compiler/src/v3_life.hrl
index 9d03a86ccd..5c76312067 100644
--- a/lib/compiler/src/v3_life.hrl
+++ b/lib/compiler/src/v3_life.hrl
@@ -20,8 +20,10 @@
%% This record contains variable life-time annotation for a
%% kernel expression. Added by v3_life, used by v3_codegen.
+-type vdb_entry() :: {atom(),non_neg_integer(),non_neg_integer()}.
+
-record(l, {ke, %Kernel expression
- i=0, %Op number
- vdb=[], %Variable database
- a}). %Core annotation
+ i=0 :: non_neg_integer(), %Op number
+ vdb=[] :: [vdb_entry()], %Variable database
+ a=[] :: [term()]}). %Core annotation
diff --git a/lib/compiler/test/compile_SUITE.erl b/lib/compiler/test/compile_SUITE.erl
index 8c09414a52..10740ac2b0 100644
--- a/lib/compiler/test/compile_SUITE.erl
+++ b/lib/compiler/test/compile_SUITE.erl
@@ -30,7 +30,7 @@
file_1/1, forms_2/1, module_mismatch/1, big_file/1, outdir/1,
binary/1, makedep/1, cond_and_ifdef/1, listings/1, listings_big/1,
other_output/1, kernel_listing/1, encrypted_abstr/1,
- strict_record/1,
+ strict_record/1, utf8_atoms/1, extra_chunks/1,
cover/1, env/1, core/1,
core_roundtrip/1, asm/1, optimized_guards/1,
sys_pre_attributes/1, dialyzer/1,
@@ -48,7 +48,7 @@ all() ->
[app_test, appup_test, file_1, forms_2, module_mismatch, big_file, outdir,
binary, makedep, cond_and_ifdef, listings, listings_big,
other_output, kernel_listing, encrypted_abstr,
- strict_record,
+ strict_record, utf8_atoms, extra_chunks,
cover, env, core, core_roundtrip, asm, optimized_guards,
sys_pre_attributes, dialyzer, warnings, pre_load_check,
env_compiler_options].
@@ -450,8 +450,10 @@ do_kernel_listing({M,A}) ->
try
{ok,M,Kern} = compile:forms(A, [to_kernel]),
IoList = v3_kernel_pp:format(Kern),
- _ = iolist_size(IoList),
- ok
+ case unicode:characters_to_binary(IoList) of
+ Bin when is_binary(Bin) ->
+ ok
+ end
catch
throw:{error,Error} ->
io:format("*** compilation failure '~p' for module ~s\n",
@@ -680,6 +682,32 @@ test_sloppy() ->
{1,2} = record_access:test(Turtle),
Turtle.
+utf8_atoms(Config) when is_list(Config) ->
+ Anno = erl_anno:new(1),
+ Atom = binary_to_atom(<<"こんにちは"/utf8>>, utf8),
+ Forms = [{attribute,Anno,compile,[export_all]},
+ {function,Anno,atom,0,[{clause,Anno,[],[],[{atom,Anno,Atom}]}]}],
+
+ Utf8AtomForms = [{attribute,Anno,module,utf8_atom}|Forms],
+ {ok,utf8_atom,Utf8AtomBin} =
+ compile:forms(Utf8AtomForms, [binary]),
+ {ok,{utf8_atom,[{atoms,_}]}} =
+ beam_lib:chunks(Utf8AtomBin, [atoms]),
+ code:load_binary(utf8_atom, "compile_SUITE", Utf8AtomBin),
+ Atom = utf8_atom:atom(),
+
+ NoUtf8AtomForms = [{attribute,Anno,module,no_utf8_atom}|Forms],
+ error = compile:forms(NoUtf8AtomForms, [binary, r19]).
+
+extra_chunks(Config) when is_list(Config) ->
+ Anno = erl_anno:new(1),
+ Forms = [{attribute,Anno,module,extra_chunks}],
+
+ {ok,extra_chunks,ExtraChunksBinary} =
+ compile:forms(Forms, [binary, {extra_chunks, [{<<"ExCh">>, <<"Contents">>}]}]),
+ {ok,{extra_chunks,[{"ExCh",<<"Contents">>}]}} =
+ beam_lib:chunks(ExtraChunksBinary, ["ExCh"]).
+
env(Config) when is_list(Config) ->
{Simple,Target} = get_files(Config, simple, env),
{ok,Cwd} = file:get_cwd(),
@@ -751,7 +779,7 @@ do_core_1(M, A, Outdir) ->
{ok,M,Core0} = compile:forms(A, [to_core]),
CoreFile = filename:join(Outdir, atom_to_list(M)++".core"),
CorePP = core_pp:format(Core0),
- ok = file:write_file(CoreFile, CorePP),
+ ok = file:write_file(CoreFile, unicode:characters_to_binary(CorePP)),
%% Parse the .core file and return the result as Core Erlang Terms.
Core = case compile:file(CoreFile, [report_errors,from_core,no_copt,to_core,binary]) of
@@ -823,7 +851,7 @@ do_core_roundtrip_1(Mod, Abstr, Outdir) ->
do_core_roundtrip_2(M, Core0, Outdir) ->
CoreFile = filename:join(Outdir, atom_to_list(M)++".core"),
CorePP = core_pp:format_all(Core0),
- ok = file:write_file(CoreFile, CorePP),
+ ok = file:write_file(CoreFile, unicode:characters_to_binary(CorePP)),
%% Parse the .core file and return the result as Core Erlang Terms.
Core2 = case compile:file(CoreFile, [report_errors,from_core,
diff --git a/lib/compiler/test/compile_SUITE_data/simple.erl b/lib/compiler/test/compile_SUITE_data/simple.erl
index d8324dafaf..9385d101e0 100644
--- a/lib/compiler/test/compile_SUITE_data/simple.erl
+++ b/lib/compiler/test/compile_SUITE_data/simple.erl
@@ -19,7 +19,7 @@
%%
-module(simple).
--export([test/0]).
+-export([test/0,unicode/0]).
-ifdef(need_foo).
-export([foo/0]).
@@ -28,6 +28,9 @@
test() ->
passed.
+unicode() ->
+ {"это",'спутник'}.
+
%% Conditional inclusion.
%% Compile with [{d, need_foo}, {d, foo_value, 42}].
diff --git a/lib/compiler/test/lc_SUITE.erl b/lib/compiler/test/lc_SUITE.erl
index 3cb49433ce..76dfaee482 100644
--- a/lib/compiler/test/lc_SUITE.erl
+++ b/lib/compiler/test/lc_SUITE.erl
@@ -19,7 +19,7 @@
%%
-module(lc_SUITE).
--export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
+-export([all/0, suite/0, groups/0, init_per_suite/1, end_per_suite/1,
init_per_group/2,end_per_group/2,
init_per_testcase/2,end_per_testcase/2,
basic/1,deeply_nested/1,no_generator/1,
@@ -32,11 +32,11 @@ suite() ->
[{ct_hooks,[ts_install_cth]},
{timetrap,{minutes,1}}].
-all() ->
+all() ->
test_lib:recompile(?MODULE),
[{group,p}].
-groups() ->
+groups() ->
[{p,test_lib:parallel(),
[basic,
deeply_nested,
@@ -214,6 +214,7 @@ shadow(Config) when is_list(Config) ->
ok.
effect(Config) when is_list(Config) ->
+ ct:timetrap({minutes,10}),
[{42,{a,b,c}}] =
do_effect(fun(F, L) ->
[F({V1,V2}) ||
@@ -226,7 +227,7 @@ effect(Config) when is_list(Config) ->
lc_SUITE ->
_ = [{'EXIT',{badarg,_}} =
(catch binary_to_atom(<<C/utf8>>, utf8)) ||
- C <- lists:seq(16#10000, 16#FFFFF)];
+ C <- lists:seq(16#FF10000, 16#FFFFFFF)];
_ ->
ok
end,
@@ -240,7 +241,7 @@ do_effect(Lc, L) ->
lists:reverse(erase(?MODULE)).
id(I) -> I.
-
+
fc(Args, {'EXIT',{function_clause,[{?MODULE,_,Args,_}|_]}}) -> ok;
fc(Args, {'EXIT',{function_clause,[{?MODULE,_,Arity,_}|_]}})
when length(Args) =:= Arity ->
diff --git a/lib/compiler/test/map_SUITE.erl b/lib/compiler/test/map_SUITE.erl
index 36e82c1459..5e90b79aa2 100644
--- a/lib/compiler/test/map_SUITE.erl
+++ b/lib/compiler/test/map_SUITE.erl
@@ -1559,7 +1559,6 @@ t_warn_pair_key_overloaded(Config) when is_list(Config) ->
"hi2" => lists:subtract([1,2],[1]),
"hi3" => +3,
"hi1" => erlang:min(1,2),
- "hi1" => erlang:hash({1,2},35),
"hi1" => erlang:phash({1,2},33),
"hi1" => erlang:phash2({1,2},34),
"hi1" => erlang:integer_to_binary(1337),
diff --git a/lib/crypto/c_src/crypto.c b/lib/crypto/c_src/crypto.c
index 42cf7ac37b..b2f31870b9 100644
--- a/lib/crypto/c_src/crypto.c
+++ b/lib/crypto/c_src/crypto.c
@@ -61,7 +61,6 @@
#include <openssl/evp.h>
#include <openssl/hmac.h>
-
/* Helper macro to construct a OPENSSL_VERSION_NUMBER.
* See openssl/opensslv.h
*/
@@ -72,6 +71,46 @@
PACKED_OPENSSL_VERSION(MAJ,MIN,FIX,('a'-1))
+/* LibreSSL was cloned from OpenSSL 1.0.1g and claims to be API and ABI compatible
+ * with 1.0.1.
+ *
+ * LibreSSL uses the same include file and symbol names as OpenSSL, but defines
+ * OPENSSL_VERSION_NUMBER to be >= 2.0.0.
+ *
+ * Therefore, tests like this work as intended:
+ *      OPENSSL_VERSION_NUMBER >= PACKED_OPENSSL_VERSION_PLAIN(1,0,0)
+ * (The test is, for example, "2.4.2" >= "1.0.0", whereas with the cloned
+ *  OpenSSL version it would be "1.0.1" >= "1.0.0")
+ *
+ * But tests like this give the wrong result:
+ *      OPENSSL_VERSION_NUMBER < PACKED_OPENSSL_VERSION_PLAIN(1,1,0)
+ * (The test is false since "2.4.2" is not less than "1.1.0". It should have
+ *  been true because the LibreSSL API version is "1.0.1")
+ *
+ */
+
+#ifdef LIBRESSL_VERSION_NUMBER
+/* A macro to test on in this file */
+#define HAS_LIBRESSL
+#endif
+
+#ifdef HAS_LIBRESSL
+/* LibreSSL dislikes FIPS */
+# ifdef FIPS_SUPPORT
+# undef FIPS_SUPPORT
+# endif
+
+/* LibreSSL wants the 1.0.1 API */
+# define NEED_EVP_COMPATIBILITY_FUNCTIONS
+#endif
+
+
+#if OPENSSL_VERSION_NUMBER < PACKED_OPENSSL_VERSION_PLAIN(1,1,0)
+# define NEED_EVP_COMPATIBILITY_FUNCTIONS
+#endif
+
+
+
#if OPENSSL_VERSION_NUMBER >= PACKED_OPENSSL_VERSION_PLAIN(1,0,0)
#include <openssl/modes.h>
#endif
@@ -121,7 +160,9 @@
#endif
#if OPENSSL_VERSION_NUMBER >= PACKED_OPENSSL_VERSION_PLAIN(1,1,0)
-# define HAVE_CHACHA20_POLY1305
+# ifndef HAS_LIBRESSL
+# define HAVE_CHACHA20_POLY1305
+# endif
#endif
#if OPENSSL_VERSION_NUMBER <= PACKED_OPENSSL_VERSION(0,9,8,'l')
@@ -206,8 +247,8 @@ do { \
} \
} while (0)
-#if OPENSSL_VERSION_NUMBER < PACKED_OPENSSL_VERSION_PLAIN(1,1,0)
+#ifdef NEED_EVP_COMPATIBILITY_FUNCTIONS
/*
* In OpenSSL 1.1.0, most structs are opaque. That means that
* the structs cannot be allocated as automatic variables on the
@@ -315,6 +356,7 @@ static INLINE int DSA_set0_pqg(DSA *d, BIGNUM *p, BIGNUM *q, BIGNUM *g)
static INLINE int DH_set0_key(DH *dh, BIGNUM *pub_key, BIGNUM *priv_key);
static INLINE int DH_set0_pqg(DH *dh, BIGNUM *p, BIGNUM *q, BIGNUM *g);
+static INLINE int DH_set_length(DH *dh, long length);
static INLINE void DH_get0_pqg(const DH *dh,
const BIGNUM **p, const BIGNUM **q, const BIGNUM **g);
static INLINE void DH_get0_key(const DH *dh,
@@ -335,6 +377,12 @@ static INLINE int DH_set0_pqg(DH *dh, BIGNUM *p, BIGNUM *q, BIGNUM *g)
return 1;
}
+static INLINE int DH_set_length(DH *dh, long length)
+{
+ dh->length = length;
+ return 1;
+}
+
static INLINE void
DH_get0_pqg(const DH *dh, const BIGNUM **p, const BIGNUM **q, const BIGNUM **g)
{
@@ -467,7 +515,7 @@ static ErlNifFunc nif_funcs[] = {
{"rsa_generate_key_nif", 2, rsa_generate_key_nif},
{"dh_generate_parameters_nif", 2, dh_generate_parameters_nif},
{"dh_check", 1, dh_check},
- {"dh_generate_key_nif", 3, dh_generate_key_nif},
+ {"dh_generate_key_nif", 4, dh_generate_key_nif},
{"dh_compute_key_nif", 3, dh_compute_key_nif},
{"srp_value_B_nif", 5, srp_value_B_nif},
{"srp_user_secret_nif", 7, srp_user_secret_nif},
@@ -3032,7 +3080,7 @@ static ERL_NIF_TERM dh_check(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]
}
static ERL_NIF_TERM dh_generate_key_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{/* (PrivKey, DHParams=[P,G], Mpint) */
+{/* (PrivKey|undefined, DHParams=[P,G], Mpint, Len|0) */
DH* dh_params;
int pub_len, prv_len;
unsigned char *pub_ptr, *prv_ptr;
@@ -3040,6 +3088,7 @@ static ERL_NIF_TERM dh_generate_key_nif(ErlNifEnv* env, int argc, const ERL_NIF_
int mpint; /* 0 or 4 */
BIGNUM *priv_key = NULL;
BIGNUM *dh_p = NULL, *dh_g = NULL;
+ unsigned long len = 0;
if (!(get_bn_from_bin(env, argv[0], &priv_key)
|| argv[0] == atom_undefined)
@@ -3048,8 +3097,10 @@ static ERL_NIF_TERM dh_generate_key_nif(ErlNifEnv* env, int argc, const ERL_NIF_
|| !enif_get_list_cell(env, tail, &head, &tail)
|| !get_bn_from_bin(env, head, &dh_g)
|| !enif_is_empty_list(env, tail)
- || !enif_get_int(env, argv[2], &mpint) || (mpint & ~4)) {
- if (priv_key) BN_free(priv_key);
+ || !enif_get_int(env, argv[2], &mpint) || (mpint & ~4)
+ || !enif_get_ulong(env, argv[3], &len) ) {
+
+ if (priv_key) BN_free(priv_key);
if (dh_p) BN_free(dh_p);
if (dh_g) BN_free(dh_g);
return enif_make_badarg(env);
@@ -3059,6 +3110,15 @@ static ERL_NIF_TERM dh_generate_key_nif(ErlNifEnv* env, int argc, const ERL_NIF_
DH_set0_key(dh_params, NULL, priv_key);
DH_set0_pqg(dh_params, dh_p, NULL, dh_g);
+ if (len) {
+ if (len < BN_num_bits(dh_p))
+ DH_set_length(dh_params, len);
+ else {
+ DH_free(dh_params);
+ return enif_make_badarg(env);
+ }
+ }
+
if (DH_generate_key(dh_params)) {
const BIGNUM *pub_key, *priv_key;
DH_get0_key(dh_params, &pub_key, &priv_key);
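
The LibreSSL comment added near the top of this file describes how version-number tests misbehave once OPENSSL_VERSION_NUMBER reports 2.x. A minimal sketch of that ordering, written as Erlang shell expressions with a simplified Packed/3 that keeps only the MAJ.MIN.FIX nibbles of PACKED_OPENSSL_VERSION_PLAIN (the patch-level and status nibbles are left out for illustration):

    Packed = fun(Maj, Min, Fix) -> (Maj bsl 28) bor (Min bsl 20) bor (Fix bsl 12) end,
    LibreSSL = Packed(2, 4, 2),                 %% roughly what LibreSSL reports
    true  = (LibreSSL >= Packed(1, 0, 0)),      %% ">= 1.0.0" tests behave as intended
    false = (LibreSSL  < Packed(1, 1, 0)).      %% "< 1.1.0" tests wrongly fail, hence the
                                                %% explicit NEED_EVP_COMPATIBILITY_FUNCTIONS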
diff --git a/lib/crypto/c_src/crypto_callback.h b/lib/crypto/c_src/crypto_callback.h
index 2641cc0c8b..489810116f 100644
--- a/lib/crypto/c_src/crypto_callback.h
+++ b/lib/crypto/c_src/crypto_callback.h
@@ -19,7 +19,7 @@
*/
#include <openssl/crypto.h>
-#if OPENSSL_VERSION_NUMBER < 0x10100000L
+#ifdef NEED_EVP_COMPATIBILITY_FUNCTIONS
# define CCB_FILE_LINE_ARGS
#else
# define CCB_FILE_LINE_ARGS , const char *file, int line
diff --git a/lib/crypto/doc/src/crypto.xml b/lib/crypto/doc/src/crypto.xml
index 3192ec0de8..d0deaceaaf 100644
--- a/lib/crypto/doc/src/crypto.xml
+++ b/lib/crypto/doc/src/crypto.xml
@@ -103,7 +103,7 @@
<code>dh_private() = key_value() </code>
- <code>dh_params() = [key_value()] = [P, G] </code>
+ <code>dh_params() = [key_value()] = [P, G] | [P, G, PrivateKeyBitLength]</code>
<code>ecdh_public() = key_value() </code>
diff --git a/lib/crypto/src/crypto.erl b/lib/crypto/src/crypto.erl
index 046fae674b..631af62615 100644
--- a/lib/crypto/src/crypto.erl
+++ b/lib/crypto/src/crypto.erl
@@ -159,10 +159,11 @@ cmac(Type, Key, Data, MacSize) ->
des3_cbc | des3_cbf | des3_cfb | des_ede3 |
blowfish_cbc | blowfish_cfb64 | blowfish_ofb64 |
aes_cbc128 | aes_cfb8 | aes_cfb128 | aes_cbc256 | aes_ige256 |
- aes_cbc |
+ aes_cbc |
rc2_cbc,
- Key::iodata(), Ivec::binary(), Data::iodata()) -> binary();
- (aes_gcm | chacha20_poly1305, Key::iodata(), Ivec::binary(), {AAD::binary(), Data::iodata()}) -> {binary(), binary()}.
+ Key::iodata(), Ivec::binary(), Data::iodata()) -> binary();
+ (aes_gcm | chacha20_poly1305, Key::iodata(), Ivec::binary(), {AAD::binary(), Data::iodata()}) -> {binary(), binary()};
+ (aes_gcm, Key::iodata(), Ivec::binary(), {AAD::binary(), Data::iodata(), TagLength::1..16}) -> {binary(), binary()}.
block_encrypt(Type, Key, Ivec, Data) when Type =:= des_cbc;
Type =:= des_cfb;
@@ -425,9 +426,15 @@ exor(Bin1, Bin2) ->
generate_key(Type, Params) ->
generate_key(Type, Params, undefined).
-generate_key(dh, DHParameters, PrivateKey) ->
+generate_key(dh, DHParameters0, PrivateKey) ->
+ {DHParameters, Len} =
+ case DHParameters0 of
+ [P,G,L] -> {[P,G], L};
+ [P,G] -> {[P,G], 0}
+ end,
dh_generate_key_nif(ensure_int_as_bin(PrivateKey),
- map_ensure_int_as_bin(DHParameters), 0);
+ map_ensure_int_as_bin(DHParameters),
+ 0, Len);
generate_key(srp, {host, [Verifier, Generator, Prime, Version]}, PrivArg)
when is_binary(Verifier), is_binary(Generator), is_binary(Prime), is_atom(Version) ->
@@ -820,7 +827,7 @@ dh_check([_Prime,_Gen]) -> ?nif_stub.
%% DHParameters = [P (Prime)= mpint(), G(Generator) = mpint()]
%% PrivKey = mpint()
-dh_generate_key_nif(_PrivateKey, _DHParameters, _Mpint) -> ?nif_stub.
+dh_generate_key_nif(_PrivateKey, _DHParameters, _Mpint, _Length) -> ?nif_stub.
%% DHParameters = [P (Prime)= mpint(), G(Generator) = mpint()]
%% MyPrivKey, OthersPublicKey = mpint()
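
A hedged usage sketch (not taken from the patch) of the two API extensions visible above: the optional third element of dh_params() that bounds the private-key bit length, and the three-tuple aes_gcm input that selects a shorter tag. The module and function names are invented for the example, and P and G are assumed to be the prime and generator of an already agreed-upon DH group whose prime is longer than 256 bits:

    -module(dh_len_demo).                   %% hypothetical example module
    -export([run/2]).

    run(P, G) ->
        %% The third list element limits the DH private key to 256 bits.
        {_DhPub, DhPriv} = crypto:generate_key(dh, [P, G, 256]),
        true = byte_size(DhPriv) =< 32,
        Key = crypto:strong_rand_bytes(32),
        IV  = crypto:strong_rand_bytes(12),
        %% The three-tuple form requests an 8-byte GCM tag instead of the default 16.
        {Cipher, Tag} = crypto:block_encrypt(aes_gcm, Key, IV, {<<"aad">>, <<"msg">>, 8}),
        8 = byte_size(Tag),
        {DhPriv, Cipher, Tag}.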
diff --git a/lib/debugger/test/map_SUITE.erl b/lib/debugger/test/map_SUITE.erl
index 42484ff723..4d8a86f5a2 100644
--- a/lib/debugger/test/map_SUITE.erl
+++ b/lib/debugger/test/map_SUITE.erl
@@ -1714,10 +1714,8 @@ t_bif_map_values(Config) when is_list(Config) ->
t_erlang_hash(Config) when is_list(Config) ->
-
ok = t_bif_erlang_phash2(),
ok = t_bif_erlang_phash(),
- ok = t_bif_erlang_hash(),
ok.
t_bif_erlang_phash2() ->
@@ -1759,26 +1757,6 @@ t_bif_erlang_phash() ->
2620391445 = erlang:phash(M2,Sz), % 3590546636
ok.
-t_bif_erlang_hash() ->
- Sz = 1 bsl 27 - 1,
- 39684169 = erlang:hash(#{},Sz), % 5158
- 33673142 = erlang:hash(#{ a => 1, "a" => 2, <<"a">> => 3, {a,b} => 4 },Sz), % 71555838
- 95337869 = erlang:hash(#{ 1 => a, 2 => "a", 3 => <<"a">>, 4 => {a,b} },Sz), % 5497225
- 108959561 = erlang:hash(#{ 1 => a },Sz), % 126071654
- 59623150 = erlang:hash(#{ a => 1 },Sz), % 126426236
-
- 42775386 = erlang:hash(#{{} => <<>>},Sz), % 101655720
- 71692856 = erlang:hash(#{<<>> => {}},Sz), % 101655720
-
- M0 = #{ a => 1, "key" => <<"value">> },
- M1 = maps:remove("key",M0),
- M2 = M1#{ "key" => <<"value">> },
-
- 70254632 = erlang:hash(M0,Sz), % 38260486
- 59623150 = erlang:hash(M1,Sz), % 126426236
- 70254632 = erlang:hash(M2,Sz), % 38260486
- ok.
-
t_map_encode_decode(Config) when is_list(Config) ->
<<131,116,0,0,0,0>> = erlang:term_to_binary(#{}),
Pairs = [
diff --git a/lib/dialyzer/RELEASE_NOTES b/lib/dialyzer/RELEASE_NOTES
index 2457faa07a..299cc8642f 100644
--- a/lib/dialyzer/RELEASE_NOTES
+++ b/lib/dialyzer/RELEASE_NOTES
@@ -181,7 +181,7 @@ Version 1.8.0 (in Erlang/OTP R12B-2)
- Dialyzer has a new warning option -Wunmatched_returns which warns for
function calls that ignore the return value.
This catches many common programming errors (e.g. calling file:close/1
- and not checking for the absense of errors), interface discrepancies
+ and not checking for the absence of errors), interface discrepancies
(e.g. a function returning multiple values when in reality the function
is void and only called for its side-effects), calling the wrong function
(e.g. io_lib:format/1 instead of io:format/1), and even possible
diff --git a/lib/dialyzer/src/dialyzer.app.src b/lib/dialyzer/src/dialyzer.app.src
index 5b28f7ae86..f517c51ec1 100644
--- a/lib/dialyzer/src/dialyzer.app.src
+++ b/lib/dialyzer/src/dialyzer.app.src
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2006-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2006-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -48,5 +48,5 @@
{applications, [compiler, hipe, kernel, stdlib, wx]},
{env, []},
{runtime_dependencies, ["wx-1.2","syntax_tools-2.0","stdlib-3.0",
- "kernel-5.0","hipe-3.15.1","erts-8.0",
+ "kernel-5.0","hipe-3.15.4","erts-8.0",
"compiler-7.0"]}]}.
diff --git a/lib/dialyzer/src/dialyzer_analysis_callgraph.erl b/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
index b5510731e0..aeeb895a0c 100644
--- a/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
+++ b/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
@@ -94,9 +94,9 @@ loop(#server_state{parent = Parent} = State,
{AnalPid, cserver, CServer, Plt} ->
send_codeserver_plt(Parent, CServer, Plt),
loop(State, Analysis, ExtCalls);
- {AnalPid, done, Plt, DocPlt} ->
+ {AnalPid, done, MiniPlt, DocPlt} ->
send_ext_calls(Parent, ExtCalls),
- send_analysis_done(Parent, Plt, DocPlt);
+ send_analysis_done(Parent, MiniPlt, DocPlt);
{AnalPid, ext_calls, NewExtCalls} ->
loop(State, Analysis, NewExtCalls);
{AnalPid, ext_types, ExtTypes} ->
@@ -135,11 +135,9 @@ analysis_start(Parent, Analysis, LegalWarnings) ->
%% Remote type postprocessing
NewCServer =
try
- NewRecords = dialyzer_codeserver:get_temp_records(TmpCServer0),
+ TmpCServer1 = dialyzer_utils:merge_types(TmpCServer0, Plt),
NewExpTypes = dialyzer_codeserver:get_temp_exported_types(TmpCServer0),
- OldRecords = dialyzer_plt:get_types(Plt),
OldExpTypes0 = dialyzer_plt:get_exported_types(Plt),
- MergedRecords = dialyzer_utils:merge_records(NewRecords, OldRecords),
RemMods =
[case Analysis#analysis.start_from of
byte_code -> list_to_atom(filename:basename(F, ".beam"));
@@ -147,65 +145,92 @@ analysis_start(Parent, Analysis, LegalWarnings) ->
end || F <- Files],
OldExpTypes1 = dialyzer_utils:sets_filter(RemMods, OldExpTypes0),
MergedExpTypes = sets:union(NewExpTypes, OldExpTypes1),
- TmpCServer1 = dialyzer_codeserver:set_temp_records(MergedRecords, TmpCServer0),
TmpCServer2 =
dialyzer_codeserver:finalize_exported_types(MergedExpTypes, TmpCServer1),
+ erlang:garbage_collect(), % reduce heap size
?timing(State#analysis_state.timing_server, "remote",
- begin
- TmpCServer3 =
- dialyzer_utils:process_record_remote_types(TmpCServer2),
- dialyzer_contracts:process_contract_remote_types(TmpCServer3)
- end)
+ contracts_and_records(TmpCServer2))
catch
throw:{error, _ErrorMsg} = Error -> exit(Error)
end,
- NewPlt0 = dialyzer_plt:insert_types(Plt, dialyzer_codeserver:get_records(NewCServer)),
- ExpTypes = dialyzer_codeserver:get_exported_types(NewCServer),
- NewPlt1 = dialyzer_plt:insert_exported_types(NewPlt0, ExpTypes),
- State0 = State#analysis_state{plt = NewPlt1},
- dump_callgraph(Callgraph, State0, Analysis),
- State1 = State0#analysis_state{codeserver = NewCServer},
+ dump_callgraph(Callgraph, State, Analysis),
%% Remove all old versions of the files being analyzed
AllNodes = dialyzer_callgraph:all_nodes(Callgraph),
- Plt1 = dialyzer_plt:delete_list(NewPlt1, AllNodes),
+ Plt1_a = dialyzer_plt:delete_list(Plt, AllNodes),
+ Plt1 = dialyzer_plt:insert_callbacks(Plt1_a, NewCServer),
+ State1 = State#analysis_state{codeserver = NewCServer, plt = Plt1},
Exports = dialyzer_codeserver:get_exports(NewCServer),
+ NonExports = sets:subtract(sets:from_list(AllNodes), Exports),
+ NonExportsList = sets:to_list(NonExports),
NewCallgraph =
case Analysis#analysis.race_detection of
true -> dialyzer_callgraph:put_race_detection(true, Callgraph);
false -> Callgraph
end,
- State2 = analyze_callgraph(NewCallgraph, State1#analysis_state{plt = Plt1}),
+ State2 = analyze_callgraph(NewCallgraph, State1),
+ #analysis_state{plt = MiniPlt2,
+ doc_plt = DocPlt,
+ codeserver = Codeserver0} = State2,
+ {Codeserver, MiniPlt3} = move_data(Codeserver0, MiniPlt2),
dialyzer_callgraph:dispose_race_server(NewCallgraph),
rcv_and_send_ext_types(Parent),
- NonExports = sets:subtract(sets:from_list(AllNodes), Exports),
- NonExportsList = sets:to_list(NonExports),
- Plt2 = dialyzer_plt:delete_list(State2#analysis_state.plt, NonExportsList),
- send_codeserver_plt(Parent, CServer, State2#analysis_state.plt),
- send_analysis_done(Parent, Plt2, State2#analysis_state.doc_plt).
+ %% Since the PLT is never used, a dummy is sent:
+ DummyPlt = dialyzer_plt:new(),
+ send_codeserver_plt(Parent, Codeserver, DummyPlt),
+ MiniPlt4 = dialyzer_plt:delete_list(MiniPlt3, NonExportsList),
+ send_analysis_done(Parent, MiniPlt4, DocPlt).
+
+contracts_and_records(CodeServer) ->
+ Fun = contrs_and_recs(CodeServer),
+ {Pid, Ref} = erlang:spawn_monitor(Fun),
+ dialyzer_codeserver:give_away(CodeServer, Pid),
+ Pid ! {self(), go},
+ receive {'DOWN', Ref, process, Pid, Return} ->
+ Return
+ end.
+
+-spec contrs_and_recs(dialyzer_codeserver:codeserver()) ->
+ fun(() -> no_return()).
+
+contrs_and_recs(TmpCServer2) ->
+ fun() ->
+ Parent = receive {Pid, go} -> Pid end,
+ TmpCServer3 = dialyzer_utils:process_record_remote_types(TmpCServer2),
+ TmpServer4 =
+ dialyzer_contracts:process_contract_remote_types(TmpCServer3),
+ dialyzer_codeserver:give_away(TmpServer4, Parent),
+ exit(TmpServer4)
+ end.
+
+move_data(CServer, MiniPlt) ->
+ {CServer1, Records} = dialyzer_codeserver:extract_records(CServer),
+ MiniPlt1 = dialyzer_plt:insert_types(MiniPlt, Records),
+ {NewCServer, ExpTypes} = dialyzer_codeserver:extract_exported_types(CServer1),
+ NewMiniPlt = dialyzer_plt:insert_exported_types(MiniPlt1, ExpTypes),
+ {NewCServer, NewMiniPlt}.
analyze_callgraph(Callgraph, #analysis_state{codeserver = Codeserver,
doc_plt = DocPlt,
+ plt = Plt,
timing_server = TimingServer,
parent = Parent,
solvers = Solvers} = State) ->
- Plt = dialyzer_plt:insert_callbacks(State#analysis_state.plt, Codeserver),
- {NewPlt, NewDocPlt} =
- case State#analysis_state.analysis_type of
- plt_build ->
- NewPlt0 =
- dialyzer_succ_typings:analyze_callgraph(Callgraph, Plt, Codeserver,
- TimingServer, Solvers, Parent),
- {NewPlt0, DocPlt};
- succ_typings ->
- {Warnings, NewPlt0, NewDocPlt0} =
- dialyzer_succ_typings:get_warnings(Callgraph, Plt, DocPlt, Codeserver,
- TimingServer, Solvers, Parent),
- Warnings1 = filter_warnings(Warnings, Codeserver),
- send_warnings(State#analysis_state.parent, Warnings1),
- {NewPlt0, NewDocPlt0}
- end,
- dialyzer_callgraph:delete(Callgraph),
- State#analysis_state{plt = NewPlt, doc_plt = NewDocPlt}.
+ case State#analysis_state.analysis_type of
+ plt_build ->
+ NewMiniPlt =
+ dialyzer_succ_typings:analyze_callgraph(Callgraph, Plt, Codeserver,
+ TimingServer, Solvers, Parent),
+ dialyzer_callgraph:delete(Callgraph),
+ State#analysis_state{plt = NewMiniPlt, doc_plt = DocPlt};
+ succ_typings ->
+ {Warnings, NewMiniPlt, NewDocPlt} =
+ dialyzer_succ_typings:get_warnings(Callgraph, Plt, DocPlt, Codeserver,
+ TimingServer, Solvers, Parent),
+ dialyzer_callgraph:delete(Callgraph),
+ Warnings1 = filter_warnings(Warnings, Codeserver),
+ send_warnings(State#analysis_state.parent, Warnings1),
+ State#analysis_state{plt = NewMiniPlt, doc_plt = NewDocPlt}
+ end.
%%--------------------------------------------------------------------
%% Build the callgraph and fill the codeserver.
@@ -562,8 +587,9 @@ is_ok_fun({_Filename, _Line, {_M, _F, _A} = MFA}, Codeserver) ->
is_ok_tag(Tag, {_F, _L, MorMFA}, Codeserver) ->
not dialyzer_utils:is_suppressed_tag(MorMFA, Tag, Codeserver).
-send_analysis_done(Parent, Plt, DocPlt) ->
- Parent ! {self(), done, Plt, DocPlt},
+send_analysis_done(Parent, MiniPlt, DocPlt) ->
+ ok = dialyzer_plt:give_away(MiniPlt, Parent),
+ Parent ! {self(), done, MiniPlt, DocPlt},
ok.
send_ext_calls(_Parent, none) ->
@@ -576,7 +602,8 @@ send_ext_types(Parent, ExtTypes) ->
Parent ! {self(), ext_types, ExtTypes},
ok.
-send_codeserver_plt(Parent, CServer, Plt ) ->
+send_codeserver_plt(Parent, CServer, Plt) ->
+ ok = dialyzer_codeserver:give_away(CServer, Parent),
Parent ! {self(), cserver, CServer, Plt},
ok.
@@ -595,14 +622,14 @@ format_bad_calls([{{_, _, _}, {_, module_info, A}}|Left], CodeServer, Acc)
format_bad_calls([{FromMFA, {M, F, A} = To}|Left], CodeServer, Acc) ->
{_Var, FunCode} = dialyzer_codeserver:lookup_mfa_code(FromMFA, CodeServer),
Msg = {call_to_missing, [M, F, A]},
- {File, Line} = find_call_file_and_line(FunCode, To),
+ {File, Line} = find_call_file_and_line(FromMFA, FunCode, To, CodeServer),
WarningInfo = {File, Line, FromMFA},
NewAcc = [{?WARN_CALLGRAPH, WarningInfo, Msg}|Acc],
format_bad_calls(Left, CodeServer, NewAcc);
format_bad_calls([], _CodeServer, Acc) ->
Acc.
-find_call_file_and_line(Tree, MFA) ->
+find_call_file_and_line({Module, _, _}, Tree, MFA, CodeServer) ->
Fun =
fun(SubTree, Acc) ->
case cerl:is_c_call(SubTree) of
@@ -615,7 +642,7 @@ find_call_file_and_line(Tree, MFA) ->
case {cerl:concrete(M), cerl:concrete(F), A} of
MFA ->
Ann = cerl:get_ann(SubTree),
- [{get_file(Ann), get_line(Ann)}|Acc];
+ [{get_file(CodeServer, Module, Ann), get_line(Ann)}|Acc];
{erlang, make_fun, 3} ->
[CA1, CA2, CA3] = cerl:call_args(SubTree),
case
@@ -631,7 +658,8 @@ find_call_file_and_line(Tree, MFA) ->
of
MFA ->
Ann = cerl:get_ann(SubTree),
- [{get_file(Ann), get_line(Ann)}|Acc];
+ [{get_file(CodeServer, Module, Ann),
+ get_line(Ann)}|Acc];
_ ->
Acc
end;
@@ -651,8 +679,10 @@ get_line([Line|_]) when is_integer(Line) -> Line;
get_line([_|Tail]) -> get_line(Tail);
get_line([]) -> -1.
-get_file([{file, File}|_]) -> File;
-get_file([_|Tail]) -> get_file(Tail).
+get_file(Codeserver, Module, [{file, FakeFile}|_]) ->
+ dialyzer_codeserver:translate_fake_file(Codeserver, Module, FakeFile);
+get_file(Codeserver, Module, [_|Tail]) ->
+ get_file(Codeserver, Module, Tail).
-spec dump_callgraph(dialyzer_callgraph:callgraph(), #analysis_state{}, #analysis{}) ->
'ok'.
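
The new contracts_and_records/1 and contrs_and_recs/1 helpers above rely on a pattern that also appears in dialyzer_callgraph and dialyzer_cl in this patch: spawn a worker, transfer ETS ownership to it with give_away, and let the worker hand its result back as the exit reason picked up from the 'DOWN' message. A minimal, self-contained sketch of that pattern (module and table names are invented for the example):

    -module(give_away_demo).                %% hypothetical example module
    -export([run/0]).

    run() ->
        {Pid, Ref} = erlang:spawn_monitor(fun worker/0),
        Tab = ets:new(demo_tab, [public]),
        true = ets:give_away(Tab, Pid, any),
        Pid ! {self(), go},
        %% The worker's result travels back as its exit reason.
        receive {'DOWN', Ref, process, Pid, Result} -> Result end.

    worker() ->
        Caller = receive {From, go} -> From end,
        Tab = receive {'ETS-TRANSFER', T, _Old, any} -> T end,
        true = ets:insert(Tab, {answer, 42}),
        true = ets:give_away(Tab, Caller, any),  %% hand ownership back
        exit(ets:info(Tab, size)).               %% result: 1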
diff --git a/lib/dialyzer/src/dialyzer_behaviours.erl b/lib/dialyzer/src/dialyzer_behaviours.erl
index e79a5d1cd9..d380ab2a50 100644
--- a/lib/dialyzer/src/dialyzer_behaviours.erl
+++ b/lib/dialyzer/src/dialyzer_behaviours.erl
@@ -55,9 +55,9 @@ check_callbacks(Module, Attrs, Records, Plt, Codeserver) ->
_ ->
MFA = {Module,module_info,0},
{_Var,Code} = dialyzer_codeserver:lookup_mfa_code(MFA, Codeserver),
- File = get_file(cerl:get_ann(Code)),
+ File = get_file(Codeserver, Module, cerl:get_ann(Code)),
State = #state{plt = Plt, filename = File, behlines = BehLines,
- codeserver = Codeserver, records = Records},
+ codeserver = Codeserver, records = Records},
Warnings = get_warnings(Module, Behaviours, State),
[add_tag_warning_info(Module, W, State) || W <- Warnings]
end.
@@ -206,12 +206,15 @@ add_tag_warning_info(Module, {_Tag, [_B, Fun, Arity|_R]} = Warn, State) ->
dialyzer_codeserver:lookup_mfa_code({Module, Fun, Arity},
State#state.codeserver),
Anns = cerl:get_ann(FunCode),
- WarningInfo = {get_file(Anns), get_line(Anns), {Module, Fun, Arity}},
+ File = get_file(State#state.codeserver, Module, Anns),
+ WarningInfo = {File, get_line(Anns), {Module, Fun, Arity}},
{?WARN_BEHAVIOUR, WarningInfo, Warn}.
get_line([Line|_]) when is_integer(Line) -> Line;
get_line([_|Tail]) -> get_line(Tail);
get_line([]) -> -1.
-get_file([{file, File}|_]) -> File;
-get_file([_|Tail]) -> get_file(Tail).
+get_file(Codeserver, Module, [{file, FakeFile}|_]) ->
+ dialyzer_codeserver:translate_fake_file(Codeserver, Module, FakeFile);
+get_file(Codeserver, Module, [_|Tail]) ->
+ get_file(Codeserver, Module, Tail).
diff --git a/lib/dialyzer/src/dialyzer_callgraph.erl b/lib/dialyzer/src/dialyzer_callgraph.erl
index 227ee2a892..6387f3d1e4 100644
--- a/lib/dialyzer/src/dialyzer_callgraph.erl
+++ b/lib/dialyzer/src/dialyzer_callgraph.erl
@@ -40,7 +40,7 @@
module_postorder_from_funs/2,
new/0,
get_depends_on/2,
- get_required_by/2,
+ %% get_required_by/2,
in_neighbours/2,
renew_race_info/4,
renew_race_code/2,
@@ -112,7 +112,11 @@
-opaque callgraph() :: #callgraph{}.
--type active_digraph() :: {'d', digraph:graph()} | {'e', ets:tid(), ets:tid()}.
+-type active_digraph() :: {'d', digraph:graph()}
+ | {'e',
+ Out :: ets:tid(),
+ In :: ets:tid(),
+ Map :: ets:tid()}.
%%----------------------------------------------------------------------
@@ -241,23 +245,29 @@ find_non_local_calls([], Set) ->
-spec get_depends_on(scc() | module(), callgraph()) -> [scc()].
-get_depends_on(SCC, #callgraph{active_digraph = {'e', Out, _In}}) ->
- case ets_lookup_dict(SCC, Out) of
- {ok, Value} -> Value;
- error -> []
- end;
+get_depends_on(SCC, #callgraph{active_digraph = {'e', Out, _In, Maps}}) ->
+ lookup_scc(SCC, Out, Maps);
get_depends_on(SCC, #callgraph{active_digraph = {'d', DG}}) ->
digraph:out_neighbours(DG, SCC).
--spec get_required_by(scc() | module(), callgraph()) -> [scc()].
-
-get_required_by(SCC, #callgraph{active_digraph = {'e', _Out, In}}) ->
- case ets_lookup_dict(SCC, In) of
- {ok, Value} -> Value;
+%% -spec get_required_by(scc() | module(), callgraph()) -> [scc()].
+
+%% get_required_by(SCC, #callgraph{active_digraph = {'e', _Out, In, Maps}}) ->
+%% lookup_scc(SCC, In, Maps);
+%% get_required_by(SCC, #callgraph{active_digraph = {'d', DG}}) ->
+%% digraph:in_neighbours(DG, SCC).
+
+lookup_scc(SCC, Table, Maps) ->
+ case ets_lookup_dict({'scc', SCC}, Maps) of
+ {ok, SCCInt} ->
+ case ets_lookup_dict(SCCInt, Table) of
+ {ok, Ints} ->
+ [ets:lookup_element(Maps, Int, 2) || Int <- Ints];
+ error ->
+ []
+ end;
error -> []
- end;
-get_required_by(SCC, #callgraph{active_digraph = {'d', DG}}) ->
- digraph:in_neighbours(DG, SCC).
+ end.
%%----------------------------------------------------------------------
%% Handling of modules & SCCs
@@ -275,9 +285,11 @@ module_postorder(#callgraph{digraph = DG}) ->
Nodes = sets:from_list([M || {M,_F,_A} <- digraph_vertices(DG)]),
MDG = digraph:new([acyclic]),
digraph_confirm_vertices(sets:to_list(Nodes), MDG),
- Foreach = fun({M1,M2}) -> digraph:add_edge(MDG, M1, M2) end,
+ Foreach = fun({M1,M2}) -> _ = digraph:add_edge(MDG, M1, M2) end,
lists:foreach(Foreach, sets:to_list(Edges)),
- {digraph_utils:topsort(MDG), {'d', MDG}}.
+ %% The out-neighbors of a vertex are the vertices called directly.
+ %% The used vertices are to occur *before* the calling vertex:
+ {lists:reverse(digraph_utils:topsort(MDG)), {'d', MDG}}.
edge_fold({{M1,_,_},{M2,_,_}}, Set) ->
case M1 =/= M2 of
@@ -295,7 +307,7 @@ module_deps(#callgraph{digraph = DG}) ->
Nodes = sets:from_list([M || {M,_F,_A} <- digraph_vertices(DG)]),
MDG = digraph:new(),
digraph_confirm_vertices(sets:to_list(Nodes), MDG),
- Foreach = fun({M1,M2}) -> digraph:add_edge(MDG, M1, M2) end,
+ Foreach = fun({M1,M2}) -> check_add_edge(MDG, M1, M2) end,
lists:foreach(Foreach, sets:to_list(Edges)),
Deps = [{N, ordsets:from_list(digraph:in_neighbours(MDG, N))}
|| N <- sets:to_list(Nodes)],
@@ -353,7 +365,7 @@ ets_lookup_set(Key, Table) ->
%% The core tree must be labeled as by cerl_trees:label/1 (or /2).
%% The set of labels in the tree must be disjoint from the set of
-%% labels already occuring in the callgraph.
+%% labels already occurring in the callgraph.
-spec scan_core_tree(cerl:c_module(), callgraph()) ->
{[mfa_or_funlbl()], [callgraph_edge()]}.
@@ -542,9 +554,21 @@ digraph_add_edge(From, To, DG) ->
false -> digraph:add_vertex(DG, To);
{To, _} -> ok
end,
- digraph:add_edge(DG, {From, To}, From, To, []),
+ check_add_edge(DG, {From, To}, From, To, []),
ok.
+check_add_edge(G, V1, V2) ->
+ case digraph:add_edge(G, V1, V2) of
+ {error, Error} -> exit({add_edge, V1, V2, Error});
+ _Edge -> ok
+ end.
+
+check_add_edge(G, E, V1, V2, L) ->
+ case digraph:add_edge(G, E, V1, V2, L) of
+ {error, Error} -> exit({add_edge, E, V1, V2, L, Error});
+ _Edge -> ok
+ end.
+
digraph_confirm_vertices([MFA|Left], DG) ->
digraph:add_vertex(DG, MFA, confirmed),
digraph_confirm_vertices(Left, DG);
@@ -575,9 +599,10 @@ digraph_delete(DG) ->
active_digraph_delete({'d', DG}) ->
digraph:delete(DG);
-active_digraph_delete({'e', Out, In}) ->
+active_digraph_delete({'e', Out, In, Maps}) ->
ets:delete(Out),
- ets:delete(In).
+ ets:delete(In),
+ ets:delete(Maps).
digraph_edges(DG) ->
digraph:edges(DG).
@@ -751,37 +776,53 @@ to_ps(#callgraph{} = CG, File, Args) ->
ok.
condensation(G) ->
- SCs = digraph_utils:strong_components(G),
- V2I = ets:new(condensation_v2i, []),
- I2C = ets:new(condensation_i2c, []),
- I2I = ets:new(condensation_i2i, [bag]),
- CFun =
- fun(SC, N) ->
- lists:foreach(fun(V) -> true = ets:insert(V2I, {V,N}) end, SC),
- true = ets:insert(I2C, {N, SC}),
- N + 1
- end,
- lists:foldl(CFun, 1, SCs),
- Fun1 =
- fun({V1, V2}) ->
- I1 = ets:lookup_element(V2I, V1, 2),
- I2 = ets:lookup_element(V2I, V2, 2),
- I1 =:= I2 orelse ets:insert(I2I, {I1, I2})
- end,
- lists:foreach(Fun1, digraph:edges(G)),
- Fun3 =
- fun({I1, I2}, {Out, In}) ->
- SC1 = ets:lookup_element(I2C, I1, 2),
- SC2 = ets:lookup_element(I2C, I2, 2),
- {dict:append(SC1, SC2, Out), dict:append(SC2, SC1, In)}
- end,
- {OutDict, InDict} = ets:foldl(Fun3, {dict:new(), dict:new()}, I2I),
- [OutETS, InETS] =
- [ets:new(Name,[{read_concurrency, true}]) ||
- Name <- [callgraph_deps_out, callgraph_deps_in]],
- ets:insert(OutETS, dict:to_list(OutDict)),
- ets:insert(InETS, dict:to_list(InDict)),
- ets:delete(V2I),
- ets:delete(I2C),
- ets:delete(I2I),
- {{'e', OutETS, InETS}, SCs}.
+ erlang:garbage_collect(), % reduce heap size
+ {Pid, Ref} = erlang:spawn_monitor(do_condensation(G, self())),
+ receive {'DOWN', Ref, process, Pid, Result} ->
+ {SCCInts, OutETS, InETS, MapsETS} = Result,
+ NewSCCs = [ets:lookup_element(MapsETS, SCCInt, 2) || SCCInt <- SCCInts],
+ {{'e', OutETS, InETS, MapsETS}, NewSCCs}
+ end.
+
+-spec do_condensation(digraph:graph(), pid()) -> fun(() -> no_return()).
+
+do_condensation(G, Parent) ->
+ fun() ->
+ [OutETS, InETS, MapsETS] =
+ [ets:new(Name,[{read_concurrency, true}]) ||
+ Name <- [callgraph_deps_out, callgraph_deps_in, callgraph_scc_map]],
+ SCCs = digraph_utils:strong_components(G),
+ %% Assign unique numbers to SCCs:
+ Ints = lists:seq(1, length(SCCs)),
+ IntToSCC = lists:zip(Ints, SCCs),
+ IntScc = sofs:relation(IntToSCC, [{int, scc}]),
+ %% Create mapping from unique integers to SCCs:
+ ets:insert(MapsETS, IntToSCC),
+      %% Substitute strong components for vertices in edges using the
+ %% unique numbers:
+ C2V = sofs:relation([{SC, V} || SC <- SCCs, V <- SC], [{scc, v}]),
+      I2V = sofs:relative_product(IntScc, C2V), % [{int, v}]
+ Es = sofs:relation(digraph:edges(G), [{v, v}]),
+ R1 = sofs:relative_product(I2V, Es),
+ R2 = sofs:relative_product(I2V, sofs:converse(R1)),
+ R2Strict = sofs:strict_relation(R2),
+ %% Create out-neighbours:
+ Out = sofs:relation_to_family(sofs:converse(R2Strict)),
+ ets:insert(OutETS, sofs:to_external(Out)),
+ %% Sort the SCCs topologically:
+ DG = sofs:family_to_digraph(Out),
+ lists:foreach(fun(I) -> digraph:add_vertex(DG, I) end, Ints),
+ SCCInts0 = digraph_utils:topsort(DG),
+ digraph:delete(DG),
+ %% The out-neighbors of a vertex are the vertices called directly.
+ %% The used vertices are to occur *before* the calling vertex:
+ SCCInts = lists:reverse(SCCInts0),
+ %% Create in-neighbours:
+ In = sofs:relation_to_family(R2Strict),
+ ets:insert(InETS, sofs:to_external(In)),
+ %% Create mapping from SCCs to unique integers:
+ ets:insert(MapsETS, lists:zip([{'scc', SCC} || SCC<- SCCs], Ints)),
+ lists:foreach(fun(E) -> true = ets:give_away(E, Parent, any)
+ end, [OutETS, InETS, MapsETS]),
+ exit({SCCInts, OutETS, InETS, MapsETS})
+ end.
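
A small worked example (illustration only, not from the patch) of the sofs pipeline used by do_condensation/2 above, written as Erlang shell expressions for a three-vertex graph where a and b call each other and b calls c; it ends with the reversed topological sort that module_postorder/1 also switched to, so callee SCCs come before their callers:

    %% Two SCCs: [a,b] (number 1) and [c] (number 2); edges a->b, b->a, b->c.
    SCCs = [[a, b], [c]],
    Ints = lists:seq(1, length(SCCs)),
    IntScc = sofs:relation(lists:zip(Ints, SCCs), [{int, scc}]),
    C2V = sofs:relation([{SC, V} || SC <- SCCs, V <- SC], [{scc, v}]),
    I2V = sofs:relative_product(IntScc, C2V),            %% {SccInt, Vertex} pairs
    Es  = sofs:relation([{a, b}, {b, a}, {b, c}], [{v, v}]),
    R1  = sofs:relative_product(I2V, Es),
    R2  = sofs:relative_product(I2V, sofs:converse(R1)),
    Out = sofs:relation_to_family(sofs:converse(sofs:strict_relation(R2))),
    [{1, [2]}] = sofs:to_external(Out),                  %% SCC [a,b] depends on SCC [c]
    DG = sofs:family_to_digraph(Out),
    [2, 1] = lists:reverse(digraph_utils:topsort(DG)),   %% callees before callers
    digraph:delete(DG).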
diff --git a/lib/dialyzer/src/dialyzer_cl.erl b/lib/dialyzer/src/dialyzer_cl.erl
index b43711446b..8500c59ebe 100644
--- a/lib/dialyzer/src/dialyzer_cl.erl
+++ b/lib/dialyzer/src/dialyzer_cl.erl
@@ -30,6 +30,8 @@
-record(cl_state,
{backend_pid :: pid() | 'undefined',
+ code_server = none :: 'none'
+ | dialyzer_codeserver:codeserver(),
erlang_mode = false :: boolean(),
external_calls = [] :: [mfa()],
external_types = [] :: [mfa()],
@@ -630,8 +632,11 @@ cl_loop(State, LogCache) ->
{BackendPid, warnings, Warnings} ->
NewState = store_warnings(State, Warnings),
cl_loop(NewState, LogCache);
- {BackendPid, done, NewPlt, _NewDocPlt} ->
- return_value(State, NewPlt);
+ {BackendPid, cserver, CodeServer, _Plt} -> % Plt is ignored
+ NewState = State#cl_state{code_server = CodeServer},
+ cl_loop(NewState, LogCache);
+ {BackendPid, done, NewMiniPlt, _NewDocPlt} ->
+ return_value(State, NewMiniPlt);
{BackendPid, ext_calls, ExtCalls} ->
cl_loop(State#cl_state{external_calls = ExtCalls}, LogCache);
{BackendPid, ext_types, ExtTypes} ->
@@ -687,15 +692,34 @@ cl_error(State, Msg) ->
maybe_close_output_file(State),
throw({dialyzer_error, lists:flatten(Msg)}).
-return_value(State = #cl_state{erlang_mode = ErlangMode,
+return_value(State = #cl_state{code_server = CodeServer,
+ erlang_mode = ErlangMode,
mod_deps = ModDeps,
output_plt = OutputPlt,
plt_info = PltInfo,
stored_warnings = StoredWarnings},
- Plt) ->
+ MiniPlt) ->
+ %% Just for now:
+ case CodeServer =:= none of
+ true ->
+ ok;
+ false ->
+ dialyzer_codeserver:delete(CodeServer)
+ end,
case OutputPlt =:= none of
- true -> ok;
- false -> dialyzer_plt:to_file(OutputPlt, Plt, ModDeps, PltInfo)
+ true ->
+ dialyzer_plt:delete(MiniPlt);
+ false ->
+ Fun = to_file_fun(OutputPlt, MiniPlt, ModDeps, PltInfo),
+ {Pid, Ref} = erlang:spawn_monitor(Fun),
+ dialyzer_plt:give_away(MiniPlt, Pid),
+ Pid ! go,
+ receive {'DOWN', Ref, process, Pid, Result} ->
+ case Result of
+ ok -> ok;
+ Thrown -> throw(Thrown)
+ end
+ end
end,
UnknownWarnings = unknown_warnings(State),
RetValue =
@@ -716,6 +740,16 @@ return_value(State = #cl_state{erlang_mode = ErlangMode,
{RetValue, set_warning_id(AllWarnings)}
end.
+-spec to_file_fun(_, _, _, _) -> fun(() -> no_return()).
+
+to_file_fun(Filename, MiniPlt, ModDeps, PltInfo) ->
+ fun() ->
+ receive go -> ok end,
+ Plt = dialyzer_plt:restore_full_plt(MiniPlt),
+ dialyzer_plt:to_file(Filename, Plt, ModDeps, PltInfo),
+ exit(ok)
+ end.
+
unknown_warnings(State = #cl_state{legal_warnings = LegalWarnings}) ->
Unknown = case ordsets:is_element(?WARN_UNKNOWN, LegalWarnings) of
true ->
diff --git a/lib/dialyzer/src/dialyzer_codeserver.erl b/lib/dialyzer/src/dialyzer_codeserver.erl
index dd383ea828..a1a7370eff 100644
--- a/lib/dialyzer/src/dialyzer_codeserver.erl
+++ b/lib/dialyzer/src/dialyzer_codeserver.erl
@@ -22,18 +22,25 @@
-module(dialyzer_codeserver).
-export([delete/1,
- finalize_contracts/3,
+ store_temp_contracts/4,
+ give_away/2,
+ finalize_contracts/1,
finalize_exported_types/2,
- finalize_records/2,
+ finalize_records/1,
get_contracts/1,
get_callbacks/1,
get_exported_types/1,
+ extract_exported_types/1,
get_exports/1,
- get_records/1,
+ get_records_table/1,
+ extract_records/1,
get_next_core_label/1,
- get_temp_contracts/1,
+ get_temp_contracts/2,
+ all_temp_modules/1,
+ store_contracts/4,
get_temp_exported_types/1,
- get_temp_records/1,
+ get_temp_records_table/1,
+ lookup_temp_mod_records/2,
insert/3,
insert_exports/2,
insert_temp_exported_types/2,
@@ -41,30 +48,29 @@
is_exported/2,
lookup_mod_code/2,
lookup_mfa_code/2,
+ lookup_mfa_var_label/2,
lookup_mod_records/2,
lookup_mod_contracts/2,
lookup_mfa_contract/2,
lookup_meta_info/2,
new/0,
set_next_core_label/2,
- set_temp_records/2,
store_temp_records/3,
- store_temp_contracts/4]).
+ translate_fake_file/3]).
--export_type([codeserver/0, fun_meta_info/0]).
+-export_type([codeserver/0, fun_meta_info/0, contracts/0]).
-include("dialyzer.hrl").
%%--------------------------------------------------------------------
-type dict_ets() :: ets:tid().
+-type map_ets() :: ets:tid().
-type set_ets() :: ets:tid().
-type types() :: erl_types:type_table().
--type mod_records() :: dict:dict(module(), types()).
--type contracts() :: dict:dict(mfa(),dialyzer_contracts:file_contract()).
--type mod_contracts() :: dict:dict(module(), contracts()).
+-type contracts() :: #{mfa() => dialyzer_contracts:file_contract()}.
%% A property-list of data compiled from -compile and -dialyzer attributes.
-type meta_info() :: [{{'nowarn_function' | dial_warn_tag()},
@@ -74,16 +80,16 @@
-record(codeserver, {next_core_label = 0 :: label(),
code :: dict_ets(),
- exported_types :: set_ets() | 'undefined', % set(mfa())
- records :: dict_ets() | 'undefined',
- contracts :: dict_ets() | 'undefined',
- callbacks :: dict_ets() | 'undefined',
+ exported_types :: 'clean' | set_ets(), % set(mfa())
+ records :: 'clean' | map_ets(),
+ contracts :: map_ets(),
+ callbacks :: map_ets(),
fun_meta_info :: dict_ets(), % {mfa(), meta_info()}
exports :: 'clean' | set_ets(), % set(mfa())
temp_exported_types :: 'clean' | set_ets(), % set(mfa())
- temp_records :: 'clean' | dict_ets(),
- temp_contracts :: 'clean' | dict_ets(),
- temp_callbacks :: 'clean' | dict_ets()
+ temp_records :: 'clean' | map_ets(),
+ temp_contracts :: 'clean' | map_ets(),
+ temp_callbacks :: 'clean' | map_ets()
}).
-opaque codeserver() :: #codeserver{}.
@@ -97,13 +103,10 @@ ets_dict_find(Key, Table) ->
_:_ -> error
end.
-ets_dict_store(Key, Element, Table) ->
+ets_map_store(Key, Element, Table) ->
true = ets:insert(Table, {Key, Element}),
Table.
-ets_dict_store_dict(Dict, Table) ->
- true = ets:insert(Table, dict:to_list(Dict)).
-
ets_dict_to_dict(Table) ->
Fold = fun({Key,Value}, Dict) -> dict:store(Key, Value, Dict) end,
ets:foldl(Fold, dict:new(), Table).
@@ -121,9 +124,6 @@ ets_set_to_set(Table) ->
Fold = fun({E}, Set) -> sets:add_element(E, Set) end,
ets:foldl(Fold, sets:new(), Table).
-ets_read_concurrent_table(Name) ->
- ets:new(Name, [{read_concurrency, true}]).
-
%%--------------------------------------------------------------------
-spec new() -> codeserver().
@@ -131,6 +131,13 @@ ets_read_concurrent_table(Name) ->
new() ->
CodeOptions = [compressed, public, {read_concurrency, true}],
Code = ets:new(dialyzer_codeserver_code, CodeOptions),
+ ReadOptions = [compressed, {read_concurrency, true}],
+ [Contracts, Callbacks, Records, ExportedTypes] =
+ [ets:new(Name, ReadOptions) ||
+ Name <- [dialyzer_codeserver_contracts,
+ dialyzer_codeserver_callbacks,
+ dialyzer_codeserver_records,
+ dialyzer_codeserver_exported_types]],
TempOptions = [public, {write_concurrency, true}],
[Exports, FunMetaInfo, TempExportedTypes, TempRecords, TempContracts,
TempCallbacks] =
@@ -143,6 +150,10 @@ new() ->
#codeserver{code = Code,
exports = Exports,
fun_meta_info = FunMetaInfo,
+ exported_types = ExportedTypes,
+ records = Records,
+ contracts = Contracts,
+ callbacks = Callbacks,
temp_exported_types = TempExportedTypes,
temp_records = TempRecords,
temp_contracts = TempContracts,
@@ -150,11 +161,8 @@ new() ->
-spec delete(codeserver()) -> 'ok'.
-delete(#codeserver{code = Code, exported_types = ExportedTypes,
- records = Records, contracts = Contracts,
- callbacks = Callbacks}) ->
- lists:foreach(fun ets:delete/1,
- [Code, ExportedTypes, Records, Contracts, Callbacks]).
+delete(CServer) ->
+ lists:foreach(fun(Table) -> true = ets:delete(Table) end, tables(CServer)).
-spec insert(atom(), cerl:c_module(), codeserver()) -> codeserver().
@@ -163,13 +171,15 @@ insert(Mod, ModCode, CS) ->
Exports = cerl:module_exports(ModCode),
Attrs = cerl:module_attrs(ModCode),
Defs = cerl:module_defs(ModCode),
+ {Files, SmallDefs} = compress_file_anno(Defs),
As = cerl:get_ann(ModCode),
Funs =
[{{Mod, cerl:fname_id(Var), cerl:fname_arity(Var)},
- Val} || Val = {Var, _Fun} <- Defs],
- Keys = [Key || {Key, _Value} <- Funs],
+ Val, {Var, cerl_trees:get_label(Fun)}} || Val = {Var, Fun} <- SmallDefs],
+ Keys = [Key || {Key, _Value, _Label} <- Funs],
ModEntry = {Mod, {Name, Exports, Attrs, Keys, As}},
- true = ets:insert(CS#codeserver.code, [ModEntry|Funs]),
+ ModFileEntry = {{mod, Mod}, Files},
+ true = ets:insert(CS#codeserver.code, [ModEntry, ModFileEntry|Funs]),
CS.
-spec get_temp_exported_types(codeserver()) -> sets:set(mfa()).
@@ -206,6 +216,11 @@ is_exported(MFA, #codeserver{exports = Exports}) ->
get_exported_types(#codeserver{exported_types = ExpTypes}) ->
ets_set_to_set(ExpTypes).
+-spec extract_exported_types(codeserver()) -> {codeserver(), set_ets()}.
+
+extract_exported_types(#codeserver{exported_types = ExpTypes} = CS) ->
+ {CS#codeserver{exported_types = 'clean'}, ExpTypes}.
+
-spec get_exports(codeserver()) -> sets:set(mfa()).
get_exports(#codeserver{exports = Exports}) ->
@@ -213,12 +228,12 @@ get_exports(#codeserver{exports = Exports}) ->
-spec finalize_exported_types(sets:set(mfa()), codeserver()) -> codeserver().
-finalize_exported_types(Set, CS) ->
- ExportedTypes = ets_read_concurrent_table(dialyzer_codeserver_exported_types),
+finalize_exported_types(Set,
+ #codeserver{exported_types = ExportedTypes,
+ temp_exported_types = TempETypes} = CS) ->
true = ets_set_insert_set(Set, ExportedTypes),
- TempExpTypes = CS#codeserver.temp_exported_types,
- true = ets:delete(TempExpTypes),
- CS#codeserver{exported_types = ExportedTypes, temp_exported_types = clean}.
+ true = ets:delete(TempETypes),
+ CS#codeserver{temp_exported_types = clean}.
-spec lookup_mod_code(atom(), codeserver()) -> cerl:c_module().
@@ -230,6 +245,11 @@ lookup_mod_code(Mod, CS) when is_atom(Mod) ->
lookup_mfa_code({_M, _F, _A} = MFA, CS) ->
table__lookup(CS#codeserver.code, MFA).
+-spec lookup_mfa_var_label(mfa(), codeserver()) -> {cerl:c_var(), label()}.
+
+lookup_mfa_var_label({_M, _F, _A} = MFA, CS) ->
+ ets:lookup_element(CS#codeserver.code, MFA, 3).
+
-spec get_next_core_label(codeserver()) -> label().
get_next_core_label(#codeserver{next_core_label = NCL}) ->
@@ -244,53 +264,58 @@ set_next_core_label(NCL, CS) ->
lookup_mod_records(Mod, #codeserver{records = RecDict}) when is_atom(Mod) ->
case ets_dict_find(Mod, RecDict) of
- error -> dict:new();
- {ok, Dict} -> Dict
+ error -> maps:new();
+ {ok, Map} -> Map
end.
--spec get_records(codeserver()) -> mod_records().
+-spec get_records_table(codeserver()) -> map_ets().
+
+get_records_table(#codeserver{records = RecDict}) ->
+ RecDict.
-get_records(#codeserver{records = RecDict}) ->
- ets_dict_to_dict(RecDict).
+-spec extract_records(codeserver()) -> {codeserver(), map_ets()}.
+
+extract_records(#codeserver{records = RecDict} = CS) ->
+ {CS#codeserver{records = clean}, RecDict}.
-spec store_temp_records(module(), types(), codeserver()) -> codeserver().
-store_temp_records(Mod, Dict, #codeserver{temp_records = TempRecDict} = CS)
+store_temp_records(Mod, Map, #codeserver{temp_records = TempRecDict} = CS)
when is_atom(Mod) ->
- case dict:size(Dict) =:= 0 of
+ case maps:size(Map) =:= 0 of
true -> CS;
- false -> CS#codeserver{temp_records = ets_dict_store(Mod, Dict, TempRecDict)}
+ false -> CS#codeserver{temp_records = ets_map_store(Mod, Map, TempRecDict)}
end.
--spec get_temp_records(codeserver()) -> mod_records().
+-spec get_temp_records_table(codeserver()) -> map_ets().
-get_temp_records(#codeserver{temp_records = TempRecDict}) ->
- ets_dict_to_dict(TempRecDict).
+get_temp_records_table(#codeserver{temp_records = TempRecDict}) ->
+ TempRecDict.
--spec set_temp_records(mod_records(), codeserver()) -> codeserver().
+-spec lookup_temp_mod_records(module(), codeserver()) -> types().
-set_temp_records(Dict, CS) ->
- true = ets:delete(CS#codeserver.temp_records),
- TempRecords = ets:new(dialyzer_codeserver_temp_records,[]),
- true = ets_dict_store_dict(Dict, TempRecords),
- CS#codeserver{temp_records = TempRecords}.
+lookup_temp_mod_records(Mod, #codeserver{temp_records = TempRecDict}) ->
+ case ets_dict_find(Mod, TempRecDict) of
+ error -> maps:new();
+ {ok, Map} -> Map
+ end.
--spec finalize_records(mod_records(), codeserver()) -> codeserver().
+-spec finalize_records(codeserver()) -> codeserver().
-finalize_records(Dict, CS) ->
- true = ets:delete(CS#codeserver.temp_records),
- Records = ets_read_concurrent_table(dialyzer_codeserver_records),
- true = ets_dict_store_dict(Dict, Records),
- CS#codeserver{records = Records, temp_records = clean}.
+finalize_records(#codeserver{temp_records = TmpRecords,
+ records = Records} = CS) ->
+ true = ets:delete(Records),
+ ets:rename(TmpRecords, dialyzer_codeserver_records),
+ CS#codeserver{temp_records = clean, records = TmpRecords}.
-spec lookup_mod_contracts(atom(), codeserver()) -> contracts().
lookup_mod_contracts(Mod, #codeserver{contracts = ContDict})
when is_atom(Mod) ->
case ets_dict_find(Mod, ContDict) of
- error -> dict:new();
+ error -> maps:new();
{ok, Keys} ->
- dict:from_list([get_file_contract(Key, ContDict)|| Key <- Keys])
+ maps:from_list([get_file_contract(Key, ContDict)|| Key <- Keys])
end.
get_file_contract(Key, ContDict) ->
@@ -310,10 +335,13 @@ lookup_meta_info(MorMFA, #codeserver{fun_meta_info = FunMetaInfo}) ->
{ok, PropList} -> PropList
end.
--spec get_contracts(codeserver()) -> mod_contracts().
+-spec get_contracts(codeserver()) ->
+ dict:dict(mfa(), dialyzer_contracts:file_contract()).
get_contracts(#codeserver{contracts = ContDict}) ->
- ets_dict_to_dict(ContDict).
+ dict:filter(fun({_M, _F, _A}, _) -> true;
+ (_, _) -> false
+ end, ets_dict_to_dict(ContDict)).
-spec get_callbacks(codeserver()) -> list().
@@ -323,48 +351,79 @@ get_callbacks(#codeserver{callbacks = CallbDict}) ->
-spec store_temp_contracts(module(), contracts(), contracts(), codeserver()) ->
codeserver().
-store_temp_contracts(Mod, SpecDict, CallbackDict,
+store_temp_contracts(Mod, SpecMap, CallbackMap,
#codeserver{temp_contracts = Cn,
temp_callbacks = Cb} = CS)
when is_atom(Mod) ->
- CS1 =
- case dict:size(SpecDict) =:= 0 of
- true -> CS;
- false ->
- CS#codeserver{temp_contracts = ets_dict_store(Mod, SpecDict, Cn)}
- end,
- case dict:size(CallbackDict) =:= 0 of
- true -> CS1;
- false ->
- CS1#codeserver{temp_callbacks = ets_dict_store(Mod, CallbackDict, Cb)}
- end.
+  %% Make sure Mod is stored even if there are no callbacks or
+ %% contracts.
+ CS1 = CS#codeserver{temp_contracts = ets_map_store(Mod, SpecMap, Cn)},
+ CS1#codeserver{temp_callbacks = ets_map_store(Mod, CallbackMap, Cb)}.
--spec get_temp_contracts(codeserver()) -> {mod_contracts(), mod_contracts()}.
+-spec all_temp_modules(codeserver()) -> [module()].
-get_temp_contracts(#codeserver{temp_contracts = TempContDict,
- temp_callbacks = TempCallDict}) ->
- {ets_dict_to_dict(TempContDict), ets_dict_to_dict(TempCallDict)}.
+all_temp_modules(#codeserver{temp_contracts = TempContTable}) ->
+ ets:select(TempContTable, [{{'$1', '$2'}, [], ['$1']}]).
--spec finalize_contracts(mod_contracts(), mod_contracts(), codeserver()) ->
- codeserver().
+-spec store_contracts(module(), contracts(), contracts(), codeserver()) ->
+ codeserver().
-finalize_contracts(SpecDict, CallbackDict, CS) ->
- Contracts = ets_read_concurrent_table(dialyzer_codeserver_contracts),
- Callbacks = ets_read_concurrent_table(dialyzer_codeserver_callbacks),
- Contracts = dict:fold(fun decompose_spec_dict/3, Contracts, SpecDict),
- Callbacks = dict:fold(fun decompose_cb_dict/3, Callbacks, CallbackDict),
- CS#codeserver{contracts = Contracts, callbacks = Callbacks,
- temp_contracts = clean, temp_callbacks = clean}.
+store_contracts(Mod, SpecMap, CallbackMap, CS) ->
+ #codeserver{contracts = SpecDict, callbacks = CallbackDict} = CS,
+ Keys = maps:keys(SpecMap),
+ true = ets:insert(SpecDict, maps:to_list(SpecMap)),
+ true = ets:insert(SpecDict, {Mod, Keys}),
+ true = ets:insert(CallbackDict, maps:to_list(CallbackMap)),
+ CS.
-decompose_spec_dict(Mod, Dict, Table) ->
- Keys = dict:fetch_keys(Dict),
- true = ets:insert(Table, dict:to_list(Dict)),
- true = ets:insert(Table, {Mod, Keys}),
- Table.
-
-decompose_cb_dict(_Mod, Dict, Table) ->
- true = ets:insert(Table, dict:to_list(Dict)),
- Table.
+-spec get_temp_contracts(module(), codeserver()) ->
+ {contracts(), contracts()}.
+
+get_temp_contracts(Mod, #codeserver{temp_contracts = TempContDict,
+ temp_callbacks = TempCallDict}) ->
+ [{Mod, Contracts}] = ets:lookup(TempContDict, Mod),
+ true = ets:delete(TempContDict, Mod),
+ [{Mod, Callbacks}] = ets:lookup(TempCallDict, Mod),
+ true = ets:delete(TempCallDict, Mod),
+ {Contracts, Callbacks}.
+
+-spec give_away(codeserver(), pid()) -> 'ok'.
+
+give_away(CServer, Pid) ->
+ lists:foreach(fun(Table) -> true = ets:give_away(Table, Pid, any)
+ end, tables(CServer)).
+
+tables(#codeserver{code = Code,
+ fun_meta_info = FunMetaInfo,
+ exports = Exports,
+ temp_exported_types = TempExpTypes,
+ temp_records = TempRecords,
+ temp_contracts = TempContracts,
+ temp_callbacks = TempCallbacks,
+ exported_types = ExportedTypes,
+ records = Records,
+ contracts = Contracts,
+ callbacks = Callbacks}) ->
+ [Table || Table <- [Code, FunMetaInfo, Exports, TempExpTypes,
+ TempRecords, TempContracts, TempCallbacks,
+ ExportedTypes, Records, Contracts, Callbacks],
+ Table =/= clean].
+
+-spec finalize_contracts(codeserver()) -> codeserver().
+
+finalize_contracts(#codeserver{temp_contracts = TempContDict,
+ temp_callbacks = TempCallDict} = CS) ->
+ true = ets:delete(TempContDict),
+ true = ets:delete(TempCallDict),
+ CS#codeserver{temp_contracts = clean, temp_callbacks = clean}.
+
+-spec translate_fake_file(codeserver(), module(), file:filename()) ->
+ file:filename().
+
+translate_fake_file(#codeserver{code = Code}, Module, FakeFile) ->
+ Files = ets:lookup_element(Code, {mod, Module}, 2),
+ {FakeFile, File} = lists:keyfind(FakeFile, 1, Files),
+ File.
table__lookup(TablePid, M) when is_atom(M) ->
{Name, Exports, Attrs, Keys, As} = ets:lookup_element(TablePid, M, 2),
@@ -372,3 +431,25 @@ table__lookup(TablePid, M) when is_atom(M) ->
cerl:ann_c_module(As, Name, Exports, Attrs, Defs);
table__lookup(TablePid, MFA) ->
ets:lookup_element(TablePid, MFA, 2).
+
+compress_file_anno(Term) ->
+ {Files, SmallTerm} = compress_file_anno(Term, []),
+ {[{FakeFile, File} || {File, {file, FakeFile}} <- Files], SmallTerm}.
+
+compress_file_anno({file, F}, Fs) when is_list(F) ->
+ case lists:keyfind(F, 1, Fs) of
+ false ->
+ I = integer_to_list(length(Fs)),
+ FileI = {file, I},
+ NFs = [{F, FileI}|Fs],
+ {NFs, FileI};
+ {F, FileI} -> {Fs, FileI}
+ end;
+compress_file_anno(T, Fs) when is_tuple(T) ->
+ {NFs, NL} = compress_file_anno(tuple_to_list(T), Fs),
+ {NFs, list_to_tuple(NL)};
+compress_file_anno([E|L], Fs) ->
+ {Fs1, NE} = compress_file_anno(E, Fs),
+ {NFs, NL} = compress_file_anno(L, Fs1),
+ {NFs, [NE|NL]};
+compress_file_anno(T, Fs) -> {Fs, T}.
diff --git a/lib/dialyzer/src/dialyzer_contracts.erl b/lib/dialyzer/src/dialyzer_contracts.erl
index 7cc4a9d3eb..5f24b5a668 100644
--- a/lib/dialyzer/src/dialyzer_contracts.erl
+++ b/lib/dialyzer/src/dialyzer_contracts.erl
@@ -140,44 +140,44 @@ sequence([H], _Delimiter) -> H;
sequence([H|T], Delimiter) -> H ++ Delimiter ++ sequence(T, Delimiter).
-spec process_contract_remote_types(dialyzer_codeserver:codeserver()) ->
- dialyzer_codeserver:codeserver().
+ dialyzer_codeserver:codeserver().
process_contract_remote_types(CodeServer) ->
- {TmpContractDict, TmpCallbackDict} =
- dialyzer_codeserver:get_temp_contracts(CodeServer),
+ Mods = dialyzer_codeserver:all_temp_modules(CodeServer),
+ RecordTable = dialyzer_codeserver:get_records_table(CodeServer),
ExpTypes = dialyzer_codeserver:get_exported_types(CodeServer),
- RecordDict = dialyzer_codeserver:get_records(CodeServer),
ContractFun =
fun({{_M, _F, _A}=MFA, {File, TmpContract, Xtra}}, C0) ->
#tmp_contract{contract_funs = CFuns, forms = Forms} = TmpContract,
{NewCs, C2} = lists:mapfoldl(fun(CFun, C1) ->
- CFun(ExpTypes, RecordDict, C1)
+ CFun(ExpTypes, RecordTable, C1)
end, C0, CFuns),
Args = general_domain(NewCs),
Contract = #contract{contracts = NewCs, args = Args, forms = Forms},
{{MFA, {File, Contract, Xtra}}, C2}
end,
ModuleFun =
- fun({ModuleName, ContractDict}, C3) ->
- {NewContractList, C4} =
- lists:mapfoldl(ContractFun, C3, dict:to_list(ContractDict)),
- {{ModuleName, dict:from_list(NewContractList)}, C4}
+ fun(ModuleName) ->
+ Cache = erl_types:cache__new(),
+ {ContractMap, CallbackMap} =
+ dialyzer_codeserver:get_temp_contracts(ModuleName, CodeServer),
+ {NewContractList, Cache1} =
+ lists:mapfoldl(ContractFun, Cache, maps:to_list(ContractMap)),
+ {NewCallbackList, _NewCache} =
+ lists:mapfoldl(ContractFun, Cache1, maps:to_list(CallbackMap)),
+ dialyzer_codeserver:store_contracts(ModuleName,
+ maps:from_list(NewContractList),
+ maps:from_list(NewCallbackList),
+ CodeServer)
end,
- Cache = erl_types:cache__new(),
- {NewContractList, C5} =
- lists:mapfoldl(ModuleFun, Cache, dict:to_list(TmpContractDict)),
- {NewCallbackList, _C6} =
- lists:mapfoldl(ModuleFun, C5, dict:to_list(TmpCallbackDict)),
- NewContractDict = dict:from_list(NewContractList),
- NewCallbackDict = dict:from_list(NewCallbackList),
- dialyzer_codeserver:finalize_contracts(NewContractDict, NewCallbackDict,
- CodeServer).
+ lists:foreach(ModuleFun, Mods),
+ dialyzer_codeserver:finalize_contracts(CodeServer).
-type opaques_fun() :: fun((module()) -> [erl_types:erl_type()]).
-type fun_types() :: dict:dict(label(), erl_types:type_table()).
--spec check_contracts([{mfa(), file_contract()}],
+-spec check_contracts(orddict:orddict(mfa(), file_contract()),
dialyzer_callgraph:callgraph(), fun_types(),
opaques_fun()) -> plt_contracts().
@@ -206,7 +206,7 @@ check_contracts(Contracts, Callgraph, FunTypes, FindOpaques) ->
error -> NewContracts
end
end,
- dict:fold(FoldFun, [], FunTypes).
+ orddict:from_list(dict:fold(FoldFun, [], FunTypes)).
%% Checks all components of a contract
-spec check_contract(#contract{}, erl_types:erl_type()) -> 'ok' | {'error', term()}.
@@ -390,7 +390,7 @@ solve_constraints(Contract, Call, Constraints) ->
%% ?debug("Inf: ~s\n", [erl_types:t_to_string(Inf)]),
%% erl_types:t_assign_variables_to_subtype(Contract, Inf).
--type contracts() :: dict:dict(mfa(),dialyzer_contracts:file_contract()).
+-type contracts() :: dialyzer_codeserver:contracts().
%% Checks the contracts for functions that are not implemented
-spec contracts_without_fun(contracts(), [_], dialyzer_callgraph:callgraph()) ->
@@ -400,12 +400,12 @@ contracts_without_fun(Contracts, AllFuns0, Callgraph) ->
AllFuns1 = [{dialyzer_callgraph:lookup_name(Label, Callgraph), Arity}
|| {Label, Arity} <- AllFuns0],
AllFuns2 = [{M, F, A} || {{ok, {M, F, _}}, A} <- AllFuns1],
- AllContractMFAs = dict:fetch_keys(Contracts),
+ AllContractMFAs = maps:keys(Contracts),
ErrorContractMFAs = AllContractMFAs -- AllFuns2,
[warn_spec_missing_fun(MFA, Contracts) || MFA <- ErrorContractMFAs].
warn_spec_missing_fun({M, F, A} = MFA, Contracts) ->
- {{File, Line}, _Contract, _Xtra} = dict:fetch(MFA, Contracts),
+ {{File, Line}, _Contract, _Xtra} = maps:get(MFA, Contracts),
WarningInfo = {File, Line, MFA},
{?WARN_CONTRACT_SYNTAX, WarningInfo, {spec_missing_fun, [M, F, A]}}.
@@ -438,11 +438,11 @@ insert_constraints([], Map) -> Map.
-spec store_tmp_contract(mfa(), file_line(), spec_data(), contracts(), types()) ->
contracts().
-store_tmp_contract(MFA, FileLine, {TypeSpec, Xtra}, SpecDict, RecordsDict) ->
+store_tmp_contract(MFA, FileLine, {TypeSpec, Xtra}, SpecMap, RecordsDict) ->
%% io:format("contract from form: ~p\n", [TypeSpec]),
TmpContract = contract_from_form(TypeSpec, MFA, RecordsDict, FileLine),
%% io:format("contract: ~p\n", [TmpContract]),
- dict:store(MFA, {FileLine, TmpContract, Xtra}, SpecDict).
+ maps:put(MFA, {FileLine, TmpContract, Xtra}, SpecMap).
contract_from_form(Forms, MFA, RecDict, FileLine) ->
{CFuns, Forms1} = contract_from_form(Forms, MFA, RecDict, FileLine, [], []),
@@ -451,10 +451,10 @@ contract_from_form(Forms, MFA, RecDict, FileLine) ->
contract_from_form([{type, _, 'fun', [_, _]} = Form | Left], MFA, RecDict,
FileLine, TypeAcc, FormAcc) ->
TypeFun =
- fun(ExpTypes, AllRecords, Cache) ->
+ fun(ExpTypes, RecordTable, Cache) ->
{NewType, NewCache} =
try
- from_form_with_check(Form, ExpTypes, MFA, AllRecords, Cache)
+ from_form_with_check(Form, ExpTypes, MFA, RecordTable, Cache)
catch
throw:{error, Msg} ->
{File, Line} = FileLine,
@@ -472,12 +472,12 @@ contract_from_form([{type, _L1, bounded_fun,
[{type, _L2, 'fun', [_, _]} = Form, Constr]}| Left],
MFA, RecDict, FileLine, TypeAcc, FormAcc) ->
TypeFun =
- fun(ExpTypes, AllRecords, Cache) ->
+ fun(ExpTypes, RecordTable, Cache) ->
{Constr1, VarTable, Cache1} =
- process_constraints(Constr, MFA, RecDict, ExpTypes, AllRecords,
+ process_constraints(Constr, MFA, RecDict, ExpTypes, RecordTable,
Cache),
{NewType, NewCache} =
- from_form_with_check(Form, ExpTypes, MFA, AllRecords,
+ from_form_with_check(Form, ExpTypes, MFA, RecordTable,
VarTable, Cache1),
NewTypeNoVars = erl_types:subst_all_vars_to_any(NewType),
{{NewTypeNoVars, Constr1}, NewCache}
@@ -488,28 +488,28 @@ contract_from_form([{type, _L1, bounded_fun,
contract_from_form([], _MFA, _RecDict, _FileLine, TypeAcc, FormAcc) ->
{lists:reverse(TypeAcc), lists:reverse(FormAcc)}.
-process_constraints(Constrs, MFA, RecDict, ExpTypes, AllRecords, Cache) ->
+process_constraints(Constrs, MFA, RecDict, ExpTypes, RecordTable, Cache) ->
{Init0, NewCache} = initialize_constraints(Constrs, MFA, RecDict, ExpTypes,
- AllRecords, Cache),
+ RecordTable, Cache),
Init = remove_cycles(Init0),
- constraints_fixpoint(Init, MFA, RecDict, ExpTypes, AllRecords, NewCache).
+ constraints_fixpoint(Init, MFA, RecDict, ExpTypes, RecordTable, NewCache).
-initialize_constraints(Constrs, MFA, RecDict, ExpTypes, AllRecords, Cache) ->
- initialize_constraints(Constrs, MFA, RecDict, ExpTypes, AllRecords,
+initialize_constraints(Constrs, MFA, RecDict, ExpTypes, RecordTable, Cache) ->
+ initialize_constraints(Constrs, MFA, RecDict, ExpTypes, RecordTable,
Cache, []).
-initialize_constraints([], _MFA, _RecDict, _ExpTypes, _AllRecords,
+initialize_constraints([], _MFA, _RecDict, _ExpTypes, _RecordTable,
Cache, Acc) ->
{Acc, Cache};
-initialize_constraints([Constr|Rest], MFA, RecDict, ExpTypes, AllRecords,
+initialize_constraints([Constr|Rest], MFA, RecDict, ExpTypes, RecordTable,
Cache, Acc) ->
case Constr of
{type, _, constraint, [{atom, _, is_subtype}, [Type1, Type2]]} ->
VarTable = erl_types:var_table__new(),
{T1, NewCache} =
- final_form(Type1, ExpTypes, MFA, AllRecords, VarTable, Cache),
+ final_form(Type1, ExpTypes, MFA, RecordTable, VarTable, Cache),
Entry = {T1, Type2},
- initialize_constraints(Rest, MFA, RecDict, ExpTypes, AllRecords,
+ initialize_constraints(Rest, MFA, RecDict, ExpTypes, RecordTable,
NewCache, [Entry|Acc]);
{type, _, constraint, [{atom,_,Name}, List]} ->
N = length(List),
@@ -517,18 +517,18 @@ initialize_constraints([Constr|Rest], MFA, RecDict, ExpTypes, AllRecords,
io_lib:format("Unsupported type guard ~w/~w\n", [Name, N])})
end.
-constraints_fixpoint(Constrs, MFA, RecDict, ExpTypes, AllRecords, Cache) ->
+constraints_fixpoint(Constrs, MFA, RecDict, ExpTypes, RecordTable, Cache) ->
VarTable = erl_types:var_table__new(),
{VarTab, NewCache} =
- constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, AllRecords,
+ constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, RecordTable,
VarTable, Cache),
constraints_fixpoint(VarTab, MFA, Constrs, RecDict, ExpTypes,
- AllRecords, NewCache).
+ RecordTable, NewCache).
constraints_fixpoint(OldVarTab, MFA, Constrs, RecDict, ExpTypes,
- AllRecords, Cache) ->
+ RecordTable, Cache) ->
{NewVarTab, NewCache} =
- constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, AllRecords,
+ constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, RecordTable,
OldVarTab, Cache),
case NewVarTab of
OldVarTab ->
@@ -540,38 +540,38 @@ constraints_fixpoint(OldVarTab, MFA, Constrs, RecDict, ExpTypes,
{FinalConstrs, NewVarTab, NewCache};
_Other ->
constraints_fixpoint(NewVarTab, MFA, Constrs, RecDict, ExpTypes,
- AllRecords, NewCache)
+ RecordTable, NewCache)
end.
-final_form(Form, ExpTypes, MFA, AllRecords, VarTable, Cache) ->
- from_form_with_check(Form, ExpTypes, MFA, AllRecords, VarTable, Cache).
+final_form(Form, ExpTypes, MFA, RecordTable, VarTable, Cache) ->
+ from_form_with_check(Form, ExpTypes, MFA, RecordTable, VarTable, Cache).
-from_form_with_check(Form, ExpTypes, MFA, AllRecords, Cache) ->
+from_form_with_check(Form, ExpTypes, MFA, RecordTable, Cache) ->
VarTable = erl_types:var_table__new(),
- from_form_with_check(Form, ExpTypes, MFA, AllRecords, VarTable, Cache).
+ from_form_with_check(Form, ExpTypes, MFA, RecordTable, VarTable, Cache).
-from_form_with_check(Form, ExpTypes, MFA, AllRecords, VarTable, Cache) ->
+from_form_with_check(Form, ExpTypes, MFA, RecordTable, VarTable, Cache) ->
Site = {spec, MFA},
- C1 = erl_types:t_check_record_fields(Form, ExpTypes, Site, AllRecords,
+ C1 = erl_types:t_check_record_fields(Form, ExpTypes, Site, RecordTable,
VarTable, Cache),
- erl_types:t_from_form(Form, ExpTypes, Site, AllRecords, VarTable, C1).
+ erl_types:t_from_form(Form, ExpTypes, Site, RecordTable, VarTable, C1).
-constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, AllRecords,
+constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, RecordTable,
VarTab, Cache) ->
{Subtypes, NewCache} =
- constraints_to_subs(Constrs, MFA, RecDict, ExpTypes, AllRecords,
+ constraints_to_subs(Constrs, MFA, RecDict, ExpTypes, RecordTable,
VarTab, Cache, []),
{insert_constraints(Subtypes), NewCache}.
-constraints_to_subs([], _MFA, _RecDict, _ExpTypes, _AllRecords,
+constraints_to_subs([], _MFA, _RecDict, _ExpTypes, _RecordTable,
_VarTab, Cache, Acc) ->
{Acc, Cache};
-constraints_to_subs([{T1, Form2}|Rest], MFA, RecDict, ExpTypes, AllRecords,
+constraints_to_subs([{T1, Form2}|Rest], MFA, RecDict, ExpTypes, RecordTable,
VarTab, Cache, Acc) ->
{T2, NewCache} =
- final_form(Form2, ExpTypes, MFA, AllRecords, VarTab, Cache),
+ final_form(Form2, ExpTypes, MFA, RecordTable, VarTab, Cache),
NewAcc = [{subtype, T1, T2}|Acc],
- constraints_to_subs(Rest, MFA, RecDict, ExpTypes, AllRecords,
+ constraints_to_subs(Rest, MFA, RecDict, ExpTypes, RecordTable,
VarTab, NewCache, NewAcc).
%% Replaces variables with '_' when necessary to break up cycles among
@@ -670,7 +670,7 @@ get_invalid_contract_warnings(Modules, CodeServer, Plt, FindOpaques) ->
get_invalid_contract_warnings_modules([Mod|Mods], CodeServer, Plt, FindOpaques, Acc) ->
Contracts1 = dialyzer_codeserver:lookup_mod_contracts(Mod, CodeServer),
- Contracts2 = dict:to_list(Contracts1),
+ Contracts2 = maps:to_list(Contracts1),
Records = dialyzer_codeserver:lookup_mod_records(Mod, CodeServer),
NewAcc = get_invalid_contract_warnings_funs(Contracts2, Plt, Records, FindOpaques, Acc),
get_invalid_contract_warnings_modules(Mods, CodeServer, Plt, FindOpaques, NewAcc);
diff --git a/lib/dialyzer/src/dialyzer_coordinator.erl b/lib/dialyzer/src/dialyzer_coordinator.erl
index 99f95a4dca..7c1bc1de5a 100644
--- a/lib/dialyzer/src/dialyzer_coordinator.erl
+++ b/lib/dialyzer/src/dialyzer_coordinator.erl
@@ -76,6 +76,8 @@
active = 0 :: integer(),
result :: result(),
next_label = 0 :: integer(),
+ jobs :: [job()],
+ job_fun :: fun(),
init_data :: init_data(),
regulator :: regulator(),
scc_to_pid :: scc_to_pid()
@@ -108,16 +110,18 @@ spawn_jobs(Mode, Jobs, InitData, Timing) ->
false -> unused
end,
Coordinator = {Collector, Regulator, SCCtoPID},
- Fold =
- fun(Job, Count) ->
- Pid = dialyzer_worker:launch(Mode, Job, InitData, Coordinator),
- case TypesigOrDataflow of
- true -> true = ets:insert(SCCtoPID, {Job, Pid}), ok;
- false -> ok
- end,
- Count + 1
+ JobFun =
+ fun(Job) ->
+ Pid = dialyzer_worker:launch(Mode, Job, InitData, Coordinator),
+ case TypesigOrDataflow of
+ true -> true = ets:insert(SCCtoPID, {Job, Pid});
+ false -> true
+ end
end,
- JobCount = lists:foldl(Fold, 0, Jobs),
+ JobCount = length(Jobs),
+ NumberOfInitJobs = min(JobCount, 20 * dialyzer_utils:parallelism()),
+ {InitJobs, RestJobs} = lists:split(NumberOfInitJobs, Jobs),
+ lists:foreach(JobFun, InitJobs),
Unit =
case Mode of
'typesig' -> "SCCs";
@@ -129,11 +133,13 @@ spawn_jobs(Mode, Jobs, InitData, Timing) ->
'compile' -> dialyzer_analysis_callgraph:compile_init_result();
_ -> []
end,
- #state{mode = Mode, active = JobCount, result = InitResult, next_label = 0,
- init_data = InitData, regulator = Regulator, scc_to_pid = SCCtoPID}.
+ #state{mode = Mode, active = JobCount, result = InitResult,
+ next_label = 0, job_fun = JobFun, jobs = RestJobs,
+ init_data = InitData, regulator = Regulator, scc_to_pid = SCCtoPID}.
collect_result(#state{mode = Mode, active = Active, result = Result,
next_label = NextLabel, init_data = InitData,
+ jobs = JobsLeft, job_fun = JobFun,
regulator = Regulator, scc_to_pid = SCCtoPID} = State) ->
receive
{next_label_request, Estimation, Pid} ->
@@ -141,20 +147,35 @@ collect_result(#state{mode = Mode, active = Active, result = Result,
collect_result(State#state{next_label = NextLabel + Estimation});
{done, Job, Data} ->
NewResult = update_result(Mode, InitData, Job, Data, Result),
+ TypesigOrDataflow = (Mode =:= 'typesig') orelse (Mode =:= 'dataflow'),
case Active of
1 ->
kill_regulator(Regulator),
case Mode of
'compile' ->
{NewResult, NextLabel};
- X when X =:= 'typesig'; X =:= 'dataflow' ->
+ _ when TypesigOrDataflow ->
ets:delete(SCCtoPID),
NewResult;
'warnings' ->
NewResult
end;
N ->
- collect_result(State#state{result = NewResult, active = N - 1})
+ case TypesigOrDataflow of
+ true -> true = ets:delete(SCCtoPID, Job);
+ false -> true
+ end,
+ NewJobsLeft =
+ case JobsLeft of
+ [] -> [];
+ [NewJob|JobsLeft1] ->
+ JobFun(NewJob),
+ JobsLeft1
+ end,
+ NewState = State#state{result = NewResult,
+ jobs = NewJobsLeft,
+ active = N - 1},
+ collect_result(NewState)
end
end.
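
The rewritten spawn_jobs/4 and collect_result/1 above cap the number of workers in flight: at most 20 * dialyzer_utils:parallelism() jobs are launched up front, and each finished job starts one job from the remaining queue. A standalone sketch of that throttling scheme (hypothetical names, not the coordinator itself):

%% Sketch of the windowed scheduling idea: StartFun launches one job and is
%% expected to send {done, Job} back to this process when the job finishes.
run_throttled(Jobs, Window, StartFun) ->
    {Init, Rest} = lists:split(min(Window, length(Jobs)), Jobs),
    lists:foreach(StartFun, Init),
    wait_all(length(Jobs), Rest, StartFun).

wait_all(0, [], _StartFun) ->
    ok;
wait_all(Active, Queue, StartFun) ->
    receive
        {done, _Job} ->
            NewQueue = case Queue of
                           [] -> [];
                           [Next | More] -> StartFun(Next), More
                       end,
            wait_all(Active - 1, NewQueue, StartFun)
    end.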
@@ -170,18 +191,20 @@ update_result(Mode, InitData, Job, Data, Result) ->
end.
-spec sccs_to_pids([scc() | module()], coordinator()) ->
- {[dialyzer_worker:worker()], [scc() | module()]}.
+ [dialyzer_worker:worker()].
sccs_to_pids(SCCs, {_Collector, _Regulator, SCCtoPID}) ->
Fold =
- fun(SCC, {Pids, Unknown}) ->
- try ets:lookup_element(SCCtoPID, SCC, 2) of
- Result -> {[Result|Pids], Unknown}
- catch
- _:_ -> {Pids, [SCC|Unknown]}
- end
+ fun(SCC, Pids) ->
+ %% The SCCs that SCC depends on have always been started.
+ try ets:lookup_element(SCCtoPID, SCC, 2) of
+ Pid when is_pid(Pid) ->
+ [Pid|Pids]
+ catch
+ _:_ -> Pids
+ end
end,
- lists:foldl(Fold, {[], []}, SCCs).
+ lists:foldl(Fold, [], SCCs).
-spec job_done(job(), job_result(), coordinator()) -> ok.
diff --git a/lib/dialyzer/src/dialyzer_dataflow.erl b/lib/dialyzer/src/dialyzer_dataflow.erl
index fdcf8269e4..4c29b4f1eb 100644
--- a/lib/dialyzer/src/dialyzer_dataflow.erl
+++ b/lib/dialyzer/src/dialyzer_dataflow.erl
@@ -522,7 +522,7 @@ handle_apply_or_call([{TypeOfApply, {Fun, Sig, Contr, LocalRet}}|Left],
case is_race_analysis_enabled(State) of
true ->
Ann = cerl:get_ann(Tree),
- File = get_file(Ann),
+ File = get_file(Ann, State),
Line = abs(get_line(Ann)),
dialyzer_races:store_race_call(Fun, ArgTypes, Args,
{File, Line}, State);
@@ -1344,8 +1344,6 @@ do_clause(C, Arg, ArgType0, OrigArgType, Map, State, Warns) ->
{Msg, Force} =
case t_is_none(ArgType0) of
true ->
- PatString = format_patterns(Pats),
- PatTypes = [PatString, format_type(OrigArgType, State1)],
%% See if this is covered by an earlier clause or if it
%% simply cannot match
OrigArgTypes =
@@ -1353,17 +1351,27 @@ do_clause(C, Arg, ArgType0, OrigArgType, Map, State, Warns) ->
true -> Any = t_any(), [Any || _ <- Pats];
false -> t_to_tlist(OrigArgType)
end,
+ PatString = format_patterns(Pats),
+ ArgTypeString = format_type(OrigArgType, State1),
+ BindResOrig =
+ bind_pat_vars(Pats, OrigArgTypes, [], Map1, State1),
Tag =
- case bind_pat_vars(Pats, OrigArgTypes, [], Map1, State1) of
+ case BindResOrig of
{error, bind, _, _, _} -> pattern_match;
{error, record, _, _, _} -> record_match;
{error, opaque, _, _, _} -> opaque_match;
{_, _} -> pattern_match_cov
end,
- {{Tag, PatTypes}, false};
+ PatTypes = case BindResOrig of
+ {error, opaque, _, _, OpaqueType} ->
+ [PatString, ArgTypeString,
+ format_type(OpaqueType, State1)];
+ _ -> [PatString, ArgTypeString]
+ end,
+ {{Tag, PatTypes}, false};
false ->
%% Try to find out if this is a default clause in a list
- %% comprehension and supress this. A real Hack(tm)
+ %% comprehension and suppress this. A real Hack(tm)
Force0 =
case is_compiler_generated(cerl:get_ann(C)) of
true ->
@@ -3083,7 +3091,7 @@ state__add_warning(#state{warnings = Warnings, warning_mode = true} = State,
Ann = cerl:get_ann(Tree),
case Force of
true ->
- WarningInfo = {get_file(Ann),
+ WarningInfo = {get_file(Ann, State),
abs(get_line(Ann)),
State#state.curr_fun},
Warn = {Tag, WarningInfo, Msg},
@@ -3093,7 +3101,9 @@ state__add_warning(#state{warnings = Warnings, warning_mode = true} = State,
case is_compiler_generated(Ann) of
true -> State;
false ->
- WarningInfo = {get_file(Ann), get_line(Ann), State#state.curr_fun},
+ WarningInfo = {get_file(Ann, State),
+ get_line(Ann),
+ State#state.curr_fun},
Warn = {Tag, WarningInfo, Msg},
case Tag of
?WARN_CONTRACT_RANGE -> ok;
@@ -3492,6 +3502,12 @@ state__put_races(Races, State) ->
state__records_only(#state{records = Records}) ->
#state{records = Records}.
+-spec state__translate_file(file:filename(), state()) -> file:filename().
+
+state__translate_file(FakeFile, State) ->
+ #state{codeserver = CodeServer, module = Module} = State,
+ dialyzer_codeserver:translate_fake_file(CodeServer, Module, FakeFile).
+
%%% ===========================================================================
%%%
%%% Races
@@ -3563,9 +3579,11 @@ get_line([Line|_]) when is_integer(Line) -> Line;
get_line([_|Tail]) -> get_line(Tail);
get_line([]) -> -1.
-get_file([]) -> [];
-get_file([{file, File}|_]) -> File;
-get_file([_|Tail]) -> get_file(Tail).
+get_file([], _State) -> [];
+get_file([{file, FakeFile}|_], State) ->
+ state__translate_file(FakeFile, State);
+get_file([_|Tail], State) ->
+ get_file(Tail, State).
is_compiler_generated(Ann) ->
lists:member(compiler_generated, Ann) orelse (get_line(Ann) < 1).
diff --git a/lib/dialyzer/src/dialyzer_gui_wx.erl b/lib/dialyzer/src/dialyzer_gui_wx.erl
index 91f7fbe467..d1b955044b 100644
--- a/lib/dialyzer/src/dialyzer_gui_wx.erl
+++ b/lib/dialyzer/src/dialyzer_gui_wx.erl
@@ -498,8 +498,9 @@ gui_loop(#gui_state{backend_pid = BackendPid, doc_plt = DocPlt,
end,
ExplanationPid = spawn_link(Fun),
gui_loop(State#gui_state{expl_pid = ExplanationPid});
- {BackendPid, done, _NewPlt, NewDocPlt} ->
+ {BackendPid, done, NewMiniPlt, NewDocPlt} ->
message(State, "Analysis done"),
+ dialyzer_plt:delete(NewMiniPlt),
config_gui_stop(State),
gui_loop(State#gui_state{doc_plt = NewDocPlt});
{'EXIT', BackendPid, {error, Reason}} ->
diff --git a/lib/dialyzer/src/dialyzer_plt.erl b/lib/dialyzer/src/dialyzer_plt.erl
index 64b10af1ba..bfd3f84fc5 100644
--- a/lib/dialyzer/src/dialyzer_plt.erl
+++ b/lib/dialyzer/src/dialyzer_plt.erl
@@ -31,9 +31,8 @@
included_files/1,
from_file/1,
get_default_plt/0,
- get_types/1,
+ get_module_types/2,
get_exported_types/1,
- %% insert/3,
insert_list/2,
insert_contract_list/2,
insert_callbacks/2,
@@ -51,7 +50,9 @@
get_specs/4,
to_file/4,
get_mini_plt/1,
- restore_full_plt/2
+ restore_full_plt/1,
+ delete/1,
+ give_away/2
]).
%% Debug utilities
@@ -75,14 +76,16 @@
%%----------------------------------------------------------------------
-record(plt, {info = table_new() :: dict:dict(),
- types = table_new() :: dict:dict(),
+ types = table_new() :: erl_types:mod_records(),
contracts = table_new() :: dict:dict(),
callbacks = table_new() :: dict:dict(),
exported_types = sets:new() :: sets:set()}).
-record(mini_plt, {info :: ets:tid(),
+ types :: ets:tid(),
contracts :: ets:tid(),
- callbacks :: ets:tid()
+ callbacks :: ets:tid(),
+ exported_types :: ets:tid()
}).
-opaque plt() :: #plt{} | #mini_plt{}.
@@ -123,6 +126,10 @@ delete_module(#plt{info = Info, types = Types,
-spec delete_list(plt(), [mfa() | integer()]) -> plt().
+delete_list(#mini_plt{info = Info,
+ contracts = Contracts}=Plt, List) ->
+ Plt#mini_plt{info = ets_table_delete_list(Info, List),
+ contracts = ets_table_delete_list(Contracts, List)};
delete_list(#plt{info = Info, types = Types,
contracts = Contracts,
callbacks = Callbacks,
@@ -135,6 +142,10 @@ delete_list(#plt{info = Info, types = Types,
-spec insert_contract_list(plt(), dialyzer_contracts:plt_contracts()) -> plt().
+insert_contract_list(#plt{contracts = Contracts} = PLT, List) ->
+ NewContracts = dict:merge(fun(_MFA, _Old, New) -> New end,
+ Contracts, dict:from_list(List)),
+ PLT#plt{contracts = NewContracts};
insert_contract_list(#mini_plt{contracts = Contracts} = PLT, List) ->
true = ets:insert(Contracts, List),
PLT.
@@ -176,20 +187,23 @@ lookup(Plt, Label) when is_integer(Label) ->
lookup_1(#mini_plt{info = Info}, MFAorLabel) ->
ets_table_lookup(Info, MFAorLabel).
--spec insert_types(plt(), dict:dict()) -> plt().
+-spec insert_types(plt(), ets:tid()) -> plt().
-insert_types(PLT, Rec) ->
- PLT#plt{types = Rec}.
+insert_types(MiniPLT, Records) ->
+ ets:rename(Records, plt_types),
+ MiniPLT#mini_plt{types = Records}.
--spec insert_exported_types(plt(), sets:set()) -> plt().
+-spec insert_exported_types(plt(), ets:tid()) -> plt().
-insert_exported_types(PLT, Set) ->
- PLT#plt{exported_types = Set}.
+insert_exported_types(MiniPLT, ExpTypes) ->
+ ets:rename(ExpTypes, plt_exported_types),
+ MiniPLT#mini_plt{exported_types = ExpTypes}.
--spec get_types(plt()) -> dict:dict().
+-spec get_module_types(plt(), atom()) ->
+ 'none' | {'value', erl_types:type_table()}.
-get_types(#plt{types = Types}) ->
- Types.
+get_module_types(#plt{types = Types}, M) when is_atom(M) ->
+ table_lookup(Types, M).
-spec get_exported_types(plt()) -> sets:set().
@@ -219,12 +233,8 @@ contains_mfa(#plt{info = Info, contracts = Contracts}, MFA) ->
get_default_plt() ->
case os:getenv("DIALYZER_PLT") of
false ->
- case os:getenv("HOME") of
- false ->
- plt_error("The HOME environment variable needs to be set " ++
- "so that Dialyzer knows where to find the default PLT");
- HomeDir -> filename:join(HomeDir, ".dialyzer_plt")
- end;
+ {ok,[[HomeDir]]} = init:get_argument(home),
+ filename:join(HomeDir, ".dialyzer_plt");
UserSpecPlt -> UserSpecPlt
end.
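
A small aside on the hunk above: the erl emulator always passes -home, so init:get_argument(home) is assumed to succeed and the old HOME-not-set error branch becomes unreachable. Minimal sketch of the resulting lookup order:

%% Sketch: environment override first, then the runtime's notion of the home
%% directory (the -home emulator argument), matching the clause above.
default_plt_sketch() ->
    case os:getenv("DIALYZER_PLT") of
        false ->
            {ok, [[Home]]} = init:get_argument(home),
            filename:join(Home, ".dialyzer_plt");
        UserSpecPlt ->
            UserSpecPlt
    end.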
@@ -246,8 +256,10 @@ from_file(FileName, ReturnInfo) ->
Msg = io_lib:format("Old PLT file ~s\n", [FileName]),
plt_error(Msg);
ok ->
+ Types = [{Mod, maps:from_list(dict:to_list(Types))} ||
+ {Mod, Types} <- dict:to_list(Rec#file_plt.types)],
Plt = #plt{info = Rec#file_plt.info,
- types = Rec#file_plt.types,
+ types = dict:from_list(Types),
contracts = Rec#file_plt.contracts,
callbacks = Rec#file_plt.callbacks,
exported_types = Rec#file_plt.exported_types},
@@ -364,12 +376,14 @@ to_file(FileName,
end,
OldModDeps, ModDeps),
ImplMd5 = compute_implementation_md5(),
+ FileTypes = dict:from_list([{Mod, dict:from_list(maps:to_list(MTypes))} ||
+ {Mod, MTypes} <- dict:to_list(Types)]),
Record = #file_plt{version = ?VSN,
file_md5_list = MD5,
info = Info,
contracts = Contracts,
callbacks = Callbacks,
- types = Types,
+ types = FileTypes,
exported_types = ExpTypes,
mod_deps = NewModDeps,
implementation_md5 = ImplMd5},
@@ -503,32 +517,102 @@ init_md5_list_1(Md5List, [], Acc) ->
-spec get_mini_plt(plt()) -> plt().
-get_mini_plt(#plt{info = Info, contracts = Contracts, callbacks = Callbacks}) ->
- [ETSInfo, ETSContracts, ETSCallbacks] =
- [ets:new(Name, [public]) || Name <- [plt_info, plt_contracts, plt_callbacks]],
+get_mini_plt(#plt{info = Info,
+ types = Types,
+ contracts = Contracts,
+ callbacks = Callbacks,
+ exported_types = ExpTypes}) ->
+ [ETSInfo, ETSContracts] =
+ [ets:new(Name, [public]) ||
+ Name <- [plt_info, plt_contracts]],
+ [ETSTypes, ETSCallbacks, ETSExpTypes] =
+ [ets:new(Name, [compressed, public]) ||
+ Name <- [plt_types, plt_callbacks, plt_exported_types]],
CallbackList = dict:to_list(Callbacks),
CallbacksByModule =
[{M, [Cb || {{M1,_,_},_} = Cb <- CallbackList, M1 =:= M]} ||
M <- lists:usort([M || {{M,_,_},_} <- CallbackList])],
- [true, true] =
+ [true, true, true] =
[ets:insert(ETS, dict:to_list(Data)) ||
- {ETS, Data} <- [{ETSInfo, Info}, {ETSContracts, Contracts}]],
+ {ETS, Data} <- [{ETSInfo, Info},
+ {ETSTypes, Types},
+ {ETSContracts, Contracts}]],
true = ets:insert(ETSCallbacks, CallbacksByModule),
- #mini_plt{info = ETSInfo, contracts = ETSContracts, callbacks = ETSCallbacks};
+ true = ets:insert(ETSExpTypes, [{ET} || ET <- sets:to_list(ExpTypes)]),
+ #mini_plt{info = ETSInfo,
+ types = ETSTypes,
+ contracts = ETSContracts,
+ callbacks = ETSCallbacks,
+ exported_types = ETSExpTypes};
get_mini_plt(undefined) ->
undefined.
--spec restore_full_plt(plt(), plt()) -> plt().
-
-restore_full_plt(#mini_plt{info = ETSInfo, contracts = ETSContracts}, Plt) ->
- Info = dict:from_list(ets:tab2list(ETSInfo)),
- Contracts = dict:from_list(ets:tab2list(ETSContracts)),
- ets:delete(ETSContracts),
- ets:delete(ETSInfo),
- Plt#plt{info = Info, contracts = Contracts};
-restore_full_plt(undefined, undefined) ->
+-spec restore_full_plt(plt()) -> plt().
+
+restore_full_plt(#mini_plt{info = ETSInfo,
+ types = ETSTypes,
+ contracts = ETSContracts,
+ callbacks = ETSCallbacks,
+ exported_types = ETSExpTypes} = MiniPlt) ->
+ Info = dict:from_list(tab2list(ETSInfo)),
+ Contracts = dict:from_list(tab2list(ETSContracts)),
+ Types = dict:from_list(tab2list(ETSTypes)),
+ Callbacks =
+ dict:from_list([Cb || {_M, Cbs} <- tab2list(ETSCallbacks), Cb <- Cbs]),
+ ExpTypes = sets:from_list([E || {E} <- tab2list(ETSExpTypes)]),
+ ok = delete(MiniPlt),
+ #plt{info = Info,
+ types = Types,
+ contracts = Contracts,
+ callbacks = Callbacks,
+ exported_types = ExpTypes};
+restore_full_plt(undefined) ->
undefined.
+-spec delete(plt()) -> 'ok'.
+
+delete(#mini_plt{info = ETSInfo,
+ types = ETSTypes,
+ contracts = ETSContracts,
+ callbacks = ETSCallbacks,
+ exported_types = ETSExpTypes}) ->
+ true = ets:delete(ETSContracts),
+ true = ets:delete(ETSTypes),
+ true = ets:delete(ETSInfo),
+ true = ets:delete(ETSCallbacks),
+ true = ets:delete(ETSExpTypes),
+ ok.
+
+-spec give_away(plt(), pid()) -> 'ok'.
+
+give_away(#mini_plt{info = ETSInfo,
+ types = ETSTypes,
+ contracts = ETSContracts,
+ callbacks = ETSCallbacks,
+ exported_types = ETSExpTypes},
+ Pid) ->
+ true = ets:give_away(ETSContracts, Pid, any),
+ true = ets:give_away(ETSTypes, Pid, any),
+ true = ets:give_away(ETSInfo, Pid, any),
+ true = ets:give_away(ETSCallbacks, Pid, any),
+ true = ets:give_away(ETSExpTypes, Pid, any),
+ ok.
+
+%% Somewhat slower than ets:tab2list(), but uses less memory.
+tab2list(T) ->
+ tab2list(ets:first(T), T, []).
+
+tab2list('$end_of_table', T, A) ->
+ case ets:first(T) of % no safe_fixtable()...
+ '$end_of_table' -> A;
+ Key -> tab2list(Key, T, A)
+ end;
+tab2list(Key, T, A) ->
+ Vs = ets:lookup(T, Key),
+ Key1 = ets:next(T, Key),
+ ets:delete(T, Key),
+ tab2list(Key1, T, Vs ++ A).
+
%%---------------------------------------------------------------------------
%% Edoc
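
The mini PLT now keeps types, callbacks and exported types in (compressed, public) ETS tables as well, and restore_full_plt/1 drains them with the destructive tab2list/1 above rather than ets:tab2list/1, deleting each key right after reading it so the ETS table and the rebuilt dict never hold all of the data at the same time. A toy roundtrip of that idea:

%% Sketch: dict -> ETS (for the analysis phase) -> dict, draining destructively.
roundtrip(Dict) ->
    Tab = ets:new(mini_sketch, [compressed, public]),
    true = ets:insert(Tab, dict:to_list(Dict)),
    Restored = dict:from_list(drain(ets:first(Tab), Tab, [])),
    true = ets:delete(Tab),
    Restored.

drain('$end_of_table', Tab, Acc) ->
    case ets:first(Tab) of                 %% no safe_fixtable(): re-scan until empty
        '$end_of_table' -> Acc;
        Key -> drain(Key, Tab, Acc)
    end;
drain(Key, Tab, Acc) ->
    Objs = ets:lookup(Tab, Key),
    Next = ets:next(Tab, Key),
    ets:delete(Tab, Key),
    drain(Next, Tab, Objs ++ Acc).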
@@ -600,6 +684,12 @@ table_delete_module1(Plt, Mod) ->
table_delete_module2(Plt, Mod) ->
dict:filter(fun(M, _Val) -> M =/= Mod end, Plt).
+ets_table_delete_list(Tab, [H|T]) ->
+ ets:delete(Tab, H),
+ ets_table_delete_list(Tab, T);
+ets_table_delete_list(Tab, []) ->
+ Tab.
+
table_delete_list(Plt, [H|T]) ->
table_delete_list(dict:erase(H, Plt), T);
table_delete_list(Plt, []) ->
diff --git a/lib/dialyzer/src/dialyzer_succ_typings.erl b/lib/dialyzer/src/dialyzer_succ_typings.erl
index df12796dd4..be685baf22 100644
--- a/lib/dialyzer/src/dialyzer_succ_typings.erl
+++ b/lib/dialyzer/src/dialyzer_succ_typings.erl
@@ -29,7 +29,7 @@
-export([
find_succ_types_for_scc/2,
refine_one_module/2,
- find_required_by/2,
+ %% find_required_by/2,
find_depends_on/2,
collect_warnings/2,
lookup_names/2
@@ -89,7 +89,7 @@ analyze_callgraph(Callgraph, Plt, Codeserver, TimingServer, Solvers, Parent) ->
NewState =
init_state_and_get_success_typings(Callgraph, Plt, Codeserver,
TimingServer, Solvers, Parent),
- dialyzer_plt:restore_full_plt(NewState#st.plt, Plt).
+ NewState#st.plt.
%%--------------------------------------------------------------------
@@ -104,6 +104,7 @@ init_state_and_get_success_typings(Callgraph, Plt, Codeserver,
get_refined_success_typings(SCCs, #st{callgraph = Callgraph,
timing_server = TimingServer} = State) ->
+ erlang:garbage_collect(),
case find_succ_typings(SCCs, State) of
{fixpoint, State1} -> State1;
{not_fixpoint, NotFixpoint1, State1} ->
@@ -148,8 +149,8 @@ get_warnings(Callgraph, Plt, DocPlt, Codeserver,
?timing(TimingServer, "warning",
get_warnings_from_modules(Mods, InitState, MiniDocPlt)),
{postprocess_warnings(CWarns ++ ModWarns, Codeserver),
- dialyzer_plt:restore_full_plt(MiniPlt, Plt),
- dialyzer_plt:restore_full_plt(MiniDocPlt, DocPlt)}.
+ MiniPlt,
+ dialyzer_plt:restore_full_plt(MiniDocPlt)}.
get_warnings_from_modules(Mods, State, DocPlt) ->
#st{callgraph = Callgraph, codeserver = Codeserver,
@@ -167,10 +168,10 @@ collect_warnings(M, {Codeserver, Callgraph, Plt, DocPlt}) ->
%% Check if there are contracts for functions that do not exist
Warnings1 =
dialyzer_contracts:contracts_without_fun(Contracts, AllFuns, Callgraph),
+ Attrs = cerl:module_attrs(ModCode),
{Warnings2, FunTypes} =
dialyzer_dataflow:get_warnings(ModCode, Plt, Callgraph, Codeserver,
Records),
- Attrs = cerl:module_attrs(ModCode),
Warnings3 =
dialyzer_behaviours:check_callbacks(M, Attrs, Records, Plt, Codeserver),
DocPlt = insert_into_doc_plt(FunTypes, Callgraph, DocPlt),
@@ -235,10 +236,10 @@ refine_succ_typings(Modules, #st{codeserver = Codeserver,
find_depends_on(SCC, {_Codeserver, Callgraph, _Plt, _Solvers}) ->
dialyzer_callgraph:get_depends_on(SCC, Callgraph).
--spec find_required_by(scc() | module(), fixpoint_init_data()) -> [scc()].
+%% -spec find_required_by(scc() | module(), fixpoint_init_data()) -> [scc()].
-find_required_by(SCC, {_Codeserver, Callgraph, _Plt, _Solvers}) ->
- dialyzer_callgraph:get_required_by(SCC, Callgraph).
+%% find_required_by(SCC, {_Codeserver, Callgraph, _Plt, _Solvers}) ->
+%% dialyzer_callgraph:get_required_by(SCC, Callgraph).
-spec lookup_names([label()], fixpoint_init_data()) -> [mfa_or_funlbl()].
@@ -255,7 +256,7 @@ refine_one_module(M, {CodeServer, Callgraph, Plt, _Solvers}) ->
NewFunTypes =
dialyzer_dataflow:get_fun_types(ModCode, Plt, Callgraph, CodeServer, Records),
Contracts1 = dialyzer_codeserver:lookup_mod_contracts(M, CodeServer),
- Contracts = orddict:from_list(dict:to_list(Contracts1)),
+ Contracts = orddict:from_list(maps:to_list(Contracts1)),
FindOpaques = find_opaques_fun(Records),
DecoratedFunTypes =
decorate_succ_typings(Contracts, Callgraph, NewFunTypes, FindOpaques),
@@ -341,21 +342,25 @@ find_succ_typings(SCCs, #st{codeserver = Codeserver, callgraph = Callgraph,
-spec find_succ_types_for_scc(scc(), typesig_init_data()) -> [mfa_or_funlbl()].
-find_succ_types_for_scc(SCC, {Codeserver, Callgraph, Plt, Solvers}) ->
- SCC_Info = [{MFA,
- dialyzer_codeserver:lookup_mfa_code(MFA, Codeserver),
- dialyzer_codeserver:lookup_mod_records(M, Codeserver)}
- || {M, _, _} = MFA <- SCC],
+find_succ_types_for_scc(SCC0, {Codeserver, Callgraph, Plt, Solvers}) ->
+ SCC = [MFA || {_, _, _} = MFA <- SCC0],
Contracts1 = [{MFA, dialyzer_codeserver:lookup_mfa_contract(MFA, Codeserver)}
- || {_, _, _} = MFA <- SCC],
+ || MFA <- SCC],
Contracts2 = [{MFA, Contract} || {MFA, {ok, Contract}} <- Contracts1],
Contracts3 = orddict:from_list(Contracts2),
Label = dialyzer_codeserver:get_next_core_label(Codeserver),
- AllFuns = collect_fun_info([Fun || {_MFA, {_Var, Fun}, _Rec} <- SCC_Info]),
+ AllFuns = lists:append(
+ [begin
+ {_Var, Fun} =
+ dialyzer_codeserver:lookup_mfa_code(MFA, Codeserver),
+ collect_fun_info([Fun])
+ end || MFA <- SCC]),
+ erlang:garbage_collect(),
PropTypes = get_fun_types_from_plt(AllFuns, Callgraph, Plt),
%% Assume that the PLT contains the current propagated types
- FunTypes = dialyzer_typesig:analyze_scc(SCC_Info, Label, Callgraph,
- Plt, PropTypes, Solvers),
+ FunTypes = dialyzer_typesig:analyze_scc(SCC, Label, Callgraph,
+ Codeserver, Plt, PropTypes,
+ Solvers),
AllFunSet = sets:from_list([X || {X, _} <- AllFuns]),
FilteredFunTypes =
dict:filter(fun(X, _) -> sets:is_element(X, AllFunSet) end, FunTypes),
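
In the hunk above (and in analyze_scc/7 of dialyzer_typesig.erl below) an SCC is passed around as a plain list of MFAs; the Core Erlang code and record tables are fetched from the code server one function at a time, and erlang:garbage_collect/0 runs between pieces of work. A sketch of the shape of that change, with Lookup and Collect standing in for the code-server calls:

%% Sketch only: summarise an SCC function by function instead of first
%% materialising [{MFA, Code, Records}] for the whole SCC, so at most one
%% function body needs to be live on the heap at a time.
funs_of_scc(SCC, Lookup, Collect) ->
    lists:append([begin
                      {_Var, Fun} = Lookup(MFA),   %% e.g. a lookup_mfa_code-style call
                      Collect(Fun)                 %% e.g. collect_fun_info([Fun])
                  end || MFA <- SCC]).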
diff --git a/lib/dialyzer/src/dialyzer_typesig.erl b/lib/dialyzer/src/dialyzer_typesig.erl
index 457db9df83..c3ba44fde7 100644
--- a/lib/dialyzer/src/dialyzer_typesig.erl
+++ b/lib/dialyzer/src/dialyzer_typesig.erl
@@ -22,7 +22,7 @@
-module(dialyzer_typesig).
--export([analyze_scc/6]).
+-export([analyze_scc/7]).
-export([get_safe_underapprox/2]).
%%-import(helper, %% 'helper' could be any module doing sanity checks...
@@ -81,7 +81,7 @@
-record(constraint_list, {type :: 'conj' | 'disj',
list :: [constr()],
deps :: deps(),
- masks = maps:new() :: #{dep() => mask()},
+ masks :: #{dep() => mask()} | 'undefined',
id :: {'list', dep()} | 'undefined'}).
-type constraint_list() :: #constraint_list{}.
@@ -94,10 +94,9 @@
-type types() :: erl_types:type_table().
--type typesig_scc() :: [{mfa(), {cerl:c_var(), cerl:c_fun()}, types()}].
-type typesig_funmap() :: #{type_var() => type_var()}.
--type prop_types() :: dict:dict(label(), types()).
+-type prop_types() :: dict:dict(label(), erl_types:erl_type()).
-record(state, {callgraph :: dialyzer_callgraph:callgraph()
| 'undefined',
@@ -114,7 +113,7 @@
plt :: dialyzer_plt:plt()
| 'undefined',
prop_types = dict:new() :: prop_types(),
- records = dict:new() :: types(),
+ records = maps:new() :: types(),
scc = [] :: ordsets:ordset(type_var()),
mfas :: [mfa()],
solvers = [] :: [solver()]
@@ -153,11 +152,10 @@
%%-----------------------------------------------------------------------------
%% Analysis of strongly connected components.
%%
-%% analyze_scc(SCC, NextLabel, CallGraph, PLT, PropTypes, Solvers) -> FunTypes
+%% analyze_scc(SCC, NextLabel, CallGraph, CodeServer,
+%% PLT, PropTypes, Solvers) -> FunTypes
%%
-%% SCC - [{MFA, Def, Records}]
-%% where Def = {Var, Fun} as in the Core Erlang module definitions.
-%% Records = dict(RecName, {Arity, [{FieldName, FieldType}]})
+%% SCC - [{MFA}]
%% NextLabel - An integer that is higher than any label in the code.
%% CallGraph - A callgraph as produced by dialyzer_callgraph.erl
%% Note: The callgraph must have been built with all the
@@ -169,16 +167,19 @@
%% Solvers - User specified solvers.
%%-----------------------------------------------------------------------------
--spec analyze_scc(typesig_scc(), label(),
+-spec analyze_scc([mfa()], label(),
dialyzer_callgraph:callgraph(),
+ dialyzer_codeserver:codeserver(),
dialyzer_plt:plt(), prop_types(), [solver()]) -> prop_types().
-analyze_scc(SCC, NextLabel, CallGraph, Plt, PropTypes, Solvers0) ->
+analyze_scc(SCC, NextLabel, CallGraph, CServer, Plt, PropTypes, Solvers0) ->
Solvers = solvers(Solvers0),
- assert_format_of_scc(SCC),
- State1 = new_state(SCC, NextLabel, CallGraph, Plt, PropTypes, Solvers),
- DefSet = add_def_list([Var || {_MFA, {Var, _Fun}, _Rec} <- SCC], sets:new()),
- State2 = traverse_scc(SCC, DefSet, State1),
+ State1 = new_state(SCC, NextLabel, CallGraph, CServer, Plt, PropTypes,
+ Solvers),
+ DefSet = add_def_list(maps:values(State1#state.name_map), sets:new()),
+ ModRecs = [{M, dialyzer_codeserver:lookup_mod_records(M, CServer)} ||
+ M <- lists:usort([M || {M, _, _} <- SCC])],
+ State2 = traverse_scc(SCC, CServer, DefSet, ModRecs, State1),
State3 = state__finalize(State2),
Funs = state__scc(State3),
pp_constrs_scc(Funs, State3),
@@ -186,11 +187,6 @@ analyze_scc(SCC, NextLabel, CallGraph, Plt, PropTypes, Solvers0) ->
T = solve(Funs, State3),
dict:from_list(maps:to_list(T)).
-assert_format_of_scc([{_MFA, {_Var, _Fun}, _Records}|Left]) ->
- assert_format_of_scc(Left);
-assert_format_of_scc([]) ->
- ok.
-
solvers([]) -> [v2];
solvers(Solvers) -> Solvers.
@@ -200,12 +196,15 @@ solvers(Solvers) -> Solvers.
%%
%% ============================================================================
-traverse_scc([{_MFA, Def, Rec}|Left], DefSet, AccState) ->
+traverse_scc([{M,_,_}=MFA|Left], Codeserver, DefSet, ModRecs, AccState) ->
+ Def = dialyzer_codeserver:lookup_mfa_code(MFA, Codeserver),
+ {M, Rec} = lists:keyfind(M, 1, ModRecs),
TmpState1 = state__set_rec_dict(AccState, Rec),
DummyLetrec = cerl:c_letrec([Def], cerl:c_atom(foo)),
- {NewAccState, _} = traverse(DummyLetrec, DefSet, TmpState1),
- traverse_scc(Left, DefSet, NewAccState);
-traverse_scc([], _DefSet, AccState) ->
+ TmpState2 = state__new_constraint_context(TmpState1),
+ {NewAccState, _} = traverse(DummyLetrec, DefSet, TmpState2),
+ traverse_scc(Left, Codeserver, DefSet, ModRecs, NewAccState);
+traverse_scc([], _Codeserver, _DefSet, _ModRecs, AccState) ->
AccState.
traverse(Tree, DefinedVars, State) ->
@@ -2099,6 +2098,12 @@ v2_solve_disj([I|Is], [C|Cs], I, Map0, V2State0, UL, MapL, Eval, Uneval,
end;
v2_solve_disj([], [], _I, _Map, V2State, UL, MapL, Eval, Uneval, Failed) ->
{ok, V2State, lists:reverse(Eval), UL, MapL, lists:reverse(Uneval), Failed};
+v2_solve_disj([every_i], Cs, I, Map, V2State, UL, MapL, Eval, Uneval, Failed) ->
+ NewIs = case Cs of
+ [] -> [];
+ _ -> [I, every_i]
+ end,
+ v2_solve_disj(NewIs, Cs, I, Map, V2State, UL, MapL, Eval, Uneval, Failed);
v2_solve_disj(Is, [C|Cs], I, Map, V2State, UL, MapL, Eval, Uneval0, Failed) ->
Uneval = [{I,C#constraint_list.id} ||
not is_failed_list(C, V2State)] ++ Uneval0,
@@ -2170,7 +2175,7 @@ v2_solve_conj([I|Is], [Cs|Tail], I, Map0, Conj, IsFlat, V2State0,
M = lists:keydelete(I, 1, vars_per_child(U, Masks)),
{V2State2, NewF0} = save_updated_vars_list(AllCs, M, V2State1),
{NewF, F} = lists:splitwith(fun(J) -> J < I end, NewF0),
- Is1 = lists:umerge(Is, F),
+ Is1 = umerge_mask(Is, F),
NewFs = [NewF|NewFs0],
v2_solve_conj(Is1, Tail, I+1, Map, Conj, IsFlat, V2State2,
[U|UL], NewFs, VarsUp, LastMap, LastFlags)
@@ -2192,6 +2197,14 @@ v2_solve_conj([], _Cs, _I, Map, Conj, IsFlat, V2State, UL, NewFs, VarsUp,
v2_solve_conj(NewFlags, Cs, 1, Map, Conj, IsFlat, V2State,
[], [], [U|VarsUp], Map, NewFlags)
end;
+v2_solve_conj([every_i], Cs, I, Map, Conj, IsFlat, V2State, UL, NewFs, VarsUp,
+ LastMap, LastFlags) ->
+ NewIs = case Cs of
+ [] -> [];
+ _ -> [I, every_i]
+ end,
+ v2_solve_conj(NewIs, Cs, I, Map, Conj, IsFlat, V2State, UL, NewFs, VarsUp,
+ LastMap, LastFlags);
v2_solve_conj(Is, [_|Tail], I, Map, Conj, IsFlat, V2State, UL, NewFs, VarsUp,
LastMap, LastFlags) ->
v2_solve_conj(Is, Tail, I+1, Map, Conj, IsFlat, V2State, UL, NewFs, VarsUp,
@@ -2208,7 +2221,12 @@ report_detected_loop(_) ->
add_mask_to_flags(Flags, [Im|M], I, L) when I > Im ->
add_mask_to_flags(Flags, M, I, [Im|L]);
add_mask_to_flags(Flags, [_|M], _I, L) ->
- {lists:umerge(M, Flags), lists:reverse(L)}.
+ {umerge_mask(Flags, M), lists:reverse(L)}.
+
+umerge_mask([every_i]=Is, _F) ->
+ Is;
+umerge_mask(Is, F) ->
+ lists:umerge(Is, F).
get_mask(V, Masks) ->
case maps:find(V, Masks) of
@@ -2222,7 +2240,7 @@ get_flags(#v2_state{constr_data = ConData}=V2State0, C) ->
error ->
?debug("get_flags Id=~w Flags=all ~w\n", [Id, length(Cs)]),
V2State = V2State0#v2_state{constr_data = maps:put(Id, {[],[]}, ConData)},
- {V2State, lists:seq(1, length(Cs))};
+ {V2State, [every_i]};
{ok, failed} ->
{V2State0, failed_list};
{ok, {Part,U}} when U =/= [] ->
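
The every_i clauses above avoid materialising lists:seq(1, length(Cs)) when every index of a possibly long constraint list still has to be visited: a single every_i marker stands for "all remaining indices", it is expanded one index at a time while constraints remain, and umerge_mask/2 lets it absorb any finite index set. A toy version of the same sentinel, assuming ordered index lists:

%% Sketch: walk Items, visiting the indices in Is; [every_i] means "all of the
%% remaining indices" and is unfolded lazily, exactly one step at a time.
visit(Is, Items) ->
    visit(Is, Items, 1, []).

visit([every_i], [], _I, Acc) -> lists:reverse(Acc);
visit([every_i], [X | Xs], I, Acc) -> visit([I, every_i], [X | Xs], I, Acc);
visit([I | Is], [X | Xs], I, Acc) -> visit(Is, Xs, I + 1, [{I, X} | Acc]);
visit(Is, [_ | Xs], I, Acc) -> visit(Is, Xs, I + 1, Acc);
visit(_Is, [], _I, Acc) -> lists:reverse(Acc).

merge_mask([every_i] = Is, _New) -> Is;            %% the sentinel absorbs any set
merge_mask(Is, New) -> lists:umerge(Is, New).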
@@ -2695,11 +2713,14 @@ pp_map(_S, _Map) ->
%%
%% ============================================================================
-new_state(SCC0, NextLabel, CallGraph, Plt, PropTypes, Solvers) ->
- List = [{MFA, Var} || {MFA, {Var, _Fun}, _Rec} <- SCC0],
+new_state(MFAs, NextLabel, CallGraph, CServer, Plt, PropTypes, Solvers) ->
+ List_SCC =
+ [begin
+ {Var, Label} = dialyzer_codeserver:lookup_mfa_var_label(MFA, CServer),
+ {{MFA, Var}, t_var(Label)}
+ end || MFA <- MFAs],
+ {List, SCC} = lists:unzip(List_SCC),
NameMap = maps:from_list(List),
- MFAs = [MFA || {MFA, _Var} <- List],
- SCC = [mk_var(Fun) || {_MFA, {_Var, Fun}, _Rec} <- SCC0],
SelfRec =
case SCC of
[OneF] ->
@@ -2899,8 +2920,9 @@ state__get_rec_var(Fun, #state{fun_map = Map}) ->
maps:find(Fun, Map).
state__finalize(State) ->
- State1 = enumerate_constraints(State),
- order_fun_constraints(State1).
+ State1 = state__new_constraint_context(State),
+ State2 = enumerate_constraints(State1),
+ order_fun_constraints(State2).
%% ============================================================================
%%
@@ -2980,7 +3002,7 @@ find_constraint_deps([Type|Tail], Acc) ->
NewAcc = [[t_var_name(D) || D <- t_collect_vars(Type)]|Acc],
find_constraint_deps(Tail, NewAcc);
find_constraint_deps([], Acc) ->
- lists:flatten(Acc).
+ lists:append(Acc).
mk_constraint_1(Lhs, eq, Rhs, Deps) when Lhs < Rhs ->
#constraint{lhs = Lhs, op = eq, rhs = Rhs, deps = Deps};
@@ -3088,8 +3110,8 @@ expand_to_conjunctions(#constraint_list{type = disj, list = List}) ->
List1 = [C || C <- List, is_simple_constraint(C)],
%% Just an assert.
[] = [C || #constraint{} = C <- List1],
- Expanded = lists:flatten([expand_to_conjunctions(C)
- || #constraint_list{} = C <- List]),
+ Expanded = lists:append([expand_to_conjunctions(C)
+ || #constraint_list{} = C <- List]),
ReturnList = Expanded ++ List1,
if length(ReturnList) > ?DISJ_NORM_FORM_LIMIT -> throw(too_many_disj);
true -> ReturnList
@@ -3114,8 +3136,10 @@ calculate_deps(List) ->
calculate_deps([H|Tail], Acc) ->
Deps = get_deps(H),
calculate_deps(Tail, [Deps|Acc]);
+calculate_deps([], []) -> [];
+calculate_deps([], [L]) -> L;
calculate_deps([], Acc) ->
- ordsets:from_list(lists:flatten(Acc)).
+ lists:umerge(Acc).
mk_conj_constraint_list(List) ->
mk_constraint_list(conj, List).
@@ -3183,7 +3207,8 @@ order_fun_constraints(State) ->
order_fun_constraints([#constraint_ref{id = Id}|Tail], State) ->
Cs = state__get_cs(Id, State),
- {[NewCs], State1} = order_fun_constraints([Cs], [], [], State),
+ {[Cs1], State1} = order_fun_constraints([Cs], [], [], State),
+ NewCs = Cs1#constraint_list{deps = Cs#constraint_list.deps},
NewState = state__store_constrs(Id, NewCs, State1),
order_fun_constraints(Tail, NewState);
order_fun_constraints([], State) ->
@@ -3191,23 +3216,31 @@ order_fun_constraints([], State) ->
order_fun_constraints([#constraint_ref{} = C|Tail], Funs, Acc, State) ->
order_fun_constraints(Tail, [C|Funs], Acc, State);
-order_fun_constraints([#constraint_list{list = List, type = Type} = C|Tail],
+order_fun_constraints([#constraint_list{list = List,
+ type = Type,
+ masks = OldMasks} = C|Tail],
Funs, Acc, State) ->
- {NewList, NewState} =
- case Type of
- conj -> order_fun_constraints(List, [], [], State);
- disj ->
- FoldFun = fun(X, AccState) ->
- {[NewX], NewAccState} =
- order_fun_constraints([X], [], [], AccState),
- {NewX, NewAccState}
- end,
- lists:mapfoldl(FoldFun, State, List)
- end,
- C1 = update_constraint_list(C, NewList),
- Masks = calculate_masks(NewList, 1, []),
- NewAcc = [update_masks(C1, Masks)|Acc],
- order_fun_constraints(Tail, Funs, NewAcc, NewState);
+ case OldMasks of
+ undefined ->
+ {NewList, NewState} =
+ case Type of
+ conj -> order_fun_constraints(List, [], [], State);
+ disj ->
+ FoldFun = fun(X, AccState) ->
+ {[NewX], NewAccState} =
+ order_fun_constraints([X], [], [], AccState),
+ {NewX, NewAccState}
+ end,
+ lists:mapfoldl(FoldFun, State, List)
+ end,
+ NewList2 = reset_deps(NewList, State),
+ C1 = update_constraint_list(C, NewList2),
+ Masks = calculate_masks(NewList, 1, []),
+ NewAcc = [update_masks(C1, Masks)|Acc],
+ order_fun_constraints(Tail, Funs, NewAcc, NewState);
+ M when is_map(M) ->
+ order_fun_constraints(Tail, Funs, [C|Acc], State)
+ end;
order_fun_constraints([#constraint{} = C|Tail], Funs, Acc, State) ->
order_fun_constraints(Tail, Funs, [C|Acc], State);
order_fun_constraints([], Funs, Acc, State) ->
@@ -3217,6 +3250,18 @@ order_fun_constraints([], Funs, Acc, State) ->
update_masks(C, Masks) ->
C#constraint_list{masks = Masks}.
+reset_deps(ConstrList, #state{solvers = Solvers}) ->
+ case lists:member(v1, Solvers) of
+ true ->
+ ConstrList;
+ false ->
+ [reset_deps(Constr) || Constr <- ConstrList]
+ end.
+
+reset_deps(#constraint{}=C) -> C#constraint{deps = []};
+reset_deps(#constraint_list{}=C) -> C#constraint_list{deps = []};
+reset_deps(#constraint_ref{}=C) -> C#constraint_ref{deps = []}.
+
calculate_masks([C|Cs], I, L0) ->
calculate_masks(Cs, I+1, [{V, I} || V <- get_deps(C)] ++ L0);
calculate_masks([], _I, L) ->
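
order_fun_constraints/4 above now treats masks =:= 'undefined' as "not yet ordered": a constraint list is ordered and its masks computed at most once, and (outside the v1 solver) reset_deps/2 clears the deps of the inner constraints afterwards, since only the masks are needed from then on. A toy version of the run-once guard, with hypothetical record and field names:

-record(clist_sketch, {list = [], masks = undefined}).

%% Sketch: the expensive pass runs only while masks is still 'undefined';
%% once a mask map has been stored the list is passed through untouched.
maybe_order(#clist_sketch{masks = undefined, list = L} = C) ->
    Masks = maps:from_list(lists:zip(L, lists:seq(1, length(L)))),
    C#clist_sketch{list = lists:sort(L), masks = Masks};
maybe_order(#clist_sketch{masks = M} = C) when is_map(M) ->
    C.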
diff --git a/lib/dialyzer/src/dialyzer_utils.erl b/lib/dialyzer/src/dialyzer_utils.erl
index 8480129dab..9eaf95c1a2 100644
--- a/lib/dialyzer/src/dialyzer_utils.erl
+++ b/lib/dialyzer/src/dialyzer_utils.erl
@@ -37,9 +37,9 @@
get_fun_meta_info/3,
is_suppressed_fun/2,
is_suppressed_tag/3,
- merge_records/2,
pp_hook/0,
process_record_remote_types/1,
+ merge_types/2,
sets_filter/2,
src_compiler_opts/0,
refold_pattern/1,
@@ -188,14 +188,13 @@ get_core_from_abstract_code(AbstrCode, Opts) ->
%% ============================================================================
-type type_table() :: erl_types:type_table().
--type mod_records() :: dict:dict(module(), type_table()).
-spec get_record_and_type_info(abstract_code()) ->
{'ok', type_table()} | {'error', string()}.
get_record_and_type_info(AbstractCode) ->
Module = get_module(AbstractCode),
- get_record_and_type_info(AbstractCode, Module, dict:new()).
+ get_record_and_type_info(AbstractCode, Module, maps:new()).
-spec get_record_and_type_info(abstract_code(), module(), type_table()) ->
{'ok', type_table()} | {'error', string()}.
@@ -208,7 +207,7 @@ get_record_and_type_info([{attribute, A, record, {Name, Fields0}}|Left],
{ok, Fields} = get_record_fields(Fields0, RecDict),
Arity = length(Fields),
FN = {File, erl_anno:line(A)},
- NewRecDict = dict:store({record, Name}, {FN, [{Arity,Fields}]}, RecDict),
+ NewRecDict = maps:put({record, Name}, {FN, [{Arity,Fields}]}, RecDict),
get_record_and_type_info(Left, Module, NewRecDict, File);
get_record_and_type_info([{attribute, A, type, {{record, Name}, Fields0, []}}
|Left], Module, RecDict, File) ->
@@ -216,7 +215,7 @@ get_record_and_type_info([{attribute, A, type, {{record, Name}, Fields0, []}}
{ok, Fields} = get_record_fields(Fields0, RecDict),
Arity = length(Fields),
FN = {File, erl_anno:line(A)},
- NewRecDict = dict:store({record, Name}, {FN, [{Arity, Fields}]}, RecDict),
+ NewRecDict = maps:put({record, Name}, {FN, [{Arity, Fields}]}, RecDict),
get_record_and_type_info(Left, Module, NewRecDict, File);
get_record_and_type_info([{attribute, A, Attr, {Name, TypeForm}}|Left],
Module, RecDict, File)
@@ -256,9 +255,9 @@ add_new_type(TypeOrOpaque, Name, TypeForm, ArgForms, Module, FN,
false ->
try erl_types:t_var_names(ArgForms) of
ArgNames ->
- dict:store({TypeOrOpaque, Name, Arity},
- {{Module, FN, TypeForm, ArgNames},
- erl_types:t_any()}, RecDict)
+ maps:put({TypeOrOpaque, Name, Arity},
+ {{Module, FN, TypeForm, ArgNames},
+ erl_types:t_any()}, RecDict)
catch
_:_ ->
throw({error, flat_format("Type declaration for ~w does not "
@@ -293,15 +292,14 @@ get_record_fields([], _RecDict, Acc) ->
%% The field types are cached. Used during analysis when handling records.
process_record_remote_types(CServer) ->
- TempRecords = dialyzer_codeserver:get_temp_records(CServer),
ExpTypes = dialyzer_codeserver:get_exported_types(CServer),
- Cache = erl_types:cache__new(),
- {TempRecords1, Cache1} =
- process_opaque_types0(TempRecords, ExpTypes, Cache),
- %% A cache (not the field type cache) is used for speeding things up a bit.
+ Mods = dialyzer_codeserver:all_temp_modules(CServer),
+ process_opaque_types0(Mods, CServer, ExpTypes),
VarTable = erl_types:var_table__new(),
+ RecordTable = dialyzer_codeserver:get_temp_records_table(CServer),
ModuleFun =
- fun({Module, Record}, C0) ->
+ fun(Module) ->
+ RecordMap = dialyzer_codeserver:lookup_temp_mod_records(Module, CServer),
RecordFun =
fun({Key, Value}, C2) ->
case Key of
@@ -314,7 +312,7 @@ process_record_remote_types(CServer) ->
{FieldT, C6} =
erl_types:t_from_form
(Field, ExpTypes, Site,
- TempRecords1, VarTable,
+ RecordTable, VarTable,
C5),
{{FieldName, Field, FieldT}, C6}
end, C4, Fields),
@@ -327,29 +325,31 @@ process_record_remote_types(CServer) ->
_Other -> {{Key, Value}, C2}
end
end,
- {RecordList, C1} =
- lists:mapfoldl(RecordFun, C0, dict:to_list(Record)),
- {{Module, dict:from_list(RecordList)}, C1}
+ Cache = erl_types:cache__new(),
+ {RecordList, _NewCache} =
+ lists:mapfoldl(RecordFun, Cache, maps:to_list(RecordMap)),
+ dialyzer_codeserver:store_temp_records(Module,
+ maps:from_list(RecordList),
+ CServer)
end,
- {NewRecordsList, C1} =
- lists:mapfoldl(ModuleFun, Cache1, dict:to_list(TempRecords1)),
- NewRecords = dict:from_list(NewRecordsList),
- _C8 = check_record_fields(NewRecords, ExpTypes, C1),
- dialyzer_codeserver:finalize_records(NewRecords, CServer).
+ lists:foreach(ModuleFun, Mods),
+ check_record_fields(Mods, CServer, ExpTypes),
+ dialyzer_codeserver:finalize_records(CServer).
%% erl_types:t_from_form() substitutes the declaration of opaque types
%% for the expanded type in some cases. To make sure the initial type,
%% any(), is not used, the expansion is done twice.
%% XXX: Recursive opaque types are not handled well.
-process_opaque_types0(TempRecords0, TempExpTypes, Cache) ->
- {TempRecords1, NewCache} =
- process_opaque_types(TempRecords0, TempExpTypes, Cache),
- process_opaque_types(TempRecords1, TempExpTypes, NewCache).
+process_opaque_types0(AllModules, CServer, TempExpTypes) ->
+ process_opaque_types(AllModules, CServer, TempExpTypes),
+ process_opaque_types(AllModules, CServer, TempExpTypes).
-process_opaque_types(TempRecords, TempExpTypes, Cache) ->
+process_opaque_types(AllModules, CServer, TempExpTypes) ->
VarTable = erl_types:var_table__new(),
+ RecordTable = dialyzer_codeserver:get_temp_records_table(CServer),
ModuleFun =
- fun({Module, Record}, C0) ->
+ fun(Module) ->
+ RecordMap = dialyzer_codeserver:lookup_temp_mod_records(Module, CServer),
RecordFun =
fun({Key, Value}, C2) ->
case Key of
@@ -358,31 +358,32 @@ process_opaque_types(TempRecords, TempExpTypes, Cache) ->
Site = {type, {Module, Name, NArgs}},
{Type, C3} =
erl_types:t_from_form(Form, TempExpTypes, Site,
- TempRecords, VarTable, C2),
+ RecordTable, VarTable, C2),
{{Key, {F, Type}}, C3};
_Other -> {{Key, Value}, C2}
end
end,
- {RecordList, C1} =
- lists:mapfoldl(RecordFun, C0, dict:to_list(Record)),
- {{Module, dict:from_list(RecordList)}, C1}
- %% dict:map(RecordFun, Record)
+ C0 = erl_types:cache__new(),
+ {RecordList, _NewCache} =
+ lists:mapfoldl(RecordFun, C0, maps:to_list(RecordMap)),
+ dialyzer_codeserver:store_temp_records(Module,
+ maps:from_list(RecordList),
+ CServer)
end,
- {TempRecordList, NewCache} =
- lists:mapfoldl(ModuleFun, Cache, dict:to_list(TempRecords)),
- {dict:from_list(TempRecordList), NewCache}.
- %% dict:map(ModuleFun, TempRecords).
+ lists:foreach(ModuleFun, AllModules).
-check_record_fields(Records, TempExpTypes, Cache) ->
+check_record_fields(AllModules, CServer, TempExpTypes) ->
VarTable = erl_types:var_table__new(),
+ RecordTable = dialyzer_codeserver:get_temp_records_table(CServer),
CheckFun =
- fun({Module, Element}, C0) ->
+ fun(Module) ->
CheckForm = fun(Form, Site, C1) ->
erl_types:t_check_record_fields(Form, TempExpTypes,
- Site, Records,
+ Site, RecordTable,
VarTable, C1)
end,
- ElemFun =
+ RecordMap = dialyzer_codeserver:lookup_temp_mod_records(Module, CServer),
+ RecordFun =
fun({Key, Value}, C2) ->
case Key of
{record, Name} ->
@@ -403,9 +404,10 @@ check_record_fields(Records, TempExpTypes, Cache) ->
msg_with_position(Fun, FileLine)
end
end,
- lists:foldl(ElemFun, C0, dict:to_list(Element))
+ C0 = erl_types:cache__new(),
+ _ = lists:foldl(RecordFun, C0, maps:to_list(RecordMap))
end,
- lists:foldl(CheckFun, Cache, dict:to_list(Records)).
+ lists:foreach(CheckFun, AllModules).
msg_with_position(Fun, FileLine) ->
try Fun()
@@ -417,10 +419,37 @@ msg_with_position(Fun, FileLine) ->
throw({error, NewMsg})
end.
--spec merge_records(mod_records(), mod_records()) -> mod_records().
+-spec merge_types(codeserver(), dialyzer_plt:plt()) -> codeserver().
-merge_records(NewRecords, OldRecords) ->
- dict:merge(fun(_Key, NewVal, _OldVal) -> NewVal end, NewRecords, OldRecords).
+merge_types(CServer, Plt) ->
+ AllNewModules = dialyzer_codeserver:all_temp_modules(CServer),
+ AllNewModulesSet = sets:from_list(AllNewModules),
+ AllOldModulesSet = dialyzer_plt:all_modules(Plt),
+ AllModulesSet = sets:union(AllNewModulesSet, AllOldModulesSet),
+ ModuleFun =
+ fun(Module) ->
+ KeepOldFun =
+ fun() ->
+ case dialyzer_plt:get_module_types(Plt, Module) of
+ none -> no;
+ {value, OldRecords} ->
+ case sets:is_element(Module, AllNewModulesSet) of
+ true -> no;
+ false -> {yes, OldRecords}
+ end
+ end
+ end,
+ Records =
+ case KeepOldFun() of
+ no ->
+ dialyzer_codeserver:lookup_temp_mod_records(Module, CServer);
+ {yes, OldRecords} ->
+ OldRecords
+ end,
+ dialyzer_codeserver:store_temp_records(Module, Records, CServer)
+ end,
+ lists:foreach(ModuleFun, sets:to_list(AllModulesSet)),
+ CServer.
%% ============================================================================
%%
@@ -428,17 +457,17 @@ merge_records(NewRecords, OldRecords) ->
%%
%% ============================================================================
--type spec_dict() :: dict:dict().
--type callback_dict() :: dict:dict().
+-type spec_map() :: dialyzer_codeserver:contracts().
+-type callback_map() :: dialyzer_codeserver:contracts().
-spec get_spec_info(module(), abstract_code(), type_table()) ->
- {'ok', spec_dict(), callback_dict()} | {'error', string()}.
+ {'ok', spec_map(), callback_map()} | {'error', string()}.
-get_spec_info(ModName, AbstractCode, RecordsDict) ->
+get_spec_info(ModName, AbstractCode, RecordsMap) ->
OptionalCallbacks0 = get_optional_callbacks(AbstractCode, ModName),
OptionalCallbacks = gb_sets:from_list(OptionalCallbacks0),
- get_spec_info(AbstractCode, dict:new(), dict:new(),
- RecordsDict, ModName, OptionalCallbacks, "nofile").
+ get_spec_info(AbstractCode, maps:new(), maps:new(),
+ RecordsMap, ModName, OptionalCallbacks, "nofile").
get_optional_callbacks(Abs, ModName) ->
[{ModName, F, A} || {F, A} <- get_optional_callbacks(Abs)].
@@ -456,7 +485,7 @@ get_optional_callbacks(Abs) ->
%% are erl_types:erl_type()
get_spec_info([{attribute, Anno, Contract, {Id, TypeSpec}}|Left],
- SpecDict, CallbackDict, RecordsDict, ModName, OptCb, File)
+ SpecMap, CallbackMap, RecordsMap, ModName, OptCb, File)
when ((Contract =:= 'spec') or (Contract =:= 'callback')),
is_list(TypeSpec) ->
Ln = erl_anno:line(Anno),
@@ -465,24 +494,24 @@ get_spec_info([{attribute, Anno, Contract, {Id, TypeSpec}}|Left],
{F, A} -> {ModName, F, A}
end,
Xtra = [optional_callback || gb_sets:is_member(MFA, OptCb)],
- ActiveDict =
+ ActiveMap =
case Contract of
- spec -> SpecDict;
- callback -> CallbackDict
+ spec -> SpecMap;
+ callback -> CallbackMap
end,
- try dict:find(MFA, ActiveDict) of
+ try maps:find(MFA, ActiveMap) of
error ->
SpecData = {TypeSpec, Xtra},
- NewActiveDict =
+ NewActiveMap =
dialyzer_contracts:store_tmp_contract(MFA, {File, Ln}, SpecData,
- ActiveDict, RecordsDict),
- {NewSpecDict, NewCallbackDict} =
+ ActiveMap, RecordsMap),
+ {NewSpecMap, NewCallbackMap} =
case Contract of
- spec -> {NewActiveDict, CallbackDict};
- callback -> {SpecDict, NewActiveDict}
+ spec -> {NewActiveMap, CallbackMap};
+ callback -> {SpecMap, NewActiveMap}
end,
- get_spec_info(Left, NewSpecDict, NewCallbackDict,
- RecordsDict, ModName, OptCb, File);
+ get_spec_info(Left, NewSpecMap, NewCallbackMap,
+ RecordsMap, ModName, OptCb, File);
{ok, {{OtherFile, L}, _D}} ->
{Mod, Fun, Arity} = MFA,
Msg = flat_format(" Contract/callback for function ~w:~w/~w "
@@ -495,16 +524,16 @@ get_spec_info([{attribute, Anno, Contract, {Id, TypeSpec}}|Left],
[Ln, Error])}
end;
get_spec_info([{attribute, _, file, {IncludeFile, _}}|Left],
- SpecDict, CallbackDict, RecordsDict, ModName, OptCb, _File) ->
- get_spec_info(Left, SpecDict, CallbackDict,
- RecordsDict, ModName, OptCb, IncludeFile);
-get_spec_info([_Other|Left], SpecDict, CallbackDict,
- RecordsDict, ModName, OptCb, File) ->
- get_spec_info(Left, SpecDict, CallbackDict,
- RecordsDict, ModName, OptCb, File);
-get_spec_info([], SpecDict, CallbackDict,
- _RecordsDict, _ModName, _OptCb, _File) ->
- {ok, SpecDict, CallbackDict}.
+ SpecMap, CallbackMap, RecordsMap, ModName, OptCb, _File) ->
+ get_spec_info(Left, SpecMap, CallbackMap,
+ RecordsMap, ModName, OptCb, IncludeFile);
+get_spec_info([_Other|Left], SpecMap, CallbackMap,
+ RecordsMap, ModName, OptCb, File) ->
+ get_spec_info(Left, SpecMap, CallbackMap,
+ RecordsMap, ModName, OptCb, File);
+get_spec_info([], SpecMap, CallbackMap,
+ _RecordsMap, _ModName, _OptCb, _File) ->
+ {ok, SpecMap, CallbackMap}.
-spec get_fun_meta_info(module(), abstract_code(), [dial_warn_tag()]) ->
dialyzer_codeserver:fun_meta_info() | {'error', string()}.
@@ -700,7 +729,7 @@ format_errors([]) ->
-spec format_sig(erl_types:erl_type()) -> string().
format_sig(Type) ->
- format_sig(Type, dict:new()).
+ format_sig(Type, maps:new()).
-spec format_sig(erl_types:erl_type(), type_table()) -> string().
@@ -952,9 +981,7 @@ label(Tree) ->
-spec parallelism() -> integer().
parallelism() ->
- CPUs = erlang:system_info(logical_processors_available),
- Schedulers = erlang:system_info(schedulers),
- min(CPUs, Schedulers).
+ erlang:system_info(schedulers_online).
-spec family([{K,V}]) -> [{K,[V]}].
diff --git a/lib/dialyzer/src/dialyzer_worker.erl b/lib/dialyzer/src/dialyzer_worker.erl
index 418c9798b3..af0f2e9e08 100644
--- a/lib/dialyzer/src/dialyzer_worker.erl
+++ b/lib/dialyzer/src/dialyzer_worker.erl
@@ -56,10 +56,14 @@ launch(Mode, Job, InitData, Coordinator) ->
%%--------------------------------------------------------------------
-init(#state{job = SCC, mode = Mode, init_data = InitData} = State) when
+init(#state{job = SCC, mode = Mode, init_data = InitData,
+ coordinator = Coordinator} = State) when
Mode =:= 'typesig'; Mode =:= 'dataflow' ->
- DependsOn = dialyzer_succ_typings:find_depends_on(SCC, InitData),
- ?debug("Deps ~p: ~p\n",[SCC, DependsOn]),
+ DependsOnSCCs = dialyzer_succ_typings:find_depends_on(SCC, InitData),
+ ?debug("~w: Deps ~p: ~p\n", [self(), SCC, DependsOnSCCs]),
+ Pids = dialyzer_coordinator:sccs_to_pids(DependsOnSCCs, Coordinator),
+ ?debug("~w: PidsDeps ~p\n", [self(), Pids]),
+ DependsOn = [{Pid, erlang:monitor(process, Pid)} || Pid <- Pids],
loop(updating, State#state{depends_on = DependsOn});
init(#state{mode = Mode} = State) when
Mode =:= 'compile'; Mode =:= 'warnings' ->
@@ -67,7 +71,7 @@ init(#state{mode = Mode} = State) when
loop(updating, #state{mode = Mode} = State) when
Mode =:= 'typesig'; Mode =:= 'dataflow' ->
- ?debug("Update: ~p\n",[State#state.job]),
+ ?debug("~w: Update: ~p\n", [self(), State#state.job]),
NextStatus =
case waits_more_success_typings(State) of
true -> waiting;
@@ -76,11 +80,11 @@ loop(updating, #state{mode = Mode} = State) when
loop(NextStatus, State);
loop(waiting, #state{mode = Mode} = State) when
Mode =:= 'typesig'; Mode =:= 'dataflow' ->
- ?debug("Wait: ~p\n",[State#state.job]),
+ ?debug("~w: Wait: ~p\n", [self(), State#state.job]),
NewState = wait_for_success_typings(State),
loop(updating, NewState);
loop(running, #state{mode = 'compile'} = State) ->
- dialyzer_coordinator:request_activation(State#state.coordinator),
+ request_activation(State),
?debug("Compile: ~s\n",[State#state.job]),
Result =
case start_compilation(State) of
@@ -92,51 +96,28 @@ loop(running, #state{mode = 'compile'} = State) ->
end,
report_to_coordinator(Result, State);
loop(running, #state{mode = 'warnings'} = State) ->
- dialyzer_coordinator:request_activation(State#state.coordinator),
+ request_activation(State),
?debug("Warning: ~s\n",[State#state.job]),
Result = collect_warnings(State),
report_to_coordinator(Result, State);
loop(running, #state{mode = Mode} = State) when
Mode =:= 'typesig'; Mode =:= 'dataflow' ->
request_activation(State),
- ?debug("Run: ~p\n",[State#state.job]),
+ ?debug("~w: Run: ~p\n", [self(), State#state.job]),
NotFixpoint = do_work(State),
- ok = broadcast_done(State),
report_to_coordinator(NotFixpoint, State).
waits_more_success_typings(#state{depends_on = Depends}) ->
Depends =/= [].
-broadcast_done(#state{job = SCC, init_data = InitData,
- coordinator = Coordinator}) ->
- RequiredBy = dialyzer_succ_typings:find_required_by(SCC, InitData),
- {Callers, Unknown} =
- dialyzer_coordinator:sccs_to_pids(RequiredBy, Coordinator),
- send_done(Callers, SCC),
- continue_broadcast_done(Unknown, SCC, Coordinator).
-
-send_done(Callers, SCC) ->
- ?debug("Sending ~p: ~p\n",[SCC, Callers]),
- SendSTFun = fun(PID) -> PID ! {done, SCC} end,
- lists:foreach(SendSTFun, Callers).
-
-continue_broadcast_done([], _SCC, _Coordinator) -> ok;
-continue_broadcast_done(Rest, SCC, Coordinator) ->
- %% This time limit should be greater than the time required
- %% by the coordinator to spawn all processes.
- timer:sleep(500),
- {Callers, Unknown} = dialyzer_coordinator:sccs_to_pids(Rest, Coordinator),
- send_done(Callers, SCC),
- continue_broadcast_done(Unknown, SCC, Coordinator).
-
wait_for_success_typings(#state{depends_on = DependsOn} = State) ->
receive
- {done, SCC} ->
- ?debug("GOT ~p: ~p\n",[State#state.job, SCC]),
- State#state{depends_on = DependsOn -- [SCC]}
+ {'DOWN', Ref, process, Pid, _Info} ->
+ ?debug("~w: ~p got DOWN: ~p\n", [self(), State#state.job, Pid]),
+ State#state{depends_on = DependsOn -- [{Pid, Ref}]}
after
5000 ->
- ?debug("Still Waiting ~p: ~p\n",[State#state.job, DependsOn]),
+ ?debug("~w: Still Waiting ~p:\n ~p\n", [self(), State#state.job, DependsOn]),
State
end.
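
Workers for 'typesig'/'dataflow' jobs now look up the pids of the SCCs they depend on (guaranteed to have been started already, see sccs_to_pids/2 above) and monitor them; since a worker process exits when its job is done, the 'DOWN' message doubles as the completion signal, replacing the old {done, SCC} broadcast and its retry loop. A standalone sketch of that pattern:

%% Sketch: wait for a set of worker processes to finish by monitoring them.
%% A normal exit and a crash are treated the same here; the caller only cares
%% that the dependency is gone.
wait_for_deps(Pids) ->
    wait_for_deps_loop([{Pid, erlang:monitor(process, Pid)} || Pid <- Pids]).

wait_for_deps_loop([]) ->
    ok;
wait_for_deps_loop(DependsOn) ->
    receive
        {'DOWN', Ref, process, Pid, _Reason} ->
            wait_for_deps_loop(DependsOn -- [{Pid, Ref}])
    end.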
@@ -150,7 +131,7 @@ do_work(#state{mode = Mode, job = Job, init_data = InitData}) ->
end.
report_to_coordinator(Result, #state{job = Job, coordinator = Coordinator}) ->
- ?debug("Done: ~p\n",[Job]),
+ ?debug("~w: Done: ~p\n",[self(), Job]),
dialyzer_coordinator:job_done(Job, Result, Coordinator).
start_compilation(#state{job = Job, init_data = InitData}) ->
diff --git a/lib/dialyzer/test/abstract_SUITE.erl b/lib/dialyzer/test/abstract_SUITE.erl
index 269db3e836..0e84dfab24 100644
--- a/lib/dialyzer/test/abstract_SUITE.erl
+++ b/lib/dialyzer/test/abstract_SUITE.erl
@@ -7,7 +7,7 @@
-include_lib("common_test/include/ct.hrl").
-include("dialyzer_test_constants.hrl").
--export([suite/0, all/0, init_per_suite/0, init_per_suite/1]).
+-export([suite/0, all/0, init_per_suite/0, init_per_suite/1, end_per_suite/1]).
-export([generated_case/1]).
suite() ->
@@ -24,6 +24,10 @@ init_per_suite(Config) ->
ok -> [{dialyzer_options, []}|Config]
end.
+end_per_suite(_Config) ->
+ %% This function is required since init_per_suite/1 exists.
+ ok.
+
generated_case(Config) when is_list(Config) ->
%% Equivalent to:
%%
diff --git a/lib/dialyzer/test/behaviour_SUITE_data/dialyzer_options b/lib/dialyzer/test/behaviour_SUITE_data/dialyzer_options
index cb6a88786e..365b4798c5 100644
--- a/lib/dialyzer/test/behaviour_SUITE_data/dialyzer_options
+++ b/lib/dialyzer/test/behaviour_SUITE_data/dialyzer_options
@@ -1,2 +1,2 @@
{dialyzer_options, []}.
-{time_limit, 2}.
+{time_limit, 5}.
diff --git a/lib/dialyzer/test/map_SUITE_data/dialyzer_options b/lib/dialyzer/test/map_SUITE_data/dialyzer_options
index 50991c9bc5..02425c33f2 100644
--- a/lib/dialyzer/test/map_SUITE_data/dialyzer_options
+++ b/lib/dialyzer/test/map_SUITE_data/dialyzer_options
@@ -1 +1,2 @@
{dialyzer_options, []}.
+{time_limit, 30}.
diff --git a/lib/dialyzer/test/map_SUITE_data/results/map_galore b/lib/dialyzer/test/map_SUITE_data/results/map_galore
index 6ea88f01f8..c34ba5cf30 100644
--- a/lib/dialyzer/test/map_SUITE_data/results/map_galore
+++ b/lib/dialyzer/test/map_SUITE_data/results/map_galore
@@ -20,9 +20,9 @@ map_galore.erl:186: The pattern #{'x':=2} can never match the type #{'x':=3}
map_galore.erl:187: The pattern #{'x':=3} can never match the type {'a','b','c'}
map_galore.erl:188: The pattern #{'x':=3} can never match the type #{'y':=3}
map_galore.erl:189: The pattern #{'x':=3} can never match the type #{'x':=[101 | 104 | 114 | 116,...]}
-map_galore.erl:2304: Cons will produce an improper list since its 2nd argument is {'b','a'}
-map_galore.erl:2304: The call maps:from_list(nonempty_improper_list({'a','b'},{'b','a'})) will never return since it differs in the 1st argument from the success typing arguments: ([{_,_}])
-map_galore.erl:2305: The call maps:from_list('a') will never return since it differs in the 1st argument from the success typing arguments: ([{_,_}])
-map_galore.erl:2306: The call maps:from_list(42) will never return since it differs in the 1st argument from the success typing arguments: ([{_,_}])
+map_galore.erl:2280: Cons will produce an improper list since its 2nd argument is {'b','a'}
+map_galore.erl:2280: The call maps:from_list(nonempty_improper_list({'a','b'},{'b','a'})) will never return since it differs in the 1st argument from the success typing arguments: ([{_,_}])
+map_galore.erl:2281: The call maps:from_list('a') will never return since it differs in the 1st argument from the success typing arguments: ([{_,_}])
+map_galore.erl:2282: The call maps:from_list(42) will never return since it differs in the 1st argument from the success typing arguments: ([{_,_}])
map_galore.erl:997: A key of type 'nonexisting' cannot exist in a map of type #{}
map_galore.erl:998: A key of type 'nonexisting' cannot exist in a map of type #{1:='a', 2:='b', 4:='d', 5:='e', float()=>'c'}
diff --git a/lib/dialyzer/test/map_SUITE_data/src/map_galore.erl b/lib/dialyzer/test/map_SUITE_data/src/map_galore.erl
index 2611241379..99eb73a5f6 100644
--- a/lib/dialyzer/test/map_SUITE_data/src/map_galore.erl
+++ b/lib/dialyzer/test/map_SUITE_data/src/map_galore.erl
@@ -2070,11 +2070,8 @@ t_bif_map_values(Config) when is_list(Config) ->
ok.
t_erlang_hash(Config) when is_list(Config) ->
-
ok = t_bif_erlang_phash2(),
ok = t_bif_erlang_phash(),
- ok = t_bif_erlang_hash(),
-
ok.
t_bif_erlang_phash2() ->
@@ -2117,27 +2114,6 @@ t_bif_erlang_phash() ->
2620391445 = erlang:phash(M2,Sz), % 3590546636
ok.
-t_bif_erlang_hash() ->
- Sz = 1 bsl 27 - 1,
- 39684169 = erlang:hash(#{},Sz), % 5158
- 33673142 = erlang:hash(#{ a => 1, "a" => 2, <<"a">> => 3, {a,b} => 4 },Sz), % 71555838
- 95337869 = erlang:hash(#{ 1 => a, 2 => "a", 3 => <<"a">>, 4 => {a,b} },Sz), % 5497225
- 108959561 = erlang:hash(#{ 1 => a },Sz), % 126071654
- 59623150 = erlang:hash(#{ a => 1 },Sz), % 126426236
-
- 42775386 = erlang:hash(#{{} => <<>>},Sz), % 101655720
- 71692856 = erlang:hash(#{<<>> => {}},Sz), % 101655720
-
- M0 = #{ a => 1, "key" => <<"value">> },
- M1 = maps:remove("key",M0),
- M2 = M1#{ "key" => <<"value">> },
-
- 70254632 = erlang:hash(M0,Sz), % 38260486
- 59623150 = erlang:hash(M1,Sz), % 126426236
- 70254632 = erlang:hash(M2,Sz), % 38260486
- ok.
-
-
t_map_encode_decode(Config) when is_list(Config) ->
<<131,116,0,0,0,0>> = erlang:term_to_binary(#{}),
Pairs = [
diff --git a/lib/dialyzer/test/opaque_SUITE_data/dialyzer_options b/lib/dialyzer/test/opaque_SUITE_data/dialyzer_options
index 06ed52043a..cb301ff6a1 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/dialyzer_options
+++ b/lib/dialyzer/test/opaque_SUITE_data/dialyzer_options
@@ -1,2 +1,2 @@
{dialyzer_options, [{warnings, [no_unused, no_return]}]}.
-{time_limit, 20}.
+{time_limit, 40}.
diff --git a/lib/dialyzer/test/opaque_SUITE_data/results/weird b/lib/dialyzer/test/opaque_SUITE_data/results/weird
new file mode 100644
index 0000000000..d7f57cd152
--- /dev/null
+++ b/lib/dialyzer/test/opaque_SUITE_data/results/weird
@@ -0,0 +1,6 @@
+
+weird_warning1.erl:15: Matching of pattern {'a', Dict} tagged with a record name violates the declared type of #b{q::queue:queue(_)}
+weird_warning2.erl:13: Matching of pattern <{'b', Queue}, Key, Value> tagged with a record name violates the declared type of <#a{d::dict:dict(_,_)},'my_key','my_value'>
+weird_warning3.erl:14: The call weird_warning3:add_element(#a{d::queue:queue(_)},'my_key','my_value') does not have a term of type #a{d::dict:dict(_,_)} | #b{q::queue:queue(_)} (with opaque subterms) as 1st argument
+weird_warning3.erl:16: The attempt to match a term of type #a{d::queue:queue(_)} against the pattern {'a', Dict} breaks the opacity of queue:queue(_)
+weird_warning3.erl:18: Matching of pattern {'b', Queue} tagged with a record name violates the declared type of #a{d::queue:queue(_)}
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/recrec/dialyzer_dataflow.erl b/lib/dialyzer/test/opaque_SUITE_data/src/recrec/dialyzer_dataflow.erl
index 6a5b593db0..53b08cc5c9 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/src/recrec/dialyzer_dataflow.erl
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/recrec/dialyzer_dataflow.erl
@@ -1340,7 +1340,7 @@ do_clause(C, Arg, ArgType0, OrigArgType, Map, State) ->
{{Tag, PatTypes}, false};
false ->
%% Try to find out if this is a default clause in a list
- %% comprehension and supress this. A real Hack(tm)
+ %% comprehension and suppress this. A real Hack(tm)
Force0 =
case is_compiler_generated(cerl:get_ann(C)) of
true ->
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning1.erl b/lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning1.erl
new file mode 100644
index 0000000000..094138e72b
--- /dev/null
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning1.erl
@@ -0,0 +1,18 @@
+-module(weird_warning1).
+-export([public_func/0]).
+
+-record(a, {
+ d = dict:new() :: dict:dict()
+ }).
+
+-record(b, {
+ q = queue:new() :: queue:queue()
+ }).
+
+public_func() ->
+ add_element(#b{}, my_key, my_value).
+
+add_element(#a{d = Dict}, Key, Value) ->
+ dict:store(Key, Value, Dict);
+add_element(#b{q = Queue}, Key, Value) ->
+ queue:in({Key, Value}, Queue).
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning2.erl b/lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning2.erl
new file mode 100644
index 0000000000..4e4512157b
--- /dev/null
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning2.erl
@@ -0,0 +1,14 @@
+-module(weird_warning2).
+-export([public_func/0]).
+
+-record(a, {d = dict:new() :: dict:dict()}).
+
+-record(b, {q = queue:new() :: queue:queue()}).
+
+public_func() ->
+ add_element(#a{}, my_key, my_value).
+
+add_element(#a{d = Dict}, Key, Value) ->
+ dict:store(Key, Value, Dict);
+add_element(#b{q = Queue}, Key, Value) ->
+ queue:in({Key, Value}, Queue).
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning3.erl b/lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning3.erl
new file mode 100644
index 0000000000..b70ca645cb
--- /dev/null
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/weird/weird_warning3.erl
@@ -0,0 +1,19 @@
+-module(weird_warning3).
+-export([public_func/0]).
+
+-record(a, {
+ d = dict:new() :: dict:dict()
+ }).
+
+-record(b, {
+ q = queue:new() :: queue:queue()
+ }).
+
+public_func() ->
+ %% Notice that t_to_string() will create "#a{d::queue:queue(_)}".
+ add_element({a, queue:new()}, my_key, my_value).
+
+add_element(#a{d = Dict}, Key, Value) ->
+ dict:store(Key, Value, Dict);
+add_element(#b{q = Queue}, Key, Value) ->
+ queue:in({Key, Value}, Queue).
diff --git a/lib/dialyzer/test/options1_SUITE_data/results/compiler b/lib/dialyzer/test/options1_SUITE_data/results/compiler
index 30b6f4814a..cbb5115c91 100644
--- a/lib/dialyzer/test/options1_SUITE_data/results/compiler
+++ b/lib/dialyzer/test/options1_SUITE_data/results/compiler
@@ -31,6 +31,8 @@ cerl_inline.erl:2756: The pattern <{F, _L, D}, Vs> can never match the type <[1.
compile.erl:788: The pattern {'error', Es} can never match the type {'ok',<<_:64,_:_*8>>}
core_lint.erl:473: The pattern <{'c_atom', _, 'all'}, 'binary', _Def, St> can never match the type <_,#c_nil{} | {'c_atom' | 'c_char' | 'c_float' | 'c_int' | 'c_string' | 'c_tuple',_,_} | #c_cons{hd::#c_nil{} | {'c_atom' | 'c_char' | 'c_float' | 'c_int' | 'c_string' | 'c_tuple',_,_} | #c_cons{hd::{_,_} | {_,_,_} | {_,_,_,_},tl::{_,_} | {_,_,_} | {_,_,_,_}},tl::#c_nil{} | {'c_atom' | 'c_char' | 'c_float' | 'c_int' | 'c_string' | 'c_tuple',_,_} | #c_cons{hd::{_,_} | {_,_,_} | {_,_,_,_},tl::{_,_} | {_,_,_} | {_,_,_,_}}},[any()],_>
core_lint.erl:505: The pattern <_Req, 'unknown', St> can never match the type <non_neg_integer(),non_neg_integer(),_>
+sys_pre_expand.erl:625: Call to missing or unexported function erlang:hash/2
v3_codegen.erl:1569: The call v3_codegen:load_reg_1(V::any(),I::0,Rs::any(),pos_integer()) will never return since it differs in the 4th argument from the success typing arguments: (any(),0,maybe_improper_list(),0)
v3_codegen.erl:1571: The call v3_codegen:load_reg_1(V::any(),I::0,[],pos_integer()) will never return since it differs in the 4th argument from the success typing arguments: (any(),0,maybe_improper_list(),0)
v3_core.erl:646: Matching of pattern {'iprimop', _, _, _} tagged with a record name violates the declared type of #c_nil{anno::[any(),...]} | {'c_atom' | 'c_char' | 'c_float' | 'c_int' | 'c_string' | 'c_tuple' | 'c_var' | 'ibinary' | 'icatch' | 'ireceive1',[any(),...] | {_,_,_,_},_} | #c_cons{anno::[any(),...]} | #c_fname{anno::[any(),...]} | #iletrec{anno::{_,_,_,_},defs::[any(),...],body::[any(),...]} | #icase{anno::{_,_,_,_},args::[any()],clauses::[any()],fc::{_,_,_,_,_,_}} | #ireceive2{anno::{_,_,_,_},clauses::[any()],action::[any()]} | #ifun{anno::{_,_,_,_},id::[any(),...],vars::[any()],clauses::[any(),...],fc::{_,_,_,_,_,_}} | #imatch{anno::{_,_,_,_},guard::[],fc::{_,_,_,_,_,_}} | #itry{anno::{_,_,_,_},args::[any()],vars::[any(),...],body::[any(),...],evars::[any(),...],handler::[any(),...]}
+v3_kernel.erl:1381: Call to missing or unexported function erlang:hash/2
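The two warnings added above (and the matching one for mnesia further down) come from erlang:hash/2 having been removed from ERTS. Purely as an illustration of the usual migration, and not part of this patch: callers can switch to erlang:phash2/2, bearing in mind that it returns different values (in the range 0..Range-1 rather than 1..Range), so any stored hash values have to be recomputed.

-module(hash_migration_sketch).
-export([bucket/2]).

%% Map a term to a bucket index. erlang:phash2/2 returns 0..Range-1,
%% whereas the removed erlang:hash/2 returned 1..Range.
bucket(Term, Range) when is_integer(Range), Range > 0 ->
    erlang:phash2(Term, Range).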
diff --git a/lib/dialyzer/test/options1_SUITE_data/src/compiler/beam_disasm.erl b/lib/dialyzer/test/options1_SUITE_data/src/compiler/beam_disasm.erl
index 0108f91b7f..cf2cbe8e2b 100644
--- a/lib/dialyzer/test/options1_SUITE_data/src/compiler/beam_disasm.erl
+++ b/lib/dialyzer/test/options1_SUITE_data/src/compiler/beam_disasm.erl
@@ -565,7 +565,7 @@ resolve_inst({make_fun2,Args},_,_,Lbls,Lambdas) ->
[OldIndex] = resolve_args(Args),
{value,{OldIndex,{F,A,_Lbl,_Index,NumFree,OldUniq}}} =
lists:keysearch(OldIndex, 1, Lambdas),
- [{_,{M,_,_}}|_] = Lbls, % Slighly kludgy.
+ [{_,{M,_,_}}|_] = Lbls, % Slightly kludgy.
{make_fun2,{M,F,A},OldIndex,OldUniq,NumFree};
resolve_inst(Instr, Imports, Str, Lbls, _Lambdas) ->
resolve_inst(Instr, Imports, Str, Lbls).
diff --git a/lib/dialyzer/test/options1_SUITE_data/src/compiler/cerl_inline.erl b/lib/dialyzer/test/options1_SUITE_data/src/compiler/cerl_inline.erl
index 95d2076ccf..8fca202b8c 100644
--- a/lib/dialyzer/test/options1_SUITE_data/src/compiler/cerl_inline.erl
+++ b/lib/dialyzer/test/options1_SUITE_data/src/compiler/cerl_inline.erl
@@ -951,7 +951,7 @@ i_letrec(Es, B, Xs, Ctxt, Ren, Env, S) ->
%% Finally, we create new letrec-bindings for any and all
%% residualised definitions. All referenced functions should have
- %% been visited; the call to `visit' below is expected to retreive a
+ %% been visited; the call to `visit' below is expected to retrieve a
%% cached expression.
Rs1 = keep_referenced(Rs, S4),
{Es1, S5} = mapfoldl(fun (R, S) ->
@@ -997,7 +997,7 @@ i_apply(E, Ctxt, Ren, Env, S) ->
%% location could be recycled after the flag has been tested, but
%% there is no real advantage to that, because in practice, only
%% 4-5% of all created store locations will ever be reused, while
- %% there will be a noticable overhead for managing the free list.)
+ %% there will be a noticeable overhead for managing the free list.)
case st__get_app_inlined(L, S3) of
true ->
%% The application was inlined, so we have the final
@@ -2007,7 +2007,7 @@ residualize_operand(Opnd, E, S) ->
case st__get_opnd_effect(Opnd#opnd.loc, S) of
true ->
%% The operand has not been visited, so we do that now, but
- %% in `effect' context. (Waddell's algoritm does some stuff
+ %% in `effect' context. (Waddell's algorithm does some stuff
%% here to account specially for the operand size, which
%% appears unnecessary.)
{E1, S1} = i(Opnd#opnd.expr, effect, Opnd#opnd.ren,
diff --git a/lib/dialyzer/test/options1_SUITE_data/src/compiler/rec_env.erl b/lib/dialyzer/test/options1_SUITE_data/src/compiler/rec_env.erl
index 01c2512397..76ae871aee 100644
--- a/lib/dialyzer/test/options1_SUITE_data/src/compiler/rec_env.erl
+++ b/lib/dialyzer/test/options1_SUITE_data/src/compiler/rec_env.erl
@@ -469,7 +469,7 @@ get(Key, Env) ->
-define(MINIMUM_RANGE, 1000).
-define(START_RANGE_FACTOR, 50).
-define(MAX_RETRIES, 2). % retries before enlarging range
--define(ENLARGE_FACTOR, 10). % range enlargment factor
+-define(ENLARGE_FACTOR, 10). % range enlargement factor
-ifdef(DEBUG).
%% If you want to use these process dictionary counters, make sure to
diff --git a/lib/dialyzer/test/options1_SUITE_data/src/compiler/sys_pre_expand.erl b/lib/dialyzer/test/options1_SUITE_data/src/compiler/sys_pre_expand.erl
index 49a95a95e5..69139cd568 100644
--- a/lib/dialyzer/test/options1_SUITE_data/src/compiler/sys_pre_expand.erl
+++ b/lib/dialyzer/test/options1_SUITE_data/src/compiler/sys_pre_expand.erl
@@ -316,7 +316,7 @@ record_test_in_guard(Line, Term, Name, Vs, St) ->
%% code bloat.)
%% (4) Xref may be run on the abstract code, so the name in the
%% abstract code must be erlang:is_record/3.
- %% (5) To achive both (3) and (4) at the same time, set the name
+ %% (5) To achieve both (3) and (4) at the same time, set the name
%% here to erlang:is_record/3, but mark it as compiler-generated.
%% The v3_core pass will change the name to erlang:internal_is_record/3.
Fs = record_fields(Name, St),
diff --git a/lib/dialyzer/test/options1_SUITE_data/src/compiler/v3_codegen.erl b/lib/dialyzer/test/options1_SUITE_data/src/compiler/v3_codegen.erl
index 33a322b466..acb49b5faf 100644
--- a/lib/dialyzer/test/options1_SUITE_data/src/compiler/v3_codegen.erl
+++ b/lib/dialyzer/test/options1_SUITE_data/src/compiler/v3_codegen.erl
@@ -1667,7 +1667,7 @@ bs_function({function,Name,Arity,CLabel,Asm0}=Func) ->
%%%
%%% Pass 1: Found out which bs_restore's that are needed. For now we assume
-%%% that a bs_restore is needed unless it is directly preceeded by a bs_save.
+%%% that a bs_restore is needed unless it is directly preceded by a bs_save.
%%%
bs_needed([{bs_save,Name},{bs_restore,Name}|T], N, _BsUsed, Dict) ->
diff --git a/lib/dialyzer/test/plt_SUITE.erl b/lib/dialyzer/test/plt_SUITE.erl
index 460d4e2240..ba153c1c27 100644
--- a/lib/dialyzer/test/plt_SUITE.erl
+++ b/lib/dialyzer/test/plt_SUITE.erl
@@ -26,6 +26,8 @@ build_plt(Config) ->
end.
beam_tests(Config) when is_list(Config) ->
+ PrivDir = ?config(priv_dir, Config),
+ Plt = filename:join(PrivDir, "beam_tests.plt"),
Prog = <<"
-module(no_auto_import).
@@ -42,10 +44,12 @@ beam_tests(Config) when is_list(Config) ->
">>,
Opts = [no_auto_import],
{ok, BeamFile} = compile(Config, Prog, no_auto_import, Opts),
- [] = run_dialyzer(plt_build, [BeamFile], []),
+ [] = run_dialyzer(plt_build, [BeamFile], [{output_plt, Plt}]),
ok.
run_plt_check(Config) when is_list(Config) ->
+ PrivDir = ?config(priv_dir, Config),
+ Plt = filename:join(PrivDir, "run_plt_check.plt"),
Mod1 = <<"
-module(run_plt_check1).
">>,
@@ -56,7 +60,7 @@ run_plt_check(Config) when is_list(Config) ->
{ok, BeamFile1} = compile(Config, Mod1, run_plt_check1, []),
{ok, BeamFile2} = compile(Config, Mod2A, run_plt_check2, []),
- [] = run_dialyzer(plt_build, [BeamFile1, BeamFile2], []),
+ [] = run_dialyzer(plt_build, [BeamFile1, BeamFile2], [{output_plt, Plt}]),
Mod2B = <<"
-module(run_plt_check2).
@@ -70,11 +74,13 @@ run_plt_check(Config) when is_list(Config) ->
% callgraph warning as run_plt_check2:call/1 makes a call to unexported
% function run_plt_check1:call/1.
- [_] = run_dialyzer(plt_check, [], []),
+ [_] = run_dialyzer(plt_check, [], [{init_plt, Plt}]),
ok.
run_succ_typings(Config) when is_list(Config) ->
+ PrivDir = ?config(priv_dir, Config),
+ Plt = filename:join(PrivDir, "run_succ_typings.plt"),
Mod1A = <<"
-module(run_succ_typings1).
@@ -84,7 +90,7 @@ run_succ_typings(Config) when is_list(Config) ->
">>,
{ok, BeamFile1} = compile(Config, Mod1A, run_succ_typings1, []),
- [] = run_dialyzer(plt_build, [BeamFile1], []),
+ [] = run_dialyzer(plt_build, [BeamFile1], [{output_plt, Plt}]),
Mod1B = <<"
-module(run_succ_typings1).
@@ -107,9 +113,11 @@ run_succ_typings(Config) when is_list(Config) ->
{ok, BeamFile2} = compile(Config, Mod2, run_succ_typings2, []),
% contract types warning as run_succ_typings2:call/0 makes a call to
% run_succ_typings1:call/0, which returns a (not b) in the PLT.
- [_] = run_dialyzer(succ_typings, [BeamFile2], [{check_plt, false}]),
+ [_] = run_dialyzer(succ_typings, [BeamFile2],
+ [{check_plt, false}, {init_plt, Plt}]),
% warning not returned as run_succ_typings1 is updated in the PLT.
- [] = run_dialyzer(succ_typings, [BeamFile2], [{check_plt, true}]),
+ [] = run_dialyzer(succ_typings, [BeamFile2],
+ [{check_plt, true}, {init_plt, Plt}]),
ok.
@@ -253,16 +261,15 @@ remove_plt(Config) ->
bad_dialyzer_attr(Config) ->
PrivDir = ?config(priv_dir, Config),
-
+ Plt = filename:join(PrivDir, "plt_bad_dialyzer_attr.plt"),
Prog1 = <<"-module(dial).
-dialyzer({no_return, [undef/0]}).">>,
{ok, Beam1} = compile(Config, Prog1, dial, []),
- Plt = filename:join(PrivDir, "bad_attr.plt"),
{dialyzer_error,
"Analysis failed with error:\n"
"Could not scan the following file(s):\n"
" Unknown function undef/0 in line " ++ _} =
- (catch run_dialyzer(plt_build, [Beam1], [])),
+ (catch run_dialyzer(plt_build, [Beam1], [{output_plt, Plt}])),
Prog2 = <<"-module(dial).
-dialyzer({no_return, [{undef,1,2}]}).">>,
@@ -271,7 +278,7 @@ bad_dialyzer_attr(Config) ->
"Analysis failed with error:\n"
"Could not scan the following file(s):\n"
" Bad function {undef,1,2} in line " ++ _} =
- (catch run_dialyzer(plt_build, [Beam2], [])),
+ (catch run_dialyzer(plt_build, [Beam2], [{output_plt, Plt}])),
ok.
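The changes above give every test case in plt_SUITE its own PLT file under priv_dir instead of touching a shared default PLT: plt_build runs get {output_plt, Plt} and the subsequent plt_check/succ_typings runs read the same file via {init_plt, Plt}. A rough sketch of what this boils down to, assuming run_dialyzer/3 ultimately delegates to dialyzer:run/1 (the helper itself is defined elsewhere in the suite):

-module(plt_opts_sketch).
-export([build_and_check/2]).

%% Build a PLT from BeamFiles into the private file Plt, then analyse
%% the same files against that PLT.
build_and_check(BeamFiles, Plt) ->
    [] = dialyzer:run([{analysis_type, plt_build},
                       {files, BeamFiles},
                       {output_plt, Plt},
                       {get_warnings, true}]),
    dialyzer:run([{analysis_type, succ_typings},
                  {files, BeamFiles},
                  {init_plt, Plt},
                  {get_warnings, true}]).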
diff --git a/lib/dialyzer/test/r9c_SUITE_data/results/mnesia b/lib/dialyzer/test/r9c_SUITE_data/results/mnesia
index bf67447ee7..71acdd9c9e 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/results/mnesia
+++ b/lib/dialyzer/test/r9c_SUITE_data/results/mnesia
@@ -17,6 +17,7 @@ mnesia_frag.erl:294: The call mnesia_frag:remote_collect(Ref::reference(),{'erro
mnesia_frag.erl:304: The call mnesia_frag:remote_collect(Ref::reference(),{'error',{'node_not_running',_}},[],OldSelectFun::fun(() -> [any()])) will never return since it differs in the 2nd argument from the success typing arguments: (reference(),'ok',[any()],fun(() -> [any()]))
mnesia_frag.erl:312: The call mnesia_frag:remote_collect(Ref::reference(),LocalRes::{'error',_},[],OldSelectFun::fun(() -> [any()])) will never return since it differs in the 2nd argument from the success typing arguments: (reference(),'ok',[any()],fun(() -> [any()]))
mnesia_frag_hash.erl:24: Callback info about the mnesia_frag_hash behaviour is not available
+mnesia_frag_old_hash.erl:105: Call to missing or unexported function erlang:hash/2
mnesia_frag_old_hash.erl:23: Callback info about the mnesia_frag_hash behaviour is not available
mnesia_index.erl:52: The call mnesia_lib:other_val(Var::{_,'commit_work' | 'index' | 'setorbag' | 'storage_type' | {'index',_}},_ReASoN_::any()) will never return since it differs in the 1st argument from the success typing arguments: ({_,'active_replicas' | 'where_to_read' | 'where_to_write'},any())
mnesia_lib.erl:1028: The pattern {'EXIT', Reason} can never match the type [any()] | {'error',_}
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct.erl
index ed38b2f915..3829479a94 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct.erl
@@ -520,7 +520,7 @@ save_automatic_tagged_types([_M|Ms]) ->
%% remove_in_set_imports/3 :
%% input: list with tuples of each module's imports and module name
%% respectively.
-%% output: one list with same format but each occured import from a
+%% output: one list with same format but each occurred import from a
%% module in the input set (IMNameL) is removed.
remove_in_set_imports([{{imports,ImpL},_ModName}|Rest],InputMNameL,Acc) ->
NewImpL = remove_in_set_imports1(ImpL,InputMNameL,[]),
@@ -1628,7 +1628,7 @@ tlv_tag1(<<1:1,PartialTag:7,Buffer/binary>>,Acc) ->
tlv_tag1(Buffer,(Acc bsl 7) bor PartialTag).
%% reads the content from the configuration file and returns the
-%% selected part choosen by InfoType. Assumes that the config file
+%% selected part chosen by InfoType. Assumes that the config file
%% content is an Erlang term.
read_config_file(ModuleName,InfoType) when atom(InfoType) ->
CfgList = read_config_file(ModuleName),
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_check.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_check.erl
index c26b8f851b..a4f39bde74 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_check.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_check.erl
@@ -4028,7 +4028,7 @@ check_sequence(S,Type,Comps) ->
{CRelInf,NewComps2} = componentrelation_leadingattr(S,NewComps),
% io:format("CRelInf: ~p~n",[CRelInf]),
% io:format("NewComps2: ~p~n",[NewComps2]),
- %% CompListWithTblInf has got a lot unecessary info about
+ %% CompListWithTblInf has got a lot unnecessary info about
%% the involved class removed, as the class of the object
%% set.
CompListWithTblInf = get_tableconstraint_info(S,Type,NewComps2),
@@ -4686,7 +4686,7 @@ any_component_relation(_,[],_,_,Acc) ->
%% evaluate_atpath/4 finds out whether the at notation refers to the
%% search level. The list of referenced names in the AtNot list shall
%% begin with a name that exists on the level it refers to. If the
-%% found AtPath is refering to the same sub-branch as the simple table
+%% found AtPath is referring to the same sub-branch as the simple table
%% has, then there shall not be any leading attribute info on this
%% level.
evaluate_atpath(_,[],Cnames,{innermost,AtPath=[Ref|_Refs]}) ->
@@ -4857,7 +4857,7 @@ innertype_comprel1(S,T = #type{def=Def,constraint=Cons,tablecinf=TCI},Path) ->
case Cons of
[{componentrelation,{_,_,ObjectSet},AtList}|_Rest] ->
%% This AtList must have an "outermost" at sign to be
- %% relevent here.
+ %% relevant here.
[{_,AL=[#'Externalvaluereference'{value=_Attr}|_R1]}|_R2]
= AtList,
%% #'ObjectClassFieldType'{class=ClassDef} = Def,
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber.erl
index 392896932a..0b5ea85681 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber.erl
@@ -1259,7 +1259,7 @@ gen_dec_line(Erules,TopType,Cname,CTags,Type,OptOrMand,DecObjInf) ->
end,
case DecObjInf of
{Cname,ObjSet} -> % this must be the component were an object is
- %% choosen from the object set according to the table
+ %% chosen from the object set according to the table
%% constraint.
{[{ObjSet,Cname,asn1ct_gen:mk_var(asn1ct_name:curr(term))}],
PostpDec};
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl
index 9725da4d11..fb9ffb13db 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl
@@ -1096,7 +1096,7 @@ gen_dec_line(Erules,TopType,Cname,CTags,Type,OptOrMand,DecObjInf) ->
end,
case DecObjInf of
{Cname,ObjSet} -> % this must be the component were an object is
- %% choosen from the object set according to the table
+ %% chosen from the object set according to the table
%% constraint.
{[{ObjSet,Cname,asn1ct_gen:mk_var(asn1ct_name:curr(term))}],
PostpDec};
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser2.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser2.erl
index 5f8c7a0de8..32676b3448 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser2.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser2.erl
@@ -2721,7 +2721,7 @@ prioritize_error(ErrList) ->
end,
NewErrList),
case SplitErrs of
- {[],UndefPosErrs} -> % if no error with Positon exists
+ {[],UndefPosErrs} -> % if no error with Position exists
lists:last(UndefPosErrs);
{IntPosErrs,_} ->
IntPosReasons = lists:map(fun(X)->element(2,X) end,IntPosErrs),
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin.erl
index 5854f8edbd..8f4d189b5a 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin.erl
@@ -1036,7 +1036,7 @@ decode_real2(Buffer0, Form, Len, RemBytes1) ->
%%
%% bitstring NamedBitList
%% Val can be of:
-%% - [identifiers] where only named identifers are set to one,
+%% - [identifiers] where only named identifiers are set to one,
%% the Constraint must then have some information of the
%% bitlength.
%% - [list of ones and zeroes] all bits
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl
index 0457425445..6e12d36579 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl
@@ -1034,7 +1034,7 @@ decode_real_notag(_Buffer, _Form) ->
%%
%% bitstring NamedBitList
%% Val can be of:
-%% - [identifiers] where only named identifers are set to one,
+%% - [identifiers] where only named identifiers are set to one,
%% the Constraint must then have some information of the
%% bitlength.
%% - [list of ones and zeroes] all bits
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per.erl
index b163aa24ac..97c92a2dd1 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per.erl
@@ -823,7 +823,7 @@ decode_enumerated(Buffer,C,NamedNumberTup) when tuple(NamedNumberTup) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% bitstring NamedBitList
%% Val can be of:
-%% - [identifiers] where only named identifers are set to one,
+%% - [identifiers] where only named identifiers are set to one,
%% the Constraint must then have some information of the
%% bitlength.
%% - [list of ones and zeroes] all bits
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin.erl
index 15986cc217..aa2cf5ba88 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin.erl
@@ -1000,7 +1000,7 @@ decode_enumerated(Buffer,C,NamedNumberTup) when tuple(NamedNumberTup) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% bitstring NamedBitList
%% Val can be of:
-%% - [identifiers] where only named identifers are set to one,
+%% - [identifiers] where only named identifiers are set to one,
%% the Constraint must then have some information of the
%% bitlength.
%% - [list of ones and zeroes] all bits
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl
index 43d9bef54e..24f7949c21 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl
@@ -1059,7 +1059,7 @@ decode_enumerated(Buffer,C,NamedNumberTup) when tuple(NamedNumberTup) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% bitstring NamedBitList
%% Val can be of:
-%% - [identifiers] where only named identifers are set to one,
+%% - [identifiers] where only named identifiers are set to one,
%% the Constraint must then have some information of the
%% bitlength.
%% - [list of ones and zeroes] all bits
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/ftp.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/ftp.erl
index 4f0ca99cce..8be5b0cd6e 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/ftp.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/ftp.erl
@@ -108,7 +108,7 @@ user(Pid, User, Pass) ->
gen_server:call(Pid, {user, User, Pass}, infinity).
%% user(Pid, User, Pass,Acc)
-%% Purpose: Login whith a supplied account name
+%% Purpose: Login with a supplied account name
%% Args: Pid = pid(), User = Pass = Acc = string()
%% Returns: ok | {error, euser} | {error, econn} | {error, eacct}
user(Pid, User, Pass,Acc) ->
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/http.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/http.erl
index cf05431f5a..039960dac7 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/http.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/http.erl
@@ -24,7 +24,7 @@
%%% - RFC 3310 Authentication and Key Agreement (AKA) (not yet!)
%%% - HTTP/1.1 Specification Errata found at
%%% http://world.std.com/~lawrence/http_errata.html
-%%% Additionaly follows the following recommendations:
+%%% Additionally follows the following recommendations:
%%% - RFC 3143 Known HTTP Proxy/Caching Problems (not yet!)
%%% - draft-nottingham-hdrreg-http-00.txt (not yet!)
%%%
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/http_lib.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/http_lib.erl
index ebefcd7ad7..28ea42c685 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/http_lib.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/http_lib.erl
@@ -697,7 +697,7 @@ lookup(Key,Val) ->
%%% This code is for parsing trailer headers in chunked messages.
%%% Will be deprecated whenever I have found an alternative working solution!
%%% Note:
-%%% - The header names are returned slighly different from what the what
+%%% - The header names are returned slightly differently from what
%%% inet_drv returns
read_headers_old(Scheme,Socket,Timeout) ->
read_headers_old(<<>>,Scheme,Socket,Timeout,[],[]).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_manager.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_manager.erl
index 45beaa84f7..d2653184aa 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_manager.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_manager.erl
@@ -95,7 +95,7 @@ abort_session(Addr,Sid,Msg) ->
next_request(Addr,Sid) ->
gen_server:call(?HMACALL,{next_request,Addr,Sid},infinity).
-%%% Session handler has succeded to set up a new session, now register
+%%% Session handler has succeeded in setting up a new session, now register
%%% the socket
register_socket(Addr,Sid,Socket) ->
gen_server:cast(?HMACALL,{register_socket,Addr,Sid,Socket}).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_manager.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_manager.erl
index 85e06f43b6..3058ac3556 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_manager.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_manager.erl
@@ -224,7 +224,7 @@ is_blocked(ServerRef) ->
%%
-%% Module API. Theese functions are intended for use from modules only.
+%% Module API. These functions are intended for use from modules only.
%%
config_lookup(Port, Query) ->
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_parse.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_parse.erl
index d7a698d65a..07f951d057 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_parse.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_parse.erl
@@ -109,7 +109,7 @@ get_persistens(HTTPVersion,ParsedHeader,ConfigDB)->
%%If it is version prio to 1.1 kill the conneciton
[$H, $T, $T, $P, $\/, $1, $.,N] ->
case httpd_util:key1search(ParsedHeader,"connection","keep-alive")of
- %%if the connection isnt ordered to go down let it live
+ %%if the connection isn't ordered to go down let it live
%%The keep-alive value is the older http/1.1 might be older
%%Clients that use it.
"keep-alive" when N >= 49 ->
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_response.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_response.erl
index 47c7fc1b8d..50e0e42786 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_response.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_response.erl
@@ -34,7 +34,7 @@
-define(PROCEED_RESPONSE(StatusCode, Info),
{proceed,
[{response,{already_sent, StatusCode,
- httpd_util:key1search(Info#mod.data,content_lenght)}}]}).
+ httpd_util:key1search(Info#mod.data,content_length)}}]}).
-include("httpd.hrl").
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/jnets_httpd.hrl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/jnets_httpd.hrl
index 6b872d7c95..73edcf6b92 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/jnets_httpd.hrl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/jnets_httpd.hrl
@@ -60,7 +60,7 @@
% request_line, % string() Request Line
headers, % #req_headers{} Parsed request headers
entity_body= <<>>, % binary() Body of request
- connection, % boolean() true if persistant connection
+ connection, % boolean() true if persistent connection
status_code, % int() Status code
logging % int() 0=No logging
% 1=Only mod_log present
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_mnesia.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_mnesia.erl
index e42494ff76..847d6e97c1 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_mnesia.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_mnesia.erl
@@ -53,7 +53,7 @@ store_directory_data(Directory, DirData) ->
%% API
%%
-%% Compability API
+%% Compatibility API
store_user(UserName, Password, Port, Dir, AccessPassword) ->
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl
index 1203aeaa4c..a48f73274b 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl
@@ -440,7 +440,7 @@ try_new_erl_scheme_method(Info,Env,Input,Mod,Func)->
%%----------------------------------------------------------------------
-%%The function recieves the data from the process that generates the page
+%%The function receives the data from the process that generates the page
%%and send the data to the client through the mod_cgi:send function
%%----------------------------------------------------------------------
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_htaccess.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_htaccess.erl
index f600c65e92..d95c745b07 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_htaccess.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_htaccess.erl
@@ -272,10 +272,10 @@ controlIfAllowed(AllowedNetworks,UserNetwork,IfAllowed,IfDenied)->
end.
-%---------------------------------------------------------------------%
-%The Denycontrol isn't neccessary to preform since the allow control %
-%override the deny control %
-%---------------------------------------------------------------------%
+%--------------------------------------------------------------------%
+%The Denycontrol isn't necessary to perform since the allow control %
+%override the deny control %
+%--------------------------------------------------------------------%
controlDenyAllow(DeniedNetworks,AllowedNetworks,UserNetwork)->
case AllowedNetworks of
[{allow,all}]->
@@ -657,7 +657,7 @@ getData2(HtAccessFileNames,SplittedPath,Info)->
%----------------------------------------------------------------------
%HtAccessFilenames is a list the names the accesssfiles can have
-%Path is the shortest match agains all alias and documentroot
+%Path is the shortest match against all alias and documentroot
%rest of splitted path is a list of the parts of the path
%Info is the mod recod from the server
%----------------------------------------------------------------------
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_range.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_range.erl
index 4e6030d5e2..f2c45c4a3f 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_range.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_range.erl
@@ -80,7 +80,7 @@ send_range_response(Path,Info,Ranges,FileInfo,LastModified)->
send_range_response(Path,Info,Start,Stop,FileInfo,LastModified)
end.
%%More than one range specified
-%%Send a multipart reponse to the user
+%%Send a multipart response to the user
%
%%An example of an multipart range response
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_responsecontrol.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_responsecontrol.erl
index 76168f3890..a997db6880 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_responsecontrol.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_responsecontrol.erl
@@ -48,8 +48,8 @@ do(Info) ->
%%----------------------------------------------------------------------
-%%Control that the request header did not contians any limitations
-%%wheather a response shall be createed or not
+%%Control that the request header did not contain any limitations
+%%whether a response shall be created or not
%%----------------------------------------------------------------------
do_responsecontrol(Info) ->
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.erl
index 19b571ac47..cc72a9b6fe 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.erl
@@ -431,7 +431,7 @@ wrap_trans(State, Fun, Args, Retries, Mod, Kind) ->
%% read lock is only set on the first node
%% Nodes may either be a list of nodes or one node as an atom
%% Mnesia on all Nodes must be connected to each other, but
-%% it is not neccessary that they are up and running.
+%% it is not necessary that they are up and running.
lock(LockItem, LockKind) ->
case get(mnesia_activity_state) of
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_bup.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_bup.erl
index fdbf3e4481..a85a08e4f8 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_bup.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_bup.erl
@@ -775,7 +775,7 @@ restore_tables([Rec | Recs], Header, Schema, State = {local, LocalTabs, L}) ->
restore_tables([], _Header, _Schema, State) ->
State.
-%% Creates all neccessary dat files and inserts
+%% Creates all necessary dat files and inserts
%% the table definitions in the schema table
%%
%% Returns a list of local_tab tuples for all local tables
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint.erl
index 2b5c77b3ba..0403c7e978 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint.erl
@@ -332,7 +332,7 @@ really_retain(Name, Tab) ->
%%
%% {min, MinTabs}
%% Minimize redundancy and only keep checkpoint info together with
-%% one replica, preferrably at the local node. If any node involved
+%% one replica, preferably at the local node. If any node involved
%% the checkpoint goes down, the checkpoint is deactivated.
%%
%% {max, MaxTabs}
@@ -345,7 +345,7 @@ really_retain(Name, Tab) ->
%% {ram_overrides_dump, Tabs}
%% Only applicable for ram_copies. Bool controls which versions of
%% the records that should be included in the checkpoint state.
-%% true means that the latest comitted records in ram (i.e. the
+%% true means that the latest committed records in ram (i.e. the
%% records that the application accesses) should be included
%% in the checkpoint. false means that the records dumped to
%% dat-files (the records that will be loaded at startup) should
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_loader.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_loader.erl
index 70fee1741e..07667d73f5 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_loader.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_loader.erl
@@ -61,7 +61,7 @@ do_get_disc_copy2(Tab, Reason, Storage, Type) when Storage == disc_copies ->
Repair = mnesia_monitor:get_env(auto_repair),
Args = [{keypos, 2}, public, named_table, Type],
case Reason of
- {dumper, _} -> %% Resources allready allocated
+ {dumper, _} -> %% Resources already allocated
ignore;
_ ->
mnesia_monitor:mktab(Tab, Args),
@@ -82,7 +82,7 @@ do_get_disc_copy2(Tab, Reason, Storage, Type) when Storage == disc_copies ->
do_get_disc_copy2(Tab, Reason, Storage, Type) when Storage == ram_copies ->
Args = [{keypos, 2}, public, named_table, Type],
case Reason of
- {dumper, _} -> %% Resources allready allocated
+ {dumper, _} -> %% Resources already allocated
ignore;
_ ->
mnesia_monitor:mktab(Tab, Args),
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_locker.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_locker.erl
index 701aa8f598..accb631f2a 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_locker.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_locker.erl
@@ -170,14 +170,14 @@ loop(State) ->
end;
%% If test_set_sticky fails, we send this to all nodes
- %% after aquiring a real write lock on Oid
+ %% after acquiring a real write lock on Oid
{stick, {Tab, _}, N} ->
?ets_insert(mnesia_sticky_locks, {Tab, N}),
loop(State);
%% The caller which sends this message, must have first
- %% aquired a write lock on the entire table
+ %% acquired a write lock on the entire table
{unstick, Tab} ->
?ets_delete(mnesia_sticky_locks, Tab),
loop(State);
@@ -738,11 +738,11 @@ dirty_sticky_lock(Tab, Key, Nodes, Lock) ->
sticky_wlock_table(Tid, Store, Tab) ->
sticky_lock(Tid, Store, {Tab, ?ALL}, write).
-%% aquire a wlock on Oid
+%% acquire a wlock on Oid
%% We store a {Tabname, write, Tid} in all locktables
%% on all nodes containing a copy of Tabname
%% We also store an item {{locks, Tab, Key}, write} in the
-%% local store when we have aquired the lock.
+%% local store when we have acquired the lock.
%%
wlock(Tid, Store, Oid) ->
{Tab, Key} = Oid,
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_monitor.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_monitor.erl
index d1ff09ce29..7fd5f70e23 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_monitor.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_monitor.erl
@@ -144,7 +144,7 @@ check_protocol([{Node, {accept, Mon, _Version, Protocol}} | Tail], Protocols) ->
end,
[node(Mon) | check_protocol(Tail, Protocols)];
false ->
- unlink(Mon), % Get rid of unneccessary link
+ unlink(Mon), % Get rid of unnecessary link
check_protocol(Tail, Protocols)
end;
check_protocol([{Node, {reject, _Mon, Version, Protocol}} | Tail], Protocols) ->
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_schema.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_schema.erl
index ec07e1c1ab..fbd1356a7f 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_schema.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_schema.erl
@@ -1265,7 +1265,7 @@ make_change_table_copy_type(Tab, Node, ToS) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% change index functions ....
-%% Pos is allready added by 1 in both of these functions
+%% Pos is already added by 1 in both of these functions
add_table_index(Tab, Pos) ->
schema_transaction(fun() -> do_add_table_index(Tab, Pos) end).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_tm.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_tm.erl
index 3e08354b5a..09e310530d 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_tm.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_tm.erl
@@ -1615,7 +1615,7 @@ commit_participant(Coord, Tid, Bin, C0, DiscNs, _RamNs) ->
do_abort(Tid, Bin) when binary(Bin) ->
%% Possible optimization:
- %% If we want we could pass arround a flag
+ %% If we want we could pass around a flag
%% that tells us whether the binary contains
%% schema ops or not. Only if the binary
%% contains schema ops there are meningful
diff --git a/lib/dialyzer/test/small_SUITE_data/results/chars b/lib/dialyzer/test/small_SUITE_data/results/chars
new file mode 100644
index 0000000000..2c1f8f8d17
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/results/chars
@@ -0,0 +1,4 @@
+
+chars.erl:29: Invalid type specification for function chars:f/1. The success typing is (#{'b':=50}) -> 'ok'
+chars.erl:32: Function t1/0 has no local return
+chars.erl:32: The call chars:f(#{'b':=50}) breaks the contract (#{'a':=49,'b'=>50,'c'=>51}) -> 'ok'
diff --git a/lib/dialyzer/test/small_SUITE_data/src/anno.erl b/lib/dialyzer/test/small_SUITE_data/src/anno.erl
new file mode 100644
index 0000000000..70f1d42141
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/anno.erl
@@ -0,0 +1,18 @@
+-module(anno).
+
+%% OTP-14131
+
+-export([t1/0, t2/0, t3/0]).
+
+t1() ->
+ A = erl_parse:anno_from_term({attribute, 1, module, my_test}),
+ compile:forms([A], []).
+
+t2() ->
+ A = erl_parse:new_anno({attribute, 1, module, my_test}),
+ compile:forms([A], []).
+
+t3() ->
+ A = erl_parse:new_anno({attribute, 1, module, my_test}),
+ T = erl_parse:anno_to_term(A),
+ {attribute, 1, module, my_test} = T.
diff --git a/lib/dialyzer/test/small_SUITE_data/src/chars.erl b/lib/dialyzer/test/small_SUITE_data/src/chars.erl
new file mode 100644
index 0000000000..1e9c8ab6b9
--- /dev/null
+++ b/lib/dialyzer/test/small_SUITE_data/src/chars.erl
@@ -0,0 +1,32 @@
+-module(chars).
+
+%% ERL-313
+
+-export([t/0]).
+-export([t1/0]).
+
+-record(r, {f :: $A .. $Z}).
+
+-type cs() :: $A..$Z | $a .. $z | $/.
+
+-spec t() -> $0-$0..$9-$0| $?.
+
+t() ->
+ c(#r{f = $z - 3}),
+ c($z - 3),
+ c($B).
+
+-spec c(cs()) -> $3-$0..$9-$0.
+
+c($A + 1) -> 2;
+c(C) ->
+ case C of
+ $z - 3 -> 3;
+ #r{f = $z - 3} -> 7
+ end.
+
+%% Display contract with character in warning:
+-spec f(#{a := $1, b => $2, c => $3}) -> ok. % invalid type spec
+f(_) -> ok.
+
+t1() -> f(#{b => $2}). % breaks the contract
diff --git a/lib/dialyzer/test/small_SUITE_data/src/tuple1.erl b/lib/dialyzer/test/small_SUITE_data/src/tuple1.erl
index d608275efe..88ac486044 100644
--- a/lib/dialyzer/test/small_SUITE_data/src/tuple1.erl
+++ b/lib/dialyzer/test/small_SUITE_data/src/tuple1.erl
@@ -2,7 +2,7 @@
%%% File : tuple1.erl
%%% Author : Tobias Lindahl <[email protected]>
%%% Description : Exposed two bugs in the analysis;
-%%% one supressed warning and one crash.
+%%% one suppressed warning and one crash.
%%%
%%% Created : 13 Nov 2006 by Tobias Lindahl <[email protected]>
%%%-------------------------------------------------------------------
diff --git a/lib/diameter/include/diameter_gen.hrl b/lib/diameter/include/diameter_gen.hrl
index 611ad796a9..5361510d69 100644
--- a/lib/diameter/include/diameter_gen.hrl
+++ b/lib/diameter/include/diameter_gen.hrl
@@ -424,7 +424,7 @@ d(true, _, Name, Avp, Acc) ->
%% ... or not. Failures here won't be visible since they're a "normal"
%% occurrence if the peer sends a faulty AVP that we need to respond
-%% sensibly to. Log the occurence for traceability, but the peer will
+%% sensibly to. Log the occurrence for traceability, but the peer will
%% also receive info in the resulting answer message.
d(false, Reason, Name, Avp, {Avps, Acc}) ->
Stack = diameter_lib:get_stacktrace(),
diff --git a/lib/diameter/src/base/diameter.erl b/lib/diameter/src/base/diameter.erl
index e8f2f63f86..253f64133c 100644
--- a/lib/diameter/src/base/diameter.erl
+++ b/lib/diameter/src/base/diameter.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -377,6 +377,7 @@ call(SvcName, App, Message) ->
| {capabilities, [capability()]}
| {capabilities_cb, evaluable()}
| {capx_timeout, 'Unsigned32'()}
+ | {capx_strictness, boolean()}
| {disconnect_cb, evaluable()}
| {dpr_timeout, 'Unsigned32'()}
| {dpa_timeout, 'Unsigned32'()}
diff --git a/lib/diameter/src/base/diameter_callback.erl b/lib/diameter/src/base/diameter_callback.erl
index f479cb6612..0e445492b8 100644
--- a/lib/diameter/src/base/diameter_callback.erl
+++ b/lib/diameter/src/base/diameter_callback.erl
@@ -35,7 +35,7 @@
%% in a callback applied to the atom-valued callback name and argument
%% list. For all callbacks not to this module, the 'extra' field is a
%% list of additional arguments, following arguments supplied by
-%% diameter but preceeding those of the diameter:evaluable() being
+%% diameter but preceding those of the diameter:evaluable() being
%% applied.
%%
%% For example, the following config to diameter:start_service/2, in
diff --git a/lib/diameter/src/base/diameter_config.erl b/lib/diameter/src/base/diameter_config.erl
index fdbbd412a1..e10804c931 100644
--- a/lib/diameter/src/base/diameter_config.erl
+++ b/lib/diameter/src/base/diameter_config.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -580,6 +580,9 @@ opt({K, Tmo})
K == dpa_timeout ->
?IS_UINT32(Tmo);
+opt({capx_strictness, B}) ->
+ is_boolean(B);
+
opt({length_errors, T}) ->
lists:member(T, [exit, handle, discard]);
@@ -865,7 +868,7 @@ init_cb(List) ->
V <- [proplists:get_value(F, List, D)]],
#diameter_callback{} = list_to_tuple([diameter_callback | Values]).
-%% Retreive and validate.
+%% Retrieve and validate.
get_opt(Key, List, Def, Other) ->
init_opt(Key, get_opt(Key, List, Def), [Def|Other]).
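Together with the transport_opt() addition in diameter.erl above, this makes capx_strictness a recognised boolean transport option (defaulting to true in diameter_peer_fsm below). A hedged configuration sketch, with placeholder addresses and a hypothetical function name, showing where the flag would be supplied:

-module(capx_strictness_sketch).
-export([add_lenient_transport/1]).

%% Add a connecting transport that tolerates non-CEA traffic while
%% waiting for capabilities exchange (non-standard behaviour, per the
%% comment added in diameter_peer_fsm.erl below).
add_lenient_transport(SvcName) ->
    diameter:add_transport(SvcName,
                           {connect,
                            [{transport_module, diameter_tcp},
                             {transport_config, [{raddr, {127,0,0,1}},
                                                 {rport, 3868}]},
                             {capx_strictness, false}]}).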
diff --git a/lib/diameter/src/base/diameter_peer_fsm.erl b/lib/diameter/src/base/diameter_peer_fsm.erl
index 996e75a8d3..46d231da74 100644
--- a/lib/diameter/src/base/diameter_peer_fsm.erl
+++ b/lib/diameter/src/base/diameter_peer_fsm.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -128,6 +128,7 @@
%% outgoing DPR; boolean says whether or not
%% the request was sent explicitly with
%% diameter:call/4.
+ strict :: boolean(),
length_errors :: exit | handle | discard,
incoming_maxlen :: integer() | infinity}).
@@ -233,6 +234,7 @@ i({Ack, WPid, {M, Ref} = T, Opts, {SvcOpts, Nodes, Dict0, Svc}}) ->
proplists:get_value(dpa_timeout, Opts, ?DPA_TIMEOUT)}),
Tmo = proplists:get_value(capx_timeout, Opts, ?CAPX_TIMEOUT),
+ Strictness = proplists:get_value(capx_strictness, Opts, true),
OnLengthErr = proplists:get_value(length_errors, Opts, exit),
{TPid, Addrs} = start_transport(T, Rest, Svc),
@@ -246,6 +248,7 @@ i({Ack, WPid, {M, Ref} = T, Opts, {SvcOpts, Nodes, Dict0, Svc}}) ->
mode = M,
service = svc(Svc, Addrs),
length_errors = OnLengthErr,
+ strict = Strictness,
incoming_maxlen = Maxlen}.
%% The transport returns its local ip addresses so that different
%% transports on the same service can use different local addresses.
@@ -356,7 +359,7 @@ handle_info(T, #state{} = State) ->
%% Note that there's no guarantee that the service and transport
%% capabilities are good enough to build a CER/CEA that can be
-%% succesfully encoded. It's not checked at diameter:add_transport/2
+%% successfully encoded. It's not checked at diameter:add_transport/2
%% since this can be called before creating the service.
%% terminate/2
@@ -454,6 +457,9 @@ transition({timeout, _}, _) ->
%% Outgoing message.
transition({send, Msg}, S) ->
outgoing(Msg, S);
+transition({send, Msg, Route}, S) ->
+ put_route(Route),
+ outgoing(Msg, S);
%% Request for graceful shutdown at remove_transport, stop_service of
%% application shutdown.
@@ -483,8 +489,10 @@ transition({'DOWN', _, process, TPid, _},
= S) ->
start_next(S);
-%% Transport has died after connection timeout.
-transition({'DOWN', _, process, _, _}, _) ->
+%% Transport has died after connection timeout, or handler process has
+%% died.
+transition({'DOWN', _, process, Pid, _}, _) ->
+ erase_route(Pid),
ok;
%% State query.
@@ -494,6 +502,40 @@ transition({state, Pid}, #state{state = S, transport = TPid}) ->
%% Crash on anything unexpected.
+%% put_route/1
+%%
+%% Map identifiers in an outgoing request to be able to look up the
+%% handler process when the answer is received.
+
+put_route({Pid, Ref, Seqs}) ->
+ MRef = monitor(process, Pid),
+ put(Pid, Seqs),
+ put(Seqs, {Pid, Ref, MRef}).
+
+%% get_route/1
+
+get_route(#diameter_packet{header = #diameter_header{is_request = false}}
+ = Pkt) ->
+ Seqs = diameter_codec:sequence_numbers(Pkt),
+ case erase(Seqs) of
+ {Pid, Ref, MRef} ->
+ demonitor(MRef),
+ erase(Pid),
+ {Pid, Ref, self()};
+ undefined ->
+ false
+ end;
+
+get_route(_) ->
+ false.
+
+%% erase_route/1
+
+erase_route(Pid) ->
+ erase(erase(Pid)).
+
+%% capx/1
+
capx(recv_CER) ->
'CER';
capx({'Wait-CEA', _, _}) ->
@@ -576,8 +618,7 @@ incoming({Msg, NPid}, S) ->
T
catch
{?MODULE, Name, Pkt} ->
- S#state.parent ! {recv, self(), Name, {Pkt, NPid}},
- rcv(Name, Pkt, S)
+ incoming(Name, Pkt, NPid, S)
end;
incoming(Msg, S) ->
@@ -585,10 +626,15 @@ incoming(Msg, S) ->
recv(Msg, S)
catch
{?MODULE, Name, Pkt} ->
- S#state.parent ! {recv, self(), Name, Pkt},
- rcv(Name, Pkt, S)
+ incoming(Name, Pkt, false, S)
end.
+%% incoming/4
+
+incoming(Name, Pkt, NPid, #state{parent = Pid} = S) ->
+ Pid ! {recv, self(), get_route(Pkt), Name, Pkt, NPid},
+ rcv(Name, Pkt, S).
+
%% recv/2
recv(#diameter_packet{header = #diameter_header{} = Hdr}
@@ -614,6 +660,17 @@ recv1(_,
when M < size(Bin) ->
invalid(false, incoming_maxlen_exceeded, {size(Bin), H});
+%% Ignore anything but an expected CER/CEA if so configured. This is
+%% non-standard behaviour.
+recv1(Name, _, #state{state = {'Wait-CEA', _, _},
+ strict = false})
+ when Name /= 'CEA' ->
+ ok;
+recv1(Name, _, #state{state = recv_CER,
+ strict = false})
+ when Name /= 'CER' ->
+ ok;
+
%% Incoming request after outgoing DPR: discard. Don't discard DPR, so
%% both ends don't do so when sending simultaneously.
recv1(Name,
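The put_route/get_route/erase_route trio added above keeps a two-way mapping in the peer fsm's process dictionary: a request's sequence numbers point at the handler process waiting for the answer (plus a monitor reference), and the handler pid points back at the sequence numbers so a 'DOWN' can clean up the stale entry. A standalone sketch of that pattern, with illustrative names and keys:

-module(route_map_sketch).
-export([put_route/2, take_route/1, erase_route/1]).

%% Remember which process is waiting for the answer keyed by Key
%% (e.g. the request's sequence numbers), and monitor it.
put_route(Key, Pid) ->
    MRef = erlang:monitor(process, Pid),
    put(Pid, Key),
    put(Key, {Pid, MRef}).

%% Called when the answer arrives: hand back the waiting pid and
%% remove both directions of the mapping.
take_route(Key) ->
    case erase(Key) of
        {Pid, MRef} ->
            erlang:demonitor(MRef, [flush]),
            erase(Pid),
            {ok, Pid};
        undefined ->
            false
    end.

%% Called on {'DOWN', _, process, Pid, _}: the handler died before the
%% answer came back, so drop its entry.
erase_route(Pid) ->
    erase(erase(Pid)).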
diff --git a/lib/diameter/src/base/diameter_service.erl b/lib/diameter/src/base/diameter_service.erl
index ccf68f4d93..e4f77e3a24 100644
--- a/lib/diameter/src/base/diameter_service.erl
+++ b/lib/diameter/src/base/diameter_service.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -1858,13 +1858,6 @@ eq(Any, Id, PeerId) ->
%% OctetString() can be specified as an iolist() so test for string
%% rather then term equality.
-%% transports/1
-
-transports(#state{watchdogT = WatchdogT}) ->
- ets:select(WatchdogT, [{#watchdog{peer = '$1', _ = '_'},
- [{'is_pid', '$1'}],
- ['$1']}]).
-
%% ---------------------------------------------------------------------------
%% # service_info/2
%% ---------------------------------------------------------------------------
@@ -1887,7 +1880,6 @@ transports(#state{watchdogT = WatchdogT}) ->
-define(ALL_INFO, [capabilities,
applications,
transport,
- pending,
options]).
%% The rest.
@@ -1981,7 +1973,6 @@ complete_info(Item, #state{service = Svc} = S) ->
applications -> info_apps(S);
transport -> info_transport(S);
options -> info_options(S);
- pending -> info_pending(S);
keys -> ?ALL_INFO ++ ?CAP_INFO ++ ?OTHER_INFO;
all -> service_info(?ALL_INFO, S);
statistics -> info_stats(S);
@@ -2189,13 +2180,6 @@ info_apps(#state{service = #diameter_service{applications = Apps}}) ->
mk_app(#diameter_app{} = A) ->
lists:zip(record_info(fields, diameter_app), tl(tuple_to_list(A))).
-%% info_pending/1
-%%
-%% One entry for each outgoing request whose answer is outstanding.
-
-info_pending(#state{} = S) ->
- diameter_traffic:pending(transports(S)).
-
%% info_info/1
%%
%% Extract process_info from connections info.
diff --git a/lib/diameter/src/base/diameter_sup.erl b/lib/diameter/src/base/diameter_sup.erl
index 482289cb9a..01c51f0856 100644
--- a/lib/diameter/src/base/diameter_sup.erl
+++ b/lib/diameter/src/base/diameter_sup.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -42,7 +42,7 @@
-define(TABLES, [{diameter_sequence, [set]},
{diameter_service, [set, {keypos, 3}]},
- {diameter_request, [bag]},
+ {diameter_request, [set]},
{diameter_config, [bag, {keypos, 2}]}]).
%% start_link/0
diff --git a/lib/diameter/src/base/diameter_traffic.erl b/lib/diameter/src/base/diameter_traffic.erl
index d93a3e71e3..bc1ccf4feb 100644
--- a/lib/diameter/src/base/diameter_traffic.erl
+++ b/lib/diameter/src/base/diameter_traffic.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2013-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2013-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -30,7 +30,7 @@
-export([send_request/4]).
%% towards diameter_watchdog
--export([receive_message/4]).
+-export([receive_message/6]).
%% towards diameter_peer_fsm and diameter_watchdog
-export([incr/4,
@@ -40,11 +40,11 @@
%% towards diameter_service
-export([make_recvdata/1,
peer_up/1,
- peer_down/1,
- pending/1]).
+ peer_down/1]).
-%% towards ?MODULE
--export([send/1]). %% send from remote node
+%% internal
+-export([send/1, %% send from remote node
+ init/1]). %% monitor process start
-include_lib("diameter/include/diameter.hrl").
-include("diameter_internal.hrl").
@@ -57,14 +57,12 @@
-define(DEFAULT_TIMEOUT, 5000). %% for outgoing requests
-define(DEFAULT_SPAWN_OPTS, []).
-%% Table containing outgoing requests for which a reply has yet to be
-%% received.
+%% Table containing outgoing entries that live and die with
+%% peer_up/down. The name is historic, since the table used to contain
+%% information about outgoing requests for which an answer has yet to
+%% be received.
-define(REQUEST_TABLE, diameter_request).
-%% Workaround for dialyzer's lack of understanding of match specs.
--type match(T)
- :: T | '_' | '$1' | '$2' | '$3' | '$4'.
-
%% Record diameter:call/4 options are parsed into.
-record(options,
{filter = none :: diameter:peer_filter(),
@@ -72,7 +70,7 @@
timeout = ?DEFAULT_TIMEOUT :: 0..16#FFFFFFFF,
detach = false :: boolean()}).
-%% Term passed back to receive_message/4 with every incoming message.
+%% Term passed back to receive_message/6 with every incoming message.
-record(recvdata,
{peerT :: ets:tid(),
service_name :: diameter:service_name(),
@@ -87,12 +85,12 @@
%% Record stored in diameter_request for each outgoing request.
-record(request,
- {ref :: match(reference()), %% used to receive answer
- caller :: match(pid()), %% calling process
- handler :: match(pid()), %% request process
- transport :: match(pid()), %% peer process
- caps :: match(#diameter_caps{}), %% of connection
- packet :: match(#diameter_packet{})}). %% of request
+ {ref :: reference(), %% used to receive answer
+ caller :: pid() | undefined, %% calling process
+ handler :: pid(), %% request process
+ transport :: pid() | undefined, %% peer process
+ caps :: #diameter_caps{} | undefined, %% of connection
+ packet :: #diameter_packet{} | undefined}). %% of request
%% ---------------------------------------------------------------------------
%% # make_recvdata/1
@@ -113,26 +111,27 @@ make_recvdata([SvcName, PeerT, Apps, SvcOpts | _]) ->
%% peer_up/1
%% ---------------------------------------------------------------------------
-%% Insert an element that is used to detect whether or not there has
-%% been a failover when inserting an outgoing request.
+%% Start a process that dies with peer_down/1, and that request
+%% processes can monitor. There is no other process that dies with
+%% peer_down since failover doesn't imply the loss of transport in the
+%% case of a watchdog transition into state SUSPECT.
peer_up(TPid) ->
- ets:insert(?REQUEST_TABLE, {TPid}).
+ proc_lib:start(?MODULE, init, [TPid]).
+
+init(TPid) ->
+ ets:insert(?REQUEST_TABLE, {TPid, self()}),
+ proc_lib:init_ack(self()),
+ proc_lib:hibernate(erlang, exit, [{shutdown, TPid}]).
%% ---------------------------------------------------------------------------
%% peer_down/1
%% ---------------------------------------------------------------------------
peer_down(TPid) ->
- ets:delete_object(?REQUEST_TABLE, {TPid}),
- lists:foreach(fun failover/1, ets:lookup(?REQUEST_TABLE, TPid)).
-%% Note that a request process can store its request after failover
-%% notifications are sent here: insert_request/2 sends the notification
-%% in that case.
-
-%% failover/1
-
-failover({_TPid, {Pid, TRef}}) ->
- Pid ! {failover, TRef}.
+ [{_, Pid}] = ets:lookup(?REQUEST_TABLE, TPid),
+ ets:delete(?REQUEST_TABLE, TPid),
+ Pid ! ok, %% make it die
+ Pid.
%% ---------------------------------------------------------------------------
%% incr/4
@@ -207,54 +206,25 @@ incr_rc(Dir, Pkt, TPid, Dict0) ->
incr_rc(Dir, Pkt, TPid, {Dict0, Dict0, Dict0}).
%% ---------------------------------------------------------------------------
-%% pending/1
-%% ---------------------------------------------------------------------------
-
-pending(TPids) ->
- MatchSpec = [{{'$1',
- #request{caller = '$2',
- handler = '$3',
- transport = '$4',
- _ = '_'},
- '_'},
- [?ORCOND([{'==', T, '$4'} || T <- TPids])],
- [{{'$1', [{{caller, '$2'}},
- {{handler, '$3'}},
- {{transport, '$4'}}]}}]}],
-
- try
- ets:select(?REQUEST_TABLE, MatchSpec)
- catch
- error: badarg -> [] %% service has gone down
- end.
-
-%% ---------------------------------------------------------------------------
-%% # receive_message/4
+%% # receive_message/6
%%
%% Handle an incoming Diameter message.
%% ---------------------------------------------------------------------------
-%% Handle an incoming Diameter message in the watchdog process. This
-%% used to come through the service process but this avoids that
-%% becoming a bottleneck.
+%% Handle an incoming Diameter message in the watchdog process.
-receive_message(TPid, {Pkt, NPid}, Dict0, RecvData) ->
- NPid ! {diameter, incoming(TPid, Pkt, Dict0, RecvData)};
+receive_message(TPid, Route, Pkt, false, Dict0, RecvData) ->
+ incoming(TPid, Route, Pkt, Dict0, RecvData);
-receive_message(TPid, Pkt, Dict0, RecvData) ->
- incoming(TPid, Pkt, Dict0, RecvData).
+receive_message(TPid, Route, Pkt, NPid, Dict0, RecvData) ->
+ NPid ! {diameter, incoming(TPid, Route, Pkt, Dict0, RecvData)}.
%% incoming/4
-incoming(TPid, Pkt, Dict0, RecvData)
+incoming(TPid, Route, Pkt, Dict0, RecvData)
when is_pid(TPid) ->
#diameter_packet{header = #diameter_header{is_request = R}} = Pkt,
- recv(R,
- (not R) andalso lookup_request(Pkt, TPid),
- TPid,
- Pkt,
- Dict0,
- RecvData).
+ recv(R, Route, TPid, Pkt, Dict0, RecvData).
%% recv/6
@@ -269,8 +239,8 @@ recv(true, false, TPid, Pkt, Dict0, T) ->
end;
%% ... answer to known request ...
-recv(false, #request{ref = Ref, handler = Pid} = Req, _, Pkt, Dict0, _) ->
- Pid ! {answer, Ref, Req, Dict0, Pkt},
+recv(false, {Pid, Ref, TPid}, _, Pkt, Dict0, _) ->
+ Pid ! {answer, Ref, TPid, Dict0, Pkt},
{answer, Pid};
%% Note that failover could have happened prior to this message being
@@ -1503,32 +1473,39 @@ send_R(Pkt0,
packet = Pkt0},
incr(send, Pkt, TPid, AppDict),
- TRef = send_request(TPid, Pkt, Req, SvcName, Timeout),
+ {TRef, MRef} = zend_requezt(TPid, Pkt, Req, SvcName, Timeout),
Pid ! Ref, %% tell caller a send has been attempted
handle_answer(SvcName,
App,
- recv_A(Timeout, SvcName, App, Opts, {TRef, Req})).
+ recv_A(Timeout, SvcName, App, Opts, {TRef, MRef, Req})).
%% recv_A/5
-recv_A(Timeout, SvcName, App, Opts, {TRef, #request{ref = Ref} = Req}) ->
+recv_A(Timeout, SvcName, App, Opts, {TRef, MRef, #request{ref = Ref} = Req}) ->
%% Matching on TRef below ensures we ignore messages that pertain
%% to a previous transport prior to failover. The answer message
- %% includes the #request{} since it's not necessarily Req; that
- %% is, from the last peer to which we've transmitted.
+ %% includes the pid of the transport on which it was received,
+ %% which may not be the last peer to which we've transmitted.
receive
- {answer = A, Ref, Rq, Dict0, Pkt} -> %% Answer from peer
- {A, Rq, Dict0, Pkt};
+ {answer = A, Ref, TPid, Dict0, Pkt} -> %% Answer from peer
+ {A, #request{} = erase(TPid), Dict0, Pkt};
{timeout = Reason, TRef, _} -> %% No timely reply
{error, Req, Reason};
- {failover, TRef} -> %% Service says peer has gone down
- retransmit(pick_peer(SvcName, App, Req, Opts),
- Req,
- Opts,
- SvcName,
- Timeout)
+ {'DOWN', MRef, process, _, _} when false /= MRef -> %% local peer_down
+ failover(SvcName, App, Req, Opts, Timeout);
+ {failover, TRef} -> %% local or remote peer_down
+ failover(SvcName, App, Req, Opts, Timeout)
end.
+%% failover/5
+
+failover(SvcName, App, Req, Opts, Timeout) ->
+ retransmit(pick_peer(SvcName, App, Req, Opts),
+ Req,
+ Opts,
+ SvcName,
+ Timeout).
+
%% handle_answer/3
handle_answer(SvcName, App, {error, Req, Reason}) ->
@@ -1705,44 +1682,63 @@ encode(DictT, TPid, #diameter_packet{bin = undefined} = Pkt) ->
encode(_, _, #diameter_packet{} = Pkt) ->
Pkt.
+%% zend_requezt/5
+%%
+%% Strip potentially large record fields that aren't used by the
+%% processes the records can be sent to, possibly on a remote node.
+
+zend_requezt(TPid, Pkt, Req, SvcName, Timeout) ->
+ put(TPid, Req),
+ send_request(TPid, z(Pkt), Req, SvcName, Timeout).
+
%% send_request/5
send_request(TPid, #diameter_packet{bin = Bin} = Pkt, Req, _SvcName, Timeout)
when node() == node(TPid) ->
Seqs = diameter_codec:sequence_numbers(Bin),
TRef = erlang:start_timer(Timeout, self(), TPid),
- Entry = {Seqs, #request{handler = Pid} = Req, TRef},
-
- %% Ensure that request table is cleaned even if the process is
- %% killed.
- spawn(fun() -> diameter_lib:wait([Pid]), delete_request(Entry) end),
-
- insert_request(Entry),
- send(TPid, Pkt),
- TRef;
+ send(TPid, Pkt, _Route = {self(), Req#request.ref, Seqs}),
+ {TRef, _MRef = peer_monitor(TPid, TRef)};
%% Send using a remote transport: spawn a process on the remote node
%% to relay the answer.
send_request(TPid, #diameter_packet{} = Pkt, Req, SvcName, Timeout) ->
TRef = erlang:start_timer(Timeout, self(), TPid),
- T = {TPid, Pkt, Req, SvcName, Timeout, TRef},
+ T = {TPid, Pkt, z(Req), SvcName, Timeout, TRef},
spawn(node(TPid), ?MODULE, send, [T]),
- TRef.
+ {TRef, false}.
+
+%% z/1
+%%
+%% Avoid sending potentially large terms unnecessarily. The records
+%% themselves are retained since they're sent between nodes in send/1
+%% and changing what's sent causes upgrade issues.
+
+z(#request{ref = Ref, handler = Pid}) ->
+ #request{ref = Ref,
+ handler = Pid};
+
+z(#diameter_packet{header = H, bin = Bin, transport_data = T}) ->
+ #diameter_packet{header = H,
+ bin = Bin,
+ transport_data = T}.
%% send/1
send({TPid, Pkt, #request{handler = Pid} = Req0, SvcName, Timeout, TRef}) ->
Req = Req0#request{handler = self()},
- recv(TPid, Pid, TRef, send_request(TPid, Pkt, Req, SvcName, Timeout)).
+ recv(TPid, Pid, TRef, zend_requezt(TPid, Pkt, Req, SvcName, Timeout)).
%% recv/4
%%
%% Relay an answer from a remote node.
-recv(TPid, Pid, TRef, LocalTRef) ->
+recv(TPid, Pid, TRef, {LocalTRef, MRef}) ->
receive
{answer, _, _, _, _} = A ->
Pid ! A;
+ {'DOWN', MRef, process, _, _} ->
+ Pid ! {failover, TRef};
{failover = T, LocalTRef} ->
Pid ! {T, TRef};
T ->
@@ -1751,14 +1747,13 @@ recv(TPid, Pid, TRef, LocalTRef) ->
%% send/2
-send(Pid, Pkt) -> %% Strip potentially large message terms.
- #diameter_packet{header = H,
- bin = Bin,
- transport_data = T}
- = Pkt,
- Pid ! {send, #diameter_packet{header = H,
- bin = Bin,
- transport_data = T}}.
+send(Pid, Pkt) ->
+ Pid ! {send, Pkt}.
+
+%% send/3
+
+send(Pid, Pkt, Route) ->
+ Pid ! {send, Pkt, Route}.
%% retransmit/4
@@ -1768,8 +1763,8 @@ retransmit({TPid, Caps, App}
= Req,
SvcName,
Timeout) ->
- have_request(Pkt0, TPid) %% Don't failover to a peer we've
- andalso ?THROW(timeout), %% already sent to.
+ undefined == get(TPid) %% Don't failover to a peer we've
+ orelse ?THROW(timeout), %% already sent to.
Pkt = make_retransmit_packet(Pkt0),
@@ -1822,56 +1817,20 @@ resend_request(Pkt0,
?LOG(retransmission, Pkt#diameter_packet.header),
incr(TPid, {msg_id(Pkt, AppDict), send, retransmission}),
- TRef = send_request(TPid, Pkt, Req, SvcName, Tmo),
- {TRef, Req}.
-
-%% insert_request/1
-
-insert_request({_Seqs, #request{transport = TPid}, TRef} = T) ->
- ets:insert(?REQUEST_TABLE, [T, {TPid, {self(), TRef}}]),
- is_peer_up(TPid)
- orelse (self() ! {failover, TRef}). %% failover/1 may have missed
-
-%% is_peer_up/1
-%%
-%% Is the entry written by peer_up/1 and deleted by peer_down/1 still
-%% in the request table?
+ {TRef, MRef} = zend_requezt(TPid, Pkt, Req, SvcName, Tmo),
+ {TRef, MRef, Req}.
-is_peer_up(TPid) ->
- Spec = [{{TPid}, [], ['$_']}],
- '$end_of_table' /= ets:select(?REQUEST_TABLE, Spec, 1).
+%% peer_monitor/2
-%% lookup_request/2
-%%
-%% Note the match on both the key and transport pid. The latter is
-%% necessary since the same Hop-by-Hop and End-to-End identifiers are
-%% reused in the case of retransmission.
-
-lookup_request(Msg, TPid) ->
- Seqs = diameter_codec:sequence_numbers(Msg),
- Spec = [{{Seqs, #request{transport = TPid, _ = '_'}, '_'},
- [],
- ['$_']}],
- case ets:select(?REQUEST_TABLE, Spec) of
- [{_, Req, _}] ->
- Req;
- [] ->
+peer_monitor(TPid, TRef) ->
+ case ets:lookup(?REQUEST_TABLE, TPid) of %% at peer_up/1
+ [{_, MPid}] ->
+ monitor(process, MPid);
+ [] -> %% transport has gone down
+ self() ! {failover, TRef},
false
end.
-%% delete_request/1
-
-delete_request({_Seqs, #request{handler = Pid, transport = TPid}, TRef} = T) ->
- Spec = [{R, [], [true]} || R <- [T, {TPid, {Pid, TRef}}]],
- ets:select_delete(?REQUEST_TABLE, Spec).
-
-%% have_request/2
-
-have_request(Pkt, TPid) ->
- Seqs = diameter_codec:sequence_numbers(Pkt),
- Pat = {Seqs, #request{transport = TPid, _ = '_'}, '_'},
- '$end_of_table' /= ets:select(?REQUEST_TABLE, [{Pat, [], ['$_']}], 1).
-
%% get_destination/2
get_destination(Dict, Msg) ->
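
The peer_up/peer_down rework above replaces per-request failover entries with a single process per peer: peer_up/1 starts it via proc_lib, request processes monitor it through the {TPid, Pid} table entry, and peer_down/1 wakes it so it exits, turning failover into an ordinary 'DOWN' message. Hibernating keeps the otherwise idle process cheap until the peer goes down. A minimal standalone sketch of that lifecycle, under illustrative names (not the patch itself):

-module(peer_monitor_sketch).
-export([demo/0, init/1]).

%% Started at "peer up": registers itself in a table entry keyed on the
%% transport pid, then hibernates until "peer down" wakes it to exit.
peer_up(Tab, TPid) ->
    proc_lib:start(?MODULE, init, [{Tab, TPid}]).

init({Tab, TPid}) ->
    ets:insert(Tab, {TPid, self()}),
    proc_lib:init_ack(self()),
    proc_lib:hibernate(erlang, exit, [{shutdown, TPid}]).

%% At "peer down": remove the entry and wake the process so it exits,
%% delivering 'DOWN' to every request process monitoring it.
peer_down(Tab, TPid) ->
    [{_, Pid}] = ets:lookup(Tab, TPid),
    ets:delete(Tab, TPid),
    Pid ! ok.

demo() ->
    Tab = ets:new(sketch, [set, public]),
    TPid = self(),                    %% stand-in for a transport pid
    MPid = peer_up(Tab, TPid),
    MRef = monitor(process, MPid),    %% what a request process would do
    peer_down(Tab, TPid),
    receive
        {'DOWN', MRef, process, MPid, {shutdown, TPid}} -> failover
    after 1000 -> timeout
    end.
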
diff --git a/lib/diameter/src/base/diameter_watchdog.erl b/lib/diameter/src/base/diameter_watchdog.erl
index 2ba60a65fb..f28b8f2910 100644
--- a/lib/diameter/src/base/diameter_watchdog.erl
+++ b/lib/diameter/src/base/diameter_watchdog.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -283,7 +283,7 @@ event(Msg,
?LOG(transition, {From, To}).
data(Msg, TPid, reopen, okay) ->
- {recv, TPid, 'DWA', _Pkt} = Msg, %% assert
+ {recv, TPid, false, 'DWA', _Pkt, _NPid} = Msg, %% assert
{TPid, T} = eraser(open),
[T];
@@ -447,12 +447,14 @@ transition({'DOWN', _, process, TPid, _Reason} = D,
end;
%% Incoming message.
-transition({recv, TPid, Name, PktT}, #watchdog{transport = TPid} = S) ->
+transition({recv, TPid, Route, Name, Pkt, NPid},
+ #watchdog{transport = TPid}
+ = S) ->
try
- incoming(Name, PktT, S)
+ incoming(Name, Pkt, NPid, S)
catch
#watchdog{dictionary = Dict0, receive_data = T} = NS ->
- diameter_traffic:receive_message(TPid, PktT, Dict0, T),
+ diameter_traffic:receive_message(TPid, Route, Pkt, NPid, Dict0, T),
NS
end;
@@ -582,15 +584,17 @@ send_watchdog(#watchdog{pending = false,
%% Don't count encode errors since we don't expect any on DWR/DWA.
-%% incoming/3
+%% incoming/4
-incoming(Name, {Pkt, NPid}, S) ->
- NS = recv(Name, Pkt, S),
- NPid ! {diameter, discard},
- NS;
+incoming(Name, Pkt, false, S) ->
+ recv(Name, Pkt, S);
-incoming(Name, Pkt, S) ->
- recv(Name, Pkt, S).
+incoming(Name, Pkt, NPid, S) ->
+ try
+ recv(Name, Pkt, S)
+ after
+ NPid ! {diameter, discard}
+ end.
%% recv/3
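
In the reworked incoming/4 above, try ... after guarantees that the notification process is told to discard the message even when recv/3 throws (which it does to signal a watchdog state change). A tiny illustration of that guarantee, with hypothetical names:

-module(notify_after_sketch).
-export([demo/0]).

%% Run Fun and always notify NPid afterwards, whether Fun returns
%% normally or throws.
handle(Fun, NPid) ->
    try
        Fun()
    after
        NPid ! discard
    end.

demo() ->
    Self = self(),
    ok = handle(fun() -> ok end, Self),
    receive discard -> ok end,
    got_discard_anyway =
        try handle(fun() -> throw(new_state) end, Self) of
            _ -> unreachable
        catch
            throw:new_state ->
                receive discard -> got_discard_anyway end
        end,
    ok.
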
diff --git a/lib/diameter/src/diameter.appup.src b/lib/diameter/src/diameter.appup.src
index b1b8e38d39..eb5a5a44f3 100644
--- a/lib/diameter/src/diameter.appup.src
+++ b/lib/diameter/src/diameter.appup.src
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -50,10 +50,8 @@
{"1.11", [{restart_application, diameter}]}, %% 18.1
{"1.11.1", [{restart_application, diameter}]}, %% 18.2
{"1.11.2", [{restart_application, diameter}]}, %% 18.3
- {"1.12", [{load_module, diameter_lib}, %% 19.0
- {load_module, diameter_traffic},
- {load_module, diameter_tcp},
- {load_module, diameter_sctp}]}
+ {"1.12", [{restart_application, diameter}]}, %% 19.0
+ {"1.12.1", [{restart_application, diameter}]} %% 19.1
],
[
{"0.9", [{restart_application, diameter}]},
@@ -85,9 +83,7 @@
{"1.11", [{restart_application, diameter}]},
{"1.11.1", [{restart_application, diameter}]},
{"1.11.2", [{restart_application, diameter}]},
- {"1.12", [{load_module, diameter_sctp},
- {load_module, diameter_tcp},
- {load_module, diameter_traffic},
- {load_module, diameter_lib}]}
+ {"1.12", [{restart_application, diameter}]},
+ {"1.12.1", [{restart_application, diameter}]}
]
}.
diff --git a/lib/diameter/src/info/diameter_info.erl b/lib/diameter/src/info/diameter_info.erl
index 59a3b94ee4..2a27600346 100644
--- a/lib/diameter/src/info/diameter_info.erl
+++ b/lib/diameter/src/info/diameter_info.erl
@@ -195,7 +195,7 @@ format(Tables, SFun, CFun)
%%%
%%% Description: Pretty-print records in a named tables as collected
%%% from local and remote nodes. Each table listing is
-%%% preceeded by a banner.
+%%% preceded by a banner.
%%% ----------------------------------------------------------
format(Local, Remote, SFun) ->
diff --git a/lib/diameter/src/transport/diameter_sctp.erl b/lib/diameter/src/transport/diameter_sctp.erl
index f48e4347ee..ad9f4b0d80 100644
--- a/lib/diameter/src/transport/diameter_sctp.erl
+++ b/lib/diameter/src/transport/diameter_sctp.erl
@@ -402,7 +402,7 @@ handle_info(T, #transport{} = S) ->
handle_info(T, #listener{} = S) ->
{noreply, #listener{} = l(T,S)}.
-%% Prior to the possiblity of setting pool_size on in transport
+%% Prior to the possibility of setting pool_size on in transport
%% configuration, a new accepting transport was only started following
%% the death of a predecessor, so that there was only at most one
%% previously started transport process waiting for an association.
diff --git a/lib/diameter/test/diameter_pool_SUITE.erl b/lib/diameter/test/diameter_pool_SUITE.erl
index eadb354a1d..383fa0a031 100644
--- a/lib/diameter/test/diameter_pool_SUITE.erl
+++ b/lib/diameter/test/diameter_pool_SUITE.erl
@@ -115,7 +115,7 @@ connect(ClientProt, ServerProt) ->
%% 'up' events. (Although it's likely.)
sleep(),
{9,5} = count("server", LRef, accept), %% 5 connections + 4 accepting
- %% Ensure ther are still the expected number of accepting transports
+ %% Ensure there are still the expected number of accepting transports
%% after stopping the client service.
ok = diameter:stop_service("client"),
sleep(),
diff --git a/lib/diameter/vsn.mk b/lib/diameter/vsn.mk
index 23219950bb..94d9d72a48 100644
--- a/lib/diameter/vsn.mk
+++ b/lib/diameter/vsn.mk
@@ -1,6 +1,6 @@
# %CopyrightBegin%
#
-# Copyright Ericsson AB 2010-2016. All Rights Reserved.
+# Copyright Ericsson AB 2010-2017. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -17,5 +17,5 @@
# %CopyrightEnd%
APPLICATION = diameter
-DIAMETER_VSN = 1.12.1
+DIAMETER_VSN = 1.12.2
APP_VSN = $(APPLICATION)-$(DIAMETER_VSN)$(PRE_VSN)
diff --git a/lib/edoc/src/edoc_tags.erl b/lib/edoc/src/edoc_tags.erl
index 7e59f373b2..da078de0b9 100644
--- a/lib/edoc/src/edoc_tags.erl
+++ b/lib/edoc/src/edoc_tags.erl
@@ -227,7 +227,7 @@ filter_tags([#tag{name = N, line = L} = T | Ts], Tags, Where, Ts1) ->
filter_tags([], _, _, Ts) ->
lists:reverse(Ts).
-%% Check occurrances of tags.
+%% Check occurrences of tags.
check_tags(Ts, Allow, Single, Where) ->
check_tags(Ts, Allow, Single, Where, false, sets:new()).
diff --git a/lib/eldap/test/README b/lib/eldap/test/README
index ec774c1ae3..af1bf6a082 100644
--- a/lib/eldap/test/README
+++ b/lib/eldap/test/README
@@ -16,7 +16,7 @@ To start slapd:
This will however not work, since slapd is guarded by apparmor that checks that slapd does not access other than allowed files...
-To make a local extension of alowed operations:
+To make a local extension of allowed operations:
sudo emacs /etc/apparmor.d/local/usr.sbin.slapd
and, after the change (yes, at least on Ubuntu it is right to edit ../local/.. but run with another file):
diff --git a/lib/erl_interface/doc/src/erl_call.xml b/lib/erl_interface/doc/src/erl_call.xml
index f1e52b1889..426f6b88ca 100644
--- a/lib/erl_interface/doc/src/erl_call.xml
+++ b/lib/erl_interface/doc/src/erl_call.xml
@@ -193,7 +193,7 @@ erl_call -s -a 'erlang halt' -n madonna
<p>To apply with many arguments:</p>
<code type="none"><![CDATA[
-erl_call -s -a 'lists map [{math,sqrt},[1,4,9,16,25]]' -n madonna
+erl_call -s -a 'lists seq [1,10]' -n madonna
]]></code>
<p>To evaluate some expressions
diff --git a/lib/erl_interface/src/README b/lib/erl_interface/src/README
index feee2e48e8..7591615f78 100644
--- a/lib/erl_interface/src/README
+++ b/lib/erl_interface/src/README
@@ -11,7 +11,7 @@ Also, assertions are enabled, meaning that the code will be a
little bit slower. In the final release, there will be two
alternative libraries shipped, with and without assertions.
-If an assertion triggers, there will be a printout similiar to this
+If an assertion triggers, there will be a printout similar to this
one:
Assertion failed: ep != NULL in erl_eterm.c, line 694
diff --git a/lib/erl_interface/src/legacy/erl_marshal.c b/lib/erl_interface/src/legacy/erl_marshal.c
index 2bdf5f2134..527ae0ef8f 100644
--- a/lib/erl_interface/src/legacy/erl_marshal.c
+++ b/lib/erl_interface/src/legacy/erl_marshal.c
@@ -1626,7 +1626,7 @@ static int cmp_refs(unsigned char **e1, unsigned char **e2)
if (cre1 != cre2)
return cre1 < cre2 ? -1 : 1;
- /* ... and then finaly ids. */
+ /* ... and then finally ids. */
if (n1 != n2) {
unsigned char zero[] = {0, 0, 0, 0};
if (n1 > n2)
@@ -1791,7 +1791,7 @@ static int cmp_exe2(unsigned char **e1, unsigned char **e2)
if (port1.creation < port2.creation) return -1;
else if (port1.creation > port2.creation) return 1;
- /* ... and then finaly ids. */
+ /* ... and then finally ids. */
if (port1.id < port2.id) return -1;
else if (port1.id > port2.id) return 1;
diff --git a/lib/erl_interface/src/misc/ei_locking.c b/lib/erl_interface/src/misc/ei_locking.c
index 85b2a5fd8b..a0e00b7871 100644
--- a/lib/erl_interface/src/misc/ei_locking.c
+++ b/lib/erl_interface/src/misc/ei_locking.c
@@ -76,8 +76,8 @@ ei_mutex_t *ei_mutex_create(void)
return l;
}
-/*
- * Free a mutex and the structure asociated with it.
+/*
+ * Free a mutex and the structure associated with it.
*
* This function attempts to obtain the mutex before releasing it;
* If nblock == 1 and the mutex was unavailable, the function will
diff --git a/lib/erl_interface/test/ei_decode_SUITE.erl b/lib/erl_interface/test/ei_decode_SUITE.erl
index 1495a0d5d9..10e90685c8 100644
--- a/lib/erl_interface/test/ei_decode_SUITE.erl
+++ b/lib/erl_interface/test/ei_decode_SUITE.erl
@@ -99,7 +99,7 @@ test_ei_decode_ulonglong(Config) when is_list(Config) ->
%% ######################################################################## %%
-%% A "character" for us is an 8 bit integer, alwasy positive, i.e.
+%% A "character" for us is an 8 bit integer, always positive, i.e.
%% it is unsigned.
%% FIXME maybe the API should change to use "unsigned char" to be clear?!
diff --git a/lib/erl_interface/test/erl_eterm_SUITE.erl b/lib/erl_interface/test/erl_eterm_SUITE.erl
index 0e51a50c19..7fd46694b8 100644
--- a/lib/erl_interface/test/erl_eterm_SUITE.erl
+++ b/lib/erl_interface/test/erl_eterm_SUITE.erl
@@ -31,7 +31,7 @@
%%% 2. Constructing terms (the erl_mk_xxx() functions and erl_copy_term()).
%%% 3. Extracting & info functions (erl_hd(), erl_length() etc).
%%% 4. I/O list functions.
-%%% 5. Miscellanous functions.
+%%% 5. Miscellaneous functions.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-export([all/0, suite/0,
diff --git a/lib/eunit/doc/src/notes.xml b/lib/eunit/doc/src/notes.xml
index 8509f44ffc..d7ec2108e9 100644
--- a/lib/eunit/doc/src/notes.xml
+++ b/lib/eunit/doc/src/notes.xml
@@ -498,7 +498,7 @@
<list>
<item>
<p>
- Miscellanous updates.</p>
+ Miscellaneous updates.</p>
<p>
Own Id: OTP-8038</p>
</item>
diff --git a/lib/hipe/amd64/Makefile b/lib/hipe/amd64/Makefile
index 617f6749ac..d0da8cdff6 100644
--- a/lib/hipe/amd64/Makefile
+++ b/lib/hipe/amd64/Makefile
@@ -128,6 +128,7 @@ $(EBIN)/hipe_amd64_ra_postconditions.beam: ../main/hipe.hrl ../x86/hipe_x86.hrl
$(EBIN)/hipe_amd64_ra_sse2_postconditions.beam: ../main/hipe.hrl
$(EBIN)/hipe_amd64_registers.beam: ../rtl/hipe_literals.hrl
$(EBIN)/hipe_amd64_spill_restore.beam: ../main/hipe.hrl ../x86/hipe_x86.hrl ../flow/cfg.hrl ../x86/hipe_x86_spill_restore.erl
+$(EBIN)/hipe_amd64_subst.beam: ../x86/hipe_x86_subst.erl
$(EBIN)/hipe_amd64_x87.beam: ../x86/hipe_x86_x87.erl
$(EBIN)/hipe_amd64_sse2.beam: ../main/hipe.hrl ../x86/hipe_x86.hrl
$(EBIN)/hipe_rtl_to_amd64.beam: ../x86/hipe_rtl_to_x86.erl ../rtl/hipe_rtl.hrl
diff --git a/lib/hipe/amd64/hipe_amd64_encode.erl b/lib/hipe/amd64/hipe_amd64_encode.erl
index f8cc0c7d83..bda2824ffc 100644
--- a/lib/hipe/amd64/hipe_amd64_encode.erl
+++ b/lib/hipe/amd64/hipe_amd64_encode.erl
@@ -1316,6 +1316,7 @@ dotest1(OS) ->
RM64 = {rm64,rm_reg(?EDX)},
RM32 = {rm32,rm_reg(?EDX)},
RM16 = {rm16,rm_reg(?EDX)},
+ RM16REX = {rm16,rm_reg(?R13)},
RM8 = {rm8,rm_reg(?EDX)},
RM8REX = {rm8,rm_reg(?SIL)},
Rel32 = {rel32,Word32},
@@ -1479,6 +1480,7 @@ dotest1(OS) ->
t(OS,'test',{RM8,Imm8}),
t(OS,'test',{RM8REX,Imm8}),
t(OS,'test',{RM16,Imm16}),
+ t(OS,'test',{RM16REX,Imm16}),
t(OS,'test',{RM32,Imm32}),
t(OS,'test',{RM64,Imm32}),
t(OS,'test',{RM32,Reg32}),
diff --git a/lib/hipe/amd64/hipe_amd64_registers.erl b/lib/hipe/amd64/hipe_amd64_registers.erl
index a4cb71a106..a5cecef5a1 100644
--- a/lib/hipe/amd64/hipe_amd64_registers.erl
+++ b/lib/hipe/amd64/hipe_amd64_registers.erl
@@ -207,19 +207,14 @@ allocatable_x87() ->
nr_args() -> ?AMD64_NR_ARG_REGS.
-arg(N) ->
- if N < ?AMD64_NR_ARG_REGS ->
- case N of
- 0 -> ?ARG0;
- 1 -> ?ARG1;
- 2 -> ?ARG2;
- 3 -> ?ARG3;
- 4 -> ?ARG4;
- 5 -> ?ARG5;
- _ -> exit({?MODULE, arg, N})
- end;
- true ->
- exit({?MODULE, arg, N})
+arg(N) when N < ?AMD64_NR_ARG_REGS ->
+ case N of
+ 0 -> ?ARG0;
+ 1 -> ?ARG1;
+ 2 -> ?ARG2;
+ 3 -> ?ARG3;
+ 4 -> ?ARG4;
+ 5 -> ?ARG5
end.
is_arg(R) ->
@@ -240,11 +235,7 @@ args(Arity) when is_integer(Arity), Arity >= 0 ->
args(I, Rest) when I < 0 -> Rest;
args(I, Rest) -> args(I-1, [arg(I) | Rest]).
-ret(N) ->
- case N of
- 0 -> ?RAX;
- _ -> exit({?MODULE, ret, N})
- end.
+ret(0) -> ?RAX.
%% Note: the fact that (allocatable() UNION allocatable_x87() UNION
%% allocatable_sse2()) is a subset of call_clobbered() is hard-coded in
diff --git a/lib/hipe/cerl/cerl_to_icode.erl b/lib/hipe/cerl/cerl_to_icode.erl
index acad8a9da4..e37eae8a03 100644
--- a/lib/hipe/cerl/cerl_to_icode.erl
+++ b/lib/hipe/cerl/cerl_to_icode.erl
@@ -2621,7 +2621,7 @@ icode_switch_val(Arg, Fail, Length, Cases) ->
hipe_icode:mk_switch_val(Arg, Fail, Length, Cases).
icode_switch_tuple_arity(Arg, Fail, Length, Cases) ->
- SortedCases = lists:keysort(1, Cases), %% immitate BEAM compiler - Kostis
+ SortedCases = lists:keysort(1, Cases), %% imitate BEAM compiler - Kostis
hipe_icode:mk_switch_tuple_arity(Arg, Fail, Length, SortedCases).
diff --git a/lib/hipe/cerl/erl_types.erl b/lib/hipe/cerl/erl_types.erl
index 5a4cf77b81..ea8cc1677d 100644
--- a/lib/hipe/cerl/erl_types.erl
+++ b/lib/hipe/cerl/erl_types.erl
@@ -228,7 +228,8 @@
-export([t_is_identifier/1]).
-endif.
--export_type([erl_type/0, opaques/0, type_table/0, var_table/0, cache/0]).
+-export_type([erl_type/0, opaques/0, type_table/0, mod_records/0,
+ var_table/0, cache/0]).
%%-define(DEBUG, true).
@@ -371,8 +372,9 @@
-type type_value() :: {{module(), {file:name(), erl_anno:line()},
erl_parse:abstract_type(), ArgNames :: [atom()]},
erl_type()}.
--type type_table() :: dict:dict(record_key() | type_key(),
- record_value() | type_value()).
+-type type_table() :: #{record_key() | type_key() =>
+ record_value() | type_value()}.
+-type mod_records() :: dict:dict(module(), type_table()).
-opaque var_table() :: #{atom() => erl_type()}.
@@ -516,7 +518,8 @@ list_contains_opaque(List, Opaques) ->
lists:any(fun(E) -> t_contains_opaque(E, Opaques) end, List).
%% t_find_opaque_mismatch/2 of two types should only be used if their
-%% t_inf is t_none() due to some opaque type violation.
+%% t_inf is t_none() due to some opaque type violation. However,
+%% 'error' is returned if a structure mismatch is found.
%%
%% The first argument of the function is the pattern and its second
%% argument the type we are matching against the pattern.
@@ -525,22 +528,30 @@ list_contains_opaque(List, Opaques) ->
'error' | {'ok', erl_type(), erl_type()}.
t_find_opaque_mismatch(T1, T2, Opaques) ->
- t_find_opaque_mismatch(T1, T2, T2, Opaques).
+ catch t_find_opaque_mismatch(T1, T2, T2, Opaques).
t_find_opaque_mismatch(?any, _Type, _TopType, _Opaques) -> error;
-t_find_opaque_mismatch(?none, _Type, _TopType, _Opaques) -> error;
+t_find_opaque_mismatch(?none, _Type, _TopType, _Opaques) -> throw(error);
t_find_opaque_mismatch(?list(T1, Tl1, _), ?list(T2, Tl2, _), TopType, Opaques) ->
t_find_opaque_mismatch_ordlists([T1, Tl1], [T2, Tl2], TopType, Opaques);
t_find_opaque_mismatch(T1, ?opaque(_) = T2, TopType, Opaques) ->
case is_opaque_type(T2, Opaques) of
- false -> {ok, TopType, T2};
+ false ->
+ case t_is_opaque(T1) andalso compatible_opaque_types(T1, T2) =/= [] of
+ true -> error;
+ false -> {ok, TopType, T2}
+ end;
true ->
t_find_opaque_mismatch(T1, t_opaque_structure(T2), TopType, Opaques)
end;
t_find_opaque_mismatch(?opaque(_) = T1, T2, TopType, Opaques) ->
%% The generated message is somewhat misleading:
case is_opaque_type(T1, Opaques) of
- false -> {ok, TopType, T1};
+ false ->
+ case t_is_opaque(T2) andalso compatible_opaque_types(T1, T2) =/= [] of
+ true -> error;
+ false -> {ok, TopType, T1}
+ end;
true ->
t_find_opaque_mismatch(t_opaque_structure(T1), T2, TopType, Opaques)
end;
@@ -556,7 +567,11 @@ t_find_opaque_mismatch(?tuple(_, _, _) = T1, ?tuple_set(_) = T2,
t_find_opaque_mismatch_lists(Tuples1, Tuples2, TopType, Opaques);
t_find_opaque_mismatch(T1, ?union(U2), TopType, Opaques) ->
t_find_opaque_mismatch_lists([T1], U2, TopType, Opaques);
-t_find_opaque_mismatch(_T1, _T2, _TopType, _Opaques) -> error.
+t_find_opaque_mismatch(T1, T2, _TopType, Opaques) ->
+ case t_is_none(t_inf(T1, T2, Opaques)) of
+ false -> error;
+ true -> throw(error)
+ end.
t_find_opaque_mismatch_ordlists(L1, L2, TopType, Opaques) ->
List = lists:zipwith(fun(T1, T2) ->
@@ -565,10 +580,11 @@ t_find_opaque_mismatch_ordlists(L1, L2, TopType, Opaques) ->
t_find_opaque_mismatch_list(List).
t_find_opaque_mismatch_lists(L1, L2, _TopType, Opaques) ->
- List = [t_find_opaque_mismatch(T1, T2, T2, Opaques) || T1 <- L1, T2 <- L2],
+ List = [catch t_find_opaque_mismatch(T1, T2, T2, Opaques) ||
+ T1 <- L1, T2 <- L2],
t_find_opaque_mismatch_list(List).
-t_find_opaque_mismatch_list([]) -> error;
+t_find_opaque_mismatch_list([]) -> throw(error);
t_find_opaque_mismatch_list([H|T]) ->
case H of
{ok, _T1, _T2} -> H;
@@ -741,16 +757,16 @@ decorate_tuples_in_sets([], _L, _Opaques, Acc) ->
-spec t_opaque_from_records(type_table()) -> [erl_type()].
-t_opaque_from_records(RecDict) ->
- OpaqueRecDict =
- dict:filter(fun(Key, _Value) ->
+t_opaque_from_records(RecMap) ->
+ OpaqueRecMap =
+ maps:filter(fun(Key, _Value) ->
case Key of
{opaque, _Name, _Arity} -> true;
_ -> false
end
- end, RecDict),
- OpaqueTypeDict =
- dict:map(fun({opaque, Name, _Arity},
+ end, RecMap),
+ OpaqueTypeMap =
+ maps:map(fun({opaque, Name, _Arity},
{{Module, _FileLine, _Form, ArgNames}, _Type}) ->
%% Args = args_to_types(ArgNames),
%% List = lists:zip(ArgNames, Args),
@@ -759,8 +775,8 @@ t_opaque_from_records(RecDict) ->
Rep = t_any(), % not used for anything right now
Args = [t_any() || _ <- ArgNames],
t_opaque(Module, Name, Args, Rep)
- end, OpaqueRecDict),
- [OpaqueType || {_Key, OpaqueType} <- dict:to_list(OpaqueTypeDict)].
+ end, OpaqueRecMap),
+ [OpaqueType || {_Key, OpaqueType} <- maps:to_list(OpaqueTypeMap)].
%% Decompose opaque instances of type arg2 to structured types, in arg1
%% XXX: Same as t_unopaque
@@ -794,10 +810,6 @@ list_struct_from_opaque(Types, Opaques) ->
[t_struct_from_opaque(Type, Opaques) || Type <- Types].
%%-----------------------------------------------------------------------------
-
--type mod_records() :: dict:dict(module(), type_table()).
-
-%%-----------------------------------------------------------------------------
%% Unit type. Signals non termination.
%%
@@ -2237,16 +2249,21 @@ t_has_var_list([]) -> false.
-spec t_collect_vars(erl_type()) -> [erl_type()].
t_collect_vars(T) ->
- t_collect_vars(T, []).
+ Vs = t_collect_vars(T, maps:new()),
+ [V || {V, _} <- maps:to_list(Vs)].
+
+-type ctab() :: #{erl_type() => 'any'}.
--spec t_collect_vars(erl_type(), [erl_type()]) -> [erl_type()].
+-spec t_collect_vars(erl_type(), ctab()) -> ctab().
t_collect_vars(?var(_) = Var, Acc) ->
- ordsets:add_element(Var, Acc);
+ maps:put(Var, any, Acc);
t_collect_vars(?function(Domain, Range), Acc) ->
- ordsets:union(t_collect_vars(Domain, Acc), t_collect_vars(Range, []));
+ Acc1 = t_collect_vars(Domain, Acc),
+ t_collect_vars(Range, Acc1);
t_collect_vars(?list(Contents, Termination, _), Acc) ->
- ordsets:union(t_collect_vars(Contents, Acc), t_collect_vars(Termination, []));
+ Acc1 = t_collect_vars(Contents, Acc),
+ t_collect_vars(Termination, Acc1);
t_collect_vars(?product(Types), Acc) ->
t_collect_vars_list(Types, Acc);
t_collect_vars(?tuple(?any, ?any, ?any), Acc) ->
@@ -3044,6 +3061,9 @@ inf_opaque_types(IsOpaque1, T1, IsOpaque2, T2, Opaques) ->
end
end.
+compatible_opaque_types(?opaque(Es1), ?opaque(Es2)) ->
+ [{O1, O2} || O1 <- Es1, O2 <- Es2, is_compat_opaque_names(O1, O2)].
+
is_compat_opaque_names(Opaque1, Opaque2) ->
#opaque{mod = Mod1, name = Name1, args = Args1} = Opaque1,
#opaque{mod = Mod2, name = Name2, args = Args2} = Opaque2,
@@ -3059,88 +3079,91 @@ is_compat_args([A1|Args1], [A2|Args2]) ->
is_compat_args([], []) -> true;
is_compat_args(_, _) -> false.
-is_compat_arg(A1, A2) ->
- is_specialization(A1, A2) orelse is_specialization(A2, A1).
-
--spec is_specialization(erl_type(), erl_type()) -> boolean().
-
-%% Returns true if the first argument is a specialization of the
-%% second argument in the sense that every type is a specialization of
-%% any(). For example, {_,_} is a specialization of any(), but not of
-%% tuple(). Does not handle variables, but any() and unions (sort of).
-
-is_specialization(T, T) -> true;
-is_specialization(_, ?any) -> true;
-is_specialization(?any, _) -> false;
-is_specialization(?function(Domain1, Range1), ?function(Domain2, Range2)) ->
- (is_specialization(Domain1, Domain2) andalso
- is_specialization(Range1, Range2));
-is_specialization(?list(Contents1, Termination1, Size1),
- ?list(Contents2, Termination2, Size2)) ->
+-spec is_compat_arg(erl_type(), erl_type()) -> boolean().
+
+%% The intention is that 'true' is to be returned iff one of the
+%% arguments is a specialization of the other argument in the sense
+%% that every type is a specialization of any(). For example, {_,_} is
+%% a specialization of any(), but not of tuple(). Does not handle
+%% variables, but any() and unions (sort of). However, the
+%% implementation is more relaxed as any() is compatible with anything.
+
+is_compat_arg(T, T) -> true;
+is_compat_arg(_, ?any) -> true;
+is_compat_arg(?any, _) -> true;
+is_compat_arg(?function(Domain1, Range1), ?function(Domain2, Range2)) ->
+ (is_compat_arg(Domain1, Domain2) andalso
+ is_compat_arg(Range1, Range2));
+is_compat_arg(?list(Contents1, Termination1, Size1),
+ ?list(Contents2, Termination2, Size2)) ->
(Size1 =:= Size2 andalso
- is_specialization(Contents1, Contents2) andalso
- is_specialization(Termination1, Termination2));
-is_specialization(?product(Types1), ?product(Types2)) ->
- specialization_list(Types1, Types2);
-is_specialization(?tuple(?any, ?any, ?any), ?tuple(_, _, _)) -> false;
-is_specialization(?tuple(_, _, _), ?tuple(?any, ?any, ?any)) -> false;
-is_specialization(?tuple(Elements1, Arity, _),
- ?tuple(Elements2, Arity, _)) when Arity =/= ?any ->
- specialization_list(Elements1, Elements2);
-is_specialization(?tuple_set([{Arity, List}]),
- ?tuple(Elements2, Arity, _)) when Arity =/= ?any ->
- specialization_list(sup_tuple_elements(List), Elements2);
-is_specialization(?tuple(Elements1, Arity, _),
- ?tuple_set([{Arity, List}])) when Arity =/= ?any ->
- specialization_list(Elements1, sup_tuple_elements(List));
-is_specialization(?tuple_set(List1), ?tuple_set(List2)) ->
+ is_compat_arg(Contents1, Contents2) andalso
+ is_compat_arg(Termination1, Termination2));
+is_compat_arg(?product(Types1), ?product(Types2)) ->
+ is_compat_list(Types1, Types2);
+is_compat_arg(?map(Pairs1, DefK1, DefV1), ?map(Pairs2, DefK2, DefV2)) ->
+ (is_compat_list(Pairs1, Pairs2) andalso
+ is_compat_arg(DefK1, DefK2) andalso
+ is_compat_arg(DefV1, DefV2));
+is_compat_arg(?tuple(?any, ?any, ?any), ?tuple(_, _, _)) -> false;
+is_compat_arg(?tuple(_, _, _), ?tuple(?any, ?any, ?any)) -> false;
+is_compat_arg(?tuple(Elements1, Arity, _),
+ ?tuple(Elements2, Arity, _)) when Arity =/= ?any ->
+ is_compat_list(Elements1, Elements2);
+is_compat_arg(?tuple_set([{Arity, List}]),
+ ?tuple(Elements2, Arity, _)) when Arity =/= ?any ->
+ is_compat_list(sup_tuple_elements(List), Elements2);
+is_compat_arg(?tuple(Elements1, Arity, _),
+ ?tuple_set([{Arity, List}])) when Arity =/= ?any ->
+ is_compat_list(Elements1, sup_tuple_elements(List));
+is_compat_arg(?tuple_set(List1), ?tuple_set(List2)) ->
try
- specialization_list_list([sup_tuple_elements(T) || {_Arity, T} <- List1],
- [sup_tuple_elements(T) || {_Arity, T} <- List2])
+ is_compat_list_list([sup_tuple_elements(T) || {_Arity, T} <- List1],
+ [sup_tuple_elements(T) || {_Arity, T} <- List2])
catch _:_ -> false
end;
-is_specialization(?opaque(_) = T1, T2) ->
- is_specialization(t_opaque_structure(T1), T2);
-is_specialization(T1, ?opaque(_) = T2) ->
- is_specialization(T1, t_opaque_structure(T2));
-is_specialization(?union(List1)=T1, ?union(List2)=T2) ->
- case specialization_union2(T1, T2) of
- {yes, Type1, Type2} -> is_specialization(Type1, Type2);
- no -> specialization_list(List1, List2)
+is_compat_arg(?opaque(_) = T1, T2) ->
+ is_compat_arg(t_opaque_structure(T1), T2);
+is_compat_arg(T1, ?opaque(_) = T2) ->
+ is_compat_arg(T1, t_opaque_structure(T2));
+is_compat_arg(?union(List1)=T1, ?union(List2)=T2) ->
+ case is_compat_union2(T1, T2) of
+ {yes, Type1, Type2} -> is_compat_arg(Type1, Type2);
+ no -> is_compat_list(List1, List2)
end;
-is_specialization(?union(List), T2) ->
+is_compat_arg(?union(List), T2) ->
case unify_union(List) of
- {yes, Type} -> is_specialization(Type, T2);
+ {yes, Type} -> is_compat_arg(Type, T2);
no -> false
end;
-is_specialization(T1, ?union(List)) ->
+is_compat_arg(T1, ?union(List)) ->
case unify_union(List) of
- {yes, Type} -> is_specialization(T1, Type);
+ {yes, Type} -> is_compat_arg(T1, Type);
no -> false
end;
-is_specialization(?var(_), _) -> exit(error);
-is_specialization(_, ?var(_)) -> exit(error);
-is_specialization(?none, _) -> false;
-is_specialization(_, ?none) -> false;
-is_specialization(?unit, _) -> false;
-is_specialization(_, ?unit) -> false;
-is_specialization(#c{}, #c{}) -> false.
+is_compat_arg(?var(_), _) -> exit(error);
+is_compat_arg(_, ?var(_)) -> exit(error);
+is_compat_arg(?none, _) -> false;
+is_compat_arg(_, ?none) -> false;
+is_compat_arg(?unit, _) -> false;
+is_compat_arg(_, ?unit) -> false;
+is_compat_arg(#c{}, #c{}) -> false.
-specialization_list_list(LL1, LL2) ->
- length(LL1) =:= length(LL2) andalso specialization_list_list1(LL1, LL2).
+is_compat_list_list(LL1, LL2) ->
+ length(LL1) =:= length(LL2) andalso is_compat_list_list1(LL1, LL2).
-specialization_list_list1([], []) -> true;
-specialization_list_list1([L1|LL1], [L2|LL2]) ->
- specialization_list(L1, L2) andalso specialization_list_list1(LL1, LL2).
+is_compat_list_list1([], []) -> true;
+is_compat_list_list1([L1|LL1], [L2|LL2]) ->
+ is_compat_list(L1, L2) andalso is_compat_list_list1(LL1, LL2).
-specialization_list(L1, L2) ->
- length(L1) =:= length(L2) andalso specialization_list1(L1, L2).
+is_compat_list(L1, L2) ->
+ length(L1) =:= length(L2) andalso is_compat_list1(L1, L2).
-specialization_list1([], []) -> true;
-specialization_list1([T1|L1], [T2|L2]) ->
- is_specialization(T1, T2) andalso specialization_list1(L1, L2).
+is_compat_list1([], []) -> true;
+is_compat_list1([T1|L1], [T2|L2]) ->
+ is_compat_arg(T1, T2) andalso is_compat_list1(L1, L2).
-specialization_union2(?union(List1)=T1, ?union(List2)=T2) ->
+is_compat_union2(?union(List1)=T1, ?union(List2)=T2) ->
case {unify_union(List1), unify_union(List2)} of
{{yes, Type1}, {yes, Type2}} -> {yes, Type1, Type2};
{{yes, Type1}, no} -> {yes, Type1, T2};
@@ -4173,7 +4196,7 @@ t_map(Fun, T) ->
-spec t_to_string(erl_type()) -> string().
t_to_string(T) ->
- t_to_string(T, dict:new()).
+ t_to_string(T, maps:new()).
-spec t_to_string(erl_type(), type_table()) -> string().
@@ -4423,9 +4446,17 @@ mod_name(Mod, Name) ->
-type site() :: {'type', mta()} | {'spec', mfa()} | {'record', mra()}.
-type cache_key() :: {module(), atom(), expand_depth(),
[erl_type()], type_names()}.
--opaque cache() :: #{cache_key() => {erl_type(), expand_limit()}}.
+-type mod_type_table() :: ets:tid().
+-record(cache,
+ {
+ types = maps:new() :: #{cache_key() => {erl_type(), expand_limit()}},
+ mod_recs = {mrecs, dict:new()} :: 'undefined'
+ | {'mrecs', mod_records()}
+ }).
--spec t_from_form(parse_form(), sets:set(mfa()), site(), mod_records(),
+-opaque cache() :: #cache{}.
+
+-spec t_from_form(parse_form(), sets:set(mfa()), site(), mod_type_table(),
var_table(), cache()) -> {erl_type(), cache()}.
t_from_form(Form, ExpTypes, Site, RecDict, VarTab, Cache) ->
@@ -4437,11 +4468,12 @@ t_from_form(Form, ExpTypes, Site, RecDict, VarTab, Cache) ->
t_from_form_without_remote(Form, Site, TypeTable) ->
Module = site_module(Site),
- RecDict = dict:from_list([{Module, TypeTable}]),
+ ModRecs = dict:from_list([{Module, TypeTable}]),
ExpTypes = replace_by_none,
VarTab = var_table__new(),
- Cache = cache__new(),
- t_from_form1(Form, ExpTypes, Site, RecDict, VarTab, Cache).
+ Cache0 = cache__new(),
+ Cache = Cache0#cache{mod_recs = {mrecs, ModRecs}},
+ t_from_form1(Form, ExpTypes, Site, undefined, VarTab, Cache).
%% REC_TYPE_LIMIT is used for limiting the depth of recursive types.
%% EXPAND_LIMIT is used for limiting the size of types by
@@ -4456,13 +4488,13 @@ t_from_form_without_remote(Form, Site, TypeTable) ->
-record(from_form, {site :: site(),
xtypes :: sets:set(mfa()) | 'replace_by_none',
- mrecs :: mod_records(),
+ mrecs :: 'undefined' | mod_type_table(),
vtab :: var_table(),
tnames :: type_names()}).
-spec t_from_form1(parse_form(), sets:set(mfa()) | 'replace_by_none',
- site(), mod_records(), var_table(), cache()) ->
- {erl_type(), cache()}.
+ site(), 'undefined' | mod_type_table(), var_table(),
+ cache()) -> {erl_type(), cache()}.
t_from_form1(Form, ET, Site, MR, V, C) ->
TypeNames = initial_typenames(Site),
@@ -4534,6 +4566,8 @@ from_form({atom, _L, Atom}, _S, _D, L, C) ->
{t_atom(Atom), L, C};
from_form({integer, _L, Int}, _S, _D, L, C) ->
{t_integer(Int), L, C};
+from_form({char, _L, Char}, _S, _D, L, C) ->
+ {t_integer(Char), L, C};
from_form({op, _L, _Op, _Arg} = Op, _S, _D, L, C) ->
case erl_eval:partial_eval(Op) of
{integer, _, Val} ->
@@ -4706,13 +4740,13 @@ from_form({opaque, _L, Name, {Mod, Args, Rep}}, _S, _D, L, C) ->
builtin_type(Name, Type, S, D, L, C) ->
#from_form{site = Site, mrecs = MR} = S,
M = site_module(Site),
- case dict:find(M, MR) of
- {ok, R} ->
+ case lookup_module_types(M, MR, C) of
+ {R, C1} ->
case lookup_type(Name, 0, R) of
{_, {{_M, _FL, _F, _A}, _T}} ->
- type_from_form(Name, [], S, D, L, C);
+ type_from_form(Name, [], S, D, L, C1);
error ->
- {Type, L, C}
+ {Type, L, C1}
end;
error ->
{Type, L, C}
@@ -4725,9 +4759,9 @@ type_from_form(Name, Args, S, D, L, C) ->
TypeName = {type, {Module, Name, ArgsLen}},
case can_unfold_more(TypeName, TypeNames) of
true ->
- {ok, R} = dict:find(Module, MR),
+ {R, C1} = lookup_module_types(Module, MR, C),
type_from_form1(Name, Args, ArgsLen, R, TypeName, TypeNames,
- S, D, L, C);
+ S, D, L, C1);
false ->
{t_any(), L, C}
end.
@@ -4779,24 +4813,24 @@ remote_from_form(RemMod, Name, Args, S, D, L, C) ->
true ->
ArgsLen = length(Args),
MFA = {RemMod, Name, ArgsLen},
- case dict:find(RemMod, MR) of
+ case lookup_module_types(RemMod, MR, C) of
error ->
self() ! {self(), ext_types, MFA},
{t_any(), L, C};
- {ok, RemDict} ->
+ {RemDict, C1} ->
case sets:is_element(MFA, ET) of
true ->
RemType = {type, MFA},
case can_unfold_more(RemType, TypeNames) of
true ->
remote_from_form1(RemMod, Name, Args, ArgsLen, RemDict,
- RemType, TypeNames, S, D, L, C);
+ RemType, TypeNames, S, D, L, C1);
false ->
- {t_any(), L, C}
+ {t_any(), L, C1}
end;
false ->
self() ! {self(), ext_types, {RemMod, Name, ArgsLen}},
- {t_any(), L, C}
+ {t_any(), L, C1}
end
end
end.
@@ -4871,15 +4905,15 @@ record_from_form({atom, _, Name}, ModFields, S, D0, L0, C) ->
case can_unfold_more(RecordType, TypeNames) of
true ->
M = site_module(Site),
- {ok, R} = dict:find(M, MR),
+ {R, C1} = lookup_module_types(M, MR, C),
case lookup_record(Name, R) of
{ok, DeclFields} ->
NewTypeNames = [RecordType|TypeNames],
Site1 = {record, {M, Name, length(DeclFields)}},
S1 = S#from_form{site = Site1, tnames = NewTypeNames},
Fun = fun(D, L) ->
- {GetModRec, L1, C1} =
- get_mod_record(ModFields, DeclFields, S1, D, L, C),
+ {GetModRec, L1, C2} =
+ get_mod_record(ModFields, DeclFields, S1, D, L, C1),
case GetModRec of
{error, FieldName} ->
throw({error,
@@ -4887,12 +4921,12 @@ record_from_form({atom, _, Name}, ModFields, S, D0, L0, C) ->
[Name, FieldName])});
{ok, NewFields} ->
S2 = S1#from_form{vtab = var_table__new()},
- {NewFields1, L2, C2} =
- fields_from_form(NewFields, S2, D, L1, C1),
+ {NewFields1, L2, C3} =
+ fields_from_form(NewFields, S2, D, L1, C2),
Rec = t_tuple(
[t_atom(Name)|[Type
|| {_FieldName, Type} <- NewFields1]]),
- {Rec, L2, C2}
+ {Rec, L2, C3}
end
end,
recur_limit(Fun, D0, L0, RecordType, TypeNames);
@@ -5023,7 +5057,7 @@ recur_limit(Fun, D, L, TypeName, TypeNames) ->
end.
-spec t_check_record_fields(parse_form(), sets:set(mfa()), site(),
- mod_records(), var_table(), cache()) -> cache().
+ mod_type_table(), var_table(), cache()) -> cache().
t_check_record_fields(Form, ExpTypes, Site, RecDict, VarTable, Cache) ->
State = #from_form{site = Site,
@@ -5048,6 +5082,7 @@ check_record_fields({remote_type, _L, [{atom, _, _}, {atom, _, _}, Args]},
list_check_record_fields(Args, S, C);
check_record_fields({atom, _L, _}, _S, C) -> C;
check_record_fields({integer, _L, _}, _S, C) -> C;
+check_record_fields({char, _L, _}, _S, C) -> C;
check_record_fields({op, _L, _Op, _Arg}, _S, C) -> C;
check_record_fields({op, _L, _Op, _Arg1, _Arg2}, _S, C) -> C;
check_record_fields({type, _L, tuple, any}, _S, C) -> C;
@@ -5066,13 +5101,13 @@ check_record_fields({user_type, _L, _Name, Args}, S, C) ->
check_record({atom, _, Name}, ModFields, S, C) ->
#from_form{site = Site, mrecs = MR} = S,
M = site_module(Site),
- {ok, R} = dict:find(M, MR),
+ {R, C1} = lookup_module_types(M, MR, C),
{ok, DeclFields} = lookup_record(Name, R),
- case check_fields(Name, ModFields, DeclFields, S, C) of
+ case check_fields(Name, ModFields, DeclFields, S, C1) of
{error, FieldName} ->
throw({error, io_lib:format("Illegal declaration of #~w{~w}\n",
[Name, FieldName])});
- C1 -> C1
+ C2 -> C2
end.
check_fields(RecName, [{type, _, field_type, [{atom, _, Name}, Abstr]}|Left],
@@ -5102,7 +5137,7 @@ site_module({_, {Module, _, _}}) ->
-spec cache__new() -> cache().
cache__new() ->
- maps:new().
+ #cache{}.
-spec cache_key(module(), atom(), [erl_type()],
type_names(), expand_depth()) -> cache_key().
@@ -5119,8 +5154,8 @@ cache_key(Module, Name, ArgTypes, TypeNames, D) ->
-spec cache_find(cache_key(), cache()) ->
{erl_type(), expand_limit()} | 'error'.
-cache_find(Key, Cache) ->
- case maps:find(Key, Cache) of
+cache_find(Key, #cache{types = Types}) ->
+ case maps:find(Key, Types) of
{ok, Value} ->
Value;
error ->
@@ -5132,8 +5167,9 @@ cache_find(Key, Cache) ->
cache_put(_Key, _Type, DeltaL, Cache) when DeltaL < 0 ->
%% The type is truncated; do not reuse it.
Cache;
-cache_put(Key, Type, DeltaL, Cache) ->
- maps:put(Key, {Type, DeltaL}, Cache).
+cache_put(Key, Type, DeltaL, #cache{types = Types} = Cache) ->
+ NewTypes = maps:put(Key, {Type, DeltaL}, Types),
+ Cache#cache{types = NewTypes}.
-spec t_var_names([erl_type()]) -> [atom()].
@@ -5149,6 +5185,7 @@ t_form_to_string({var, _L, Name}) -> atom_to_list(Name);
t_form_to_string({atom, _L, Atom}) ->
io_lib:write_string(atom_to_list(Atom), $'); % To quote or not to quote... '
t_form_to_string({integer, _L, Int}) -> integer_to_list(Int);
+t_form_to_string({char, _L, Char}) -> integer_to_list(Char);
t_form_to_string({op, _L, _Op, _Arg} = Op) ->
case erl_eval:partial_eval(Op) of
{integer, _, _} = Int -> t_form_to_string(Int);
@@ -5231,14 +5268,12 @@ t_form_to_string({type, _L, union, Args}) ->
t_form_to_string({type, _L, Name, []} = T) ->
try
M = mod,
- D0 = dict:new(),
- MR = dict:from_list([{M, D0}]),
Site = {type, {M,Name,0}},
V = var_table__new(),
C = cache__new(),
State = #from_form{site = Site,
xtypes = sets:new(),
- mrecs = MR,
+ mrecs = 'undefined',
vtab = V,
tnames = []},
{T1, _, _} = from_form(T, State, _Deep=1000, _ALot=1000000, C),
@@ -5292,11 +5327,33 @@ is_erl_type(?unit) -> true;
is_erl_type(#c{}) -> true;
is_erl_type(_) -> false.
+-spec lookup_module_types(module(), mod_type_table(), cache()) ->
+ 'error' | {type_table(), cache()}.
+
+lookup_module_types(Module, CodeTable, Cache) ->
+ #cache{mod_recs = ModRecs} = Cache,
+ case ModRecs of
+ undefined -> error;
+ {mrecs, MRecs} ->
+ case dict:find(Module, MRecs) of
+ {ok, R} ->
+ {R, Cache};
+ error ->
+ try ets:lookup_element(CodeTable, Module, 2) of
+ R ->
+ NewMRecs = dict:store(Module, R, MRecs),
+ {R, Cache#cache{mod_recs = {mrecs, NewMRecs}}}
+ catch
+ _:_ -> error
+ end
+ end
+ end.
+
-spec lookup_record(atom(), type_table()) ->
'error' | {'ok', [{atom(), parse_form(), erl_type()}]}.
-lookup_record(Tag, RecDict) when is_atom(Tag) ->
- case dict:find({record, Tag}, RecDict) of
+lookup_record(Tag, Table) when is_atom(Tag) ->
+ case maps:find({record, Tag}, Table) of
{ok, {_FileLine, [{_Arity, Fields}]}} ->
{ok, Fields};
{ok, {_FileLine, List}} when is_list(List) ->
@@ -5310,18 +5367,18 @@ lookup_record(Tag, RecDict) when is_atom(Tag) ->
-spec lookup_record(atom(), arity(), type_table()) ->
'error' | {'ok', [{atom(), parse_form(), erl_type()}]}.
-lookup_record(Tag, Arity, RecDict) when is_atom(Tag) ->
- case dict:find({record, Tag}, RecDict) of
+lookup_record(Tag, Arity, Table) when is_atom(Tag) ->
+ case maps:find({record, Tag}, Table) of
{ok, {_FileLine, [{Arity, Fields}]}} -> {ok, Fields};
{ok, {_FileLine, OrdDict}} -> orddict:find(Arity, OrdDict);
error -> error
end.
-spec lookup_type(_, _, _) -> {'type' | 'opaque', type_value()} | 'error'.
-lookup_type(Name, Arity, RecDict) ->
- case dict:find({type, Name, Arity}, RecDict) of
+lookup_type(Name, Arity, Table) ->
+ case maps:find({type, Name, Arity}, Table) of
error ->
- case dict:find({opaque, Name, Arity}, RecDict) of
+ case maps:find({opaque, Name, Arity}, Table) of
error -> error;
{ok, Found} -> {opaque, Found}
end;
@@ -5331,8 +5388,8 @@ lookup_type(Name, Arity, RecDict) ->
-spec type_is_defined('type' | 'opaque', atom(), arity(), type_table()) ->
boolean().
-type_is_defined(TypeOrOpaque, Name, Arity, RecDict) ->
- dict:is_key({TypeOrOpaque, Name, Arity}, RecDict).
+type_is_defined(TypeOrOpaque, Name, Arity, Table) ->
+ maps:is_key({TypeOrOpaque, Name, Arity}, Table).
cannot_have_opaque(Type, TypeName, TypeNames) ->
t_is_none(Type) orelse is_recursive(TypeName, TypeNames).
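
Much of the erl_types diff above moves per-module type tables out of the #from_form{} state: modules are now fetched on demand from an ets table and memoized in the mod_recs field of the cache record (lookup_module_types/3). The pattern is a dict in front of ets; a distilled sketch of that pattern under those assumptions, with illustrative names only:

-module(memo_lookup_sketch).
-export([demo/0]).

-record(cache, {mod_recs = dict:new() :: dict:dict()}).

%% Return {Value, Cache1}, consulting the dict before the ets table and
%% memoizing anything fetched from ets; 'error' if the key is unknown.
lookup(Key, Tab, #cache{mod_recs = Recs} = Cache) ->
    case dict:find(Key, Recs) of
        {ok, V} ->
            {V, Cache};
        error ->
            try ets:lookup_element(Tab, Key, 2) of
                V ->
                    {V, Cache#cache{mod_recs = dict:store(Key, V, Recs)}}
            catch
                _:_ -> error
            end
    end.

demo() ->
    Tab = ets:new(sketch, [set, public]),
    true = ets:insert(Tab, {mymod, #{}}),
    C0 = #cache{},
    {#{}, C1} = lookup(mymod, Tab, C0),   %% fetched from ets, memoized
    {#{}, C1} = lookup(mymod, Tab, C1),   %% now served from the cache
    error = lookup(other, Tab, C1),
    ok.
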
diff --git a/lib/hipe/doc/src/notes.xml b/lib/hipe/doc/src/notes.xml
index 0bdd60adfd..314fd55ba3 100644
--- a/lib/hipe/doc/src/notes.xml
+++ b/lib/hipe/doc/src/notes.xml
@@ -1297,7 +1297,7 @@
<list>
<item>
<p>
- Miscellanous updates.</p>
+ Miscellaneous updates.</p>
<p>
Own Id: OTP-8038</p>
</item>
diff --git a/lib/hipe/flow/cfg.inc b/lib/hipe/flow/cfg.inc
index 362c5b697c..17342d3b60 100644
--- a/lib/hipe/flow/cfg.inc
+++ b/lib/hipe/flow/cfg.inc
@@ -212,7 +212,7 @@ info_update(CFG, I) ->
-ifndef(GEN_CFG).
-spec other_entrypoints(cfg()) -> [cfg_lbl()].
-%% @doc Returns a list of labels that are refered to from the data section.
+%% @doc Returns a list of labels that are referred to from the data section.
other_entrypoints(CFG) ->
hipe_consttab:referred_labels(data(CFG)).
diff --git a/lib/hipe/flow/ebb.inc b/lib/hipe/flow/ebb.inc
index 58213e44d5..e4b7fd0efb 100644
--- a/lib/hipe/flow/ebb.inc
+++ b/lib/hipe/flow/ebb.inc
@@ -40,12 +40,14 @@
%% | {ebb_leaf, SuccesorLabel}
%%--------------------------------------------------------------------
-%% XXX: Cheating big time! no recursive types
--type ebb() :: {ebb_node, icode_lbl(), _}
- | {ebb_leaf, icode_lbl()}.
+-type ebb() :: ebb_node()
+ | ebb_leaf().
-record(ebb_node, {label :: icode_lbl(), successors :: [ebb()]}).
+-type ebb_node() :: #ebb_node{}.
+
-record(ebb_leaf, {successor :: icode_lbl()}).
+-type ebb_leaf() :: #ebb_leaf{}.
%%--------------------------------------------------------------------
%% Returns a list of extended basic blocks.
@@ -193,7 +195,7 @@ add_succ([Lbl|Lbls], Visited, Node, MkFun, EBBs, CFG) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
--spec mk_node(icode_lbl(), [ebb()]) -> #ebb_node{}.
+-spec mk_node(icode_lbl(), [ebb()]) -> ebb_node().
mk_node(Label, Successors) -> #ebb_node{label=Label, successors=Successors}.
-spec node_label(#ebb_node{}) -> icode_lbl().
@@ -202,11 +204,11 @@ node_label(#ebb_node{label=Label}) -> Label.
-spec node_successors(#ebb_node{}) -> [ebb()].
node_successors(#ebb_node{successors=Successors}) -> Successors.
--spec mk_leaf(icode_lbl()) -> #ebb_leaf{}.
+-spec mk_leaf(icode_lbl()) -> ebb_leaf().
mk_leaf(NextEbb) -> #ebb_leaf{successor=NextEbb}.
%% leaf_next(Leaf) -> Leaf#ebb_leaf.successor.
--spec type(#ebb_node{}) -> 'node' ; (#ebb_leaf{}) -> 'leaf'.
+-spec type(ebb_node()) -> 'node' ; (ebb_leaf()) -> 'leaf'.
type(#ebb_node{}) -> node;
type(#ebb_leaf{}) -> leaf.
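
The ebb.inc hunk above retires the old tuple encoding (the removed "Cheating big time! no recursive types" comment) in favour of record-based types, since ebb() can simply be defined recursively over the two records. A minimal illustration of the same idiom, using hypothetical names rather than the hipe records:

-module(ebb_types_sketch).
-export([leaf_count/1, demo/0]).

-type label() :: non_neg_integer().

-record(node, {label :: label(), successors :: [tree()]}).
-record(leaf, {successor :: label()}).

-type tree() :: #node{} | #leaf{}.    %% recursive type via records

-spec leaf_count(tree()) -> non_neg_integer().
leaf_count(#leaf{}) -> 1;
leaf_count(#node{successors = Ss}) ->
    lists:sum([leaf_count(S) || S <- Ss]).

demo() ->
    T = #node{label = 0,
              successors = [#leaf{successor = 1},
                            #node{label = 2,
                                  successors = [#leaf{successor = 3}]}]},
    2 = leaf_count(T),
    ok.
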
diff --git a/lib/hipe/flow/hipe_dominators.erl b/lib/hipe/flow/hipe_dominators.erl
index 570452c14e..749edd4f72 100644
--- a/lib/hipe/flow/hipe_dominators.erl
+++ b/lib/hipe/flow/hipe_dominators.erl
@@ -317,7 +317,7 @@ updateCell(Value, Field, WD) ->
%%>----------------------------------------------------------------------<
%% Procedure : dfs/1
%% Purpose : The main purpose of this function is to traverse the CFG in
-%% a depth first order. It is aslo used to initialize certain
+%% a depth first order. It is also used to initialize certain
%% elements defined in a workDataCell.
%% Arguments : CFG - a Control Flow Graph representation
%% Returns : A table (WorkData) and the total number of elements in
diff --git a/lib/hipe/icode/hipe_beam_to_icode.erl b/lib/hipe/icode/hipe_beam_to_icode.erl
index 100bc0b0e2..610578dfbc 100644
--- a/lib/hipe/icode/hipe_beam_to_icode.erl
+++ b/lib/hipe/icode/hipe_beam_to_icode.erl
@@ -148,7 +148,8 @@ trans_mfa_code(M,F,A, FunBeamCode, ClosureInfo) ->
?IF_DEBUG({Code3,_Env3} = mk_debug_calltrace(MFA, Env1, Code2),
{Code3,_Env3} = {Code2,Env1}),
%% For stack optimization
- Leafness = leafness(Code3),
+ IsClosure = get_closure_info(MFA, ClosureInfo) =/= not_a_closure,
+ Leafness = leafness(Code3, IsClosure),
IsLeaf = is_leaf_code(Leafness),
Code4 =
[FunLbl |
@@ -156,7 +157,6 @@ trans_mfa_code(M,F,A, FunBeamCode, ClosureInfo) ->
false -> Code3;
true -> [mk_redtest()|Code3]
end],
- IsClosure = get_closure_info(MFA, ClosureInfo) =/= not_a_closure,
Code5 = hipe_icode:mk_icode(MFA, FunArgs, IsClosure, IsLeaf,
remove_dead_code(Code4),
hipe_gensym:var_range(icode),
@@ -173,12 +173,12 @@ trans_mfa_code(M,F,A, FunBeamCode, ClosureInfo) ->
mk_redtest() -> hipe_icode:mk_primop([], redtest, []).
-leafness(Is) -> % -> true, selfrec, or false
- leafness(Is, true).
+leafness(Is, IsClosure) -> % -> true, selfrec, closure, or false
+ leafness(Is, IsClosure, true).
-leafness([], Leafness) ->
+leafness([], _IsClosure, Leafness) ->
Leafness;
-leafness([I|Is], Leafness) ->
+leafness([I|Is], IsClosure, Leafness) ->
case I of
#icode_comment{} ->
%% BEAM self-tailcalls become gotos, but they leave
@@ -191,7 +191,7 @@ leafness([I|Is], Leafness) ->
'self_tail_recursive' -> selfrec; % call_only to selfrec
_ -> Leafness
end,
- leafness(Is, NewLeafness);
+ leafness(Is, IsClosure, NewLeafness);
#icode_call{} ->
case hipe_icode:call_type(I) of
'primop' ->
@@ -199,12 +199,12 @@ leafness([I|Is], Leafness) ->
call_fun -> false; % Calls closure
enter_fun -> false; % Calls closure
#apply_N{} -> false;
- _ -> leafness(Is, Leafness) % Other primop calls are ok
+ _ -> leafness(Is, IsClosure, Leafness) % Other primop calls are ok
end;
T when T =:= 'local' orelse T =:= 'remote' ->
{M,F,A} = hipe_icode:call_fun(I),
case erlang:is_builtin(M, F, A) of
- true -> leafness(Is, Leafness);
+ true -> leafness(Is, IsClosure, Leafness);
false -> false
end
end;
@@ -223,11 +223,12 @@ leafness([I|Is], Leafness) ->
T when T =:= 'local' orelse T =:= 'remote' ->
{M,F,A} = hipe_icode:enter_fun(I),
case erlang:is_builtin(M, F, A) of
- true -> leafness(Is, Leafness);
+ true -> leafness(Is, IsClosure, Leafness);
+ _ when IsClosure -> leafness(Is, IsClosure, closure);
_ -> false
end
end;
- _ -> leafness(Is, Leafness)
+ _ -> leafness(Is, IsClosure, Leafness)
end.
%% XXX: this old stuff is passed around but essentially unused
@@ -235,12 +236,20 @@ is_leaf_code(Leafness) ->
case Leafness of
true -> true;
selfrec -> true;
+ closure -> false;
false -> false
end.
needs_redtest(Leafness) ->
case Leafness of
true -> false;
+ %% A "leaf" closure may contain tailcalls to non-closures in addition to
+ %% what other leaves may contain. Omitting the redtest is useful to generate
+ %% shorter code for closures generated by (fun F/A), and is safe since
+ %% control flow cannot return to a "leaf" closure again without a reduction
+ %% being consumed. This is true since no function that can call a closure
+ %% will ever have its redtest omitted.
+ closure -> false;
selfrec -> true;
false -> true
end.
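The comment above motivates the new 'closure' leafness value. A minimal sketch, assuming nothing beyond what the hunk shows (module and function names below are invented for illustration), of the kind of closure this targets:

    %% A closure created with fun f/1 whose body is a single tail call to an
    %% ordinary (non-closure) function: with the change above its leafness
    %% becomes 'closure', so is_leaf_code/1 answers false while
    %% needs_redtest/1 still allows the reduction test to be omitted.
    -module(redtest_example).
    -export([wrap/0]).

    wrap() -> fun f/1.

    f(X) -> g(X).      %% tail call to a non-closure
    g(X) -> X + 1.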
diff --git a/lib/hipe/icode/hipe_icode_type.erl b/lib/hipe/icode/hipe_icode_type.erl
index 815d1e57a8..aafaeb5a0a 100644
--- a/lib/hipe/icode/hipe_icode_type.erl
+++ b/lib/hipe/icode/hipe_icode_type.erl
@@ -1410,9 +1410,10 @@ transform_element2(I) ->
NewIndex =
case test_type(integer, IndexType) of
true ->
- case t_number_vals(IndexType) of
- unknown -> unknown;
- [_|_] = Vals -> {number, Vals}
+ case {number_min(IndexType), number_max(IndexType)} of
+ {Lb0, Ub0} when is_integer(Lb0), is_integer(Ub0) ->
+ {number, Lb0, Ub0};
+ {_, _} -> unknown
end;
_ -> unknown
end,
@@ -1427,19 +1428,19 @@ transform_element2(I) ->
_ -> unknown
end,
case {NewIndex, MinSize} of
- {{number, [_|_] = Ns}, {tuple, A}} when is_integer(A) ->
- case lists:all(fun(X) -> 0 < X andalso X =< A end, Ns) of
+ {{number, Lb, Ub}, {tuple, A}} when is_integer(A) ->
+ case 0 < Lb andalso Ub =< A of
true ->
- case Ns of
- [Idx] ->
+ case {Lb, Ub} of
+ {Idx, Idx} ->
[_, Tuple] = hipe_icode:args(I),
update_call_or_enter(I, #unsafe_element{index = Idx}, [Tuple]);
- [_|_] ->
+ {_, _} ->
NewFun = {element, [MinSize, valid]},
update_call_or_enter(I, NewFun)
end;
false ->
- case lists:all(fun(X) -> hipe_tagscheme:is_fixnum(X) end, Ns) of
+ case lists:all(fun(X) -> hipe_tagscheme:is_fixnum(X) end, [Lb, Ub]) of
true ->
NewFun = {element, [MinSize, fixnums]},
update_call_or_enter(I, NewFun);
@@ -1454,7 +1455,7 @@ transform_element2(I) ->
NewFun = {element, [MinSize, fixnums]},
update_call_or_enter(I, NewFun);
false ->
- NewFun = {element, [MinSize, NewIndex]},
+ NewFun = {element, [MinSize, NewIndex]},
update_call_or_enter(I, NewFun)
end
end.
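A standalone restatement of the range-based bounds check the hunk above switches to (the helper name is illustrative, not from the patch): an element/2 index known to lie in the integer range Lb..Ub is within a tuple of arity A exactly when both endpoints are:

    index_in_bounds(Lb, Ub, A) when is_integer(Lb), is_integer(Ub), is_integer(A) ->
        %% replaces the old lists:all/2 walk over every possible index value
        0 < Lb andalso Ub =< A.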
diff --git a/lib/hipe/llvm/hipe_llvm.erl b/lib/hipe/llvm/hipe_llvm.erl
index b22f8fb320..641d3fda0a 100644
--- a/lib/hipe/llvm/hipe_llvm.erl
+++ b/lib/hipe/llvm/hipe_llvm.erl
@@ -862,7 +862,7 @@ pp_ins(Dev, Ver, I) ->
true -> write(Dev, "volatile ");
false -> ok
end,
- pp_dereference_type(Dev, Ver, load_p_type(I)),
+ pp_dereference_type(Dev, load_p_type(I)),
write(Dev, [" ", load_pointer(I), " "]),
case load_alignment(I) of
[] -> ok;
@@ -898,7 +898,7 @@ pp_ins(Dev, Ver, I) ->
true -> write(Dev, "inbounds ");
false -> ok
end,
- pp_dereference_type(Dev, Ver, getelementptr_p_type(I)),
+ pp_dereference_type(Dev, getelementptr_p_type(I)),
write(Dev, [" ", getelementptr_value(I)]),
pp_typed_idxs(Dev, getelementptr_typed_idxs(I)),
write(Dev, "\n");
@@ -959,10 +959,8 @@ pp_ins(Dev, Ver, I) ->
pp_args(Dev, fun_def_arglist(I)),
write(Dev, ") "),
pp_options(Dev, fun_def_fn_attrs(I)),
- case Ver >= {3,7} of false -> ok; true ->
- write(Dev, "personality i32 (i32, i64, i8*,i8*)* "
- "@__gcc_personality_v0 ")
- end,
+ write(Dev, "personality i32 (i32, i64, i8*,i8*)* "
+ "@__gcc_personality_v0 "),
case fun_def_align(I) of
[] -> ok;
N -> write(Dev, ["align ", N])
@@ -997,12 +995,7 @@ pp_ins(Dev, Ver, I) ->
pp_type(Dev, const_decl_type(I)),
write(Dev, [" ", const_decl_value(I), "\n"]);
#llvm_landingpad{} ->
- write(Dev, "landingpad { i8*, i32 } "),
- case Ver < {3,7} of false -> ok; true ->
- write(Dev, "personality i32 (i32, i64, i8*,i8*)* "
- "@__gcc_personality_v0 ")
- end,
- write(Dev, "cleanup\n");
+ write(Dev, "landingpad { i8*, i32 } cleanup\n");
#llvm_asm{} ->
write(Dev, [asm_instruction(I), "\n"]);
#llvm_adj_stack{} ->
@@ -1011,15 +1004,7 @@ pp_ins(Dev, Ver, I) ->
pp_type(Dev, adj_stack_type(I)),
write(Dev, [" ", adj_stack_offset(I),")\n"]);
#llvm_meta{} ->
- write(Dev, ["!", meta_id(I), " = "]),
- Named = case string:to_integer(meta_id(I)) of
- {_, ""} -> false;
- _ -> true
- end,
- case Ver < {3,6} andalso not Named of
- true -> write(Dev, "metadata !{metadata ");
- false -> write(Dev, "!{ ")
- end,
+ write(Dev, ["!", meta_id(I), " = !{ "]),
write(Dev, string:join([if is_list(Op) -> ["!\"", Op, "\""];
is_integer(Op) -> ["i32 ", integer_to_list(Op)];
is_record(Op, llvm_meta) ->
@@ -1030,15 +1015,10 @@ pp_ins(Dev, Ver, I) ->
exit({?MODULE, pp_ins, {"Unknown LLVM instruction", Other}})
end.
-%% @doc Print the type of a dereference in an LLVM instruction using syntax
-%% parsable by the specified LLVM version.
-pp_dereference_type(Dev, Ver, Type) ->
- case Ver >= {3,7} of
- false -> ok;
- true ->
- pp_type(Dev, pointer_type(Type)),
- write(Dev, ", ")
- end,
+%% @doc Print the type of a dereference in an LLVM instruction.
+pp_dereference_type(Dev, Type) ->
+ pp_type(Dev, pointer_type(Type)),
+ write(Dev, ", "),
pp_type(Dev, Type).
%% @doc Pretty-print a list of types
diff --git a/lib/hipe/llvm/hipe_rtl_to_llvm.erl b/lib/hipe/llvm/hipe_rtl_to_llvm.erl
index f8911c1909..79e1bfd381 100644
--- a/lib/hipe/llvm/hipe_rtl_to_llvm.erl
+++ b/lib/hipe/llvm/hipe_rtl_to_llvm.erl
@@ -1364,7 +1364,7 @@ create_function_definition(Fun, Params, Code, LocalVars) ->
EntryBlock =
lists:flatten([EntryLabel, ExceptionSync, I2, LocalVars, StoredParams, I3]),
Final_Code = EntryBlock ++ Code,
- FunctionOptions = [nounwind, noredzone, list_to_atom("gc \"erlang\"")],
+ FunctionOptions = [nounwind, noredzone, 'gc "erlang"'],
WordTy = hipe_llvm:mk_int(?BITS_IN_WORD),
FunRetTy = hipe_llvm:mk_struct(lists:duplicate(?NR_PINNED_REGS + 1, WordTy)),
hipe_llvm:mk_fun_def([], [], "cc 11", [], FunRetTy, FunctionName, Args,
@@ -1431,7 +1431,7 @@ relocs_to_list(Relocs) ->
%% constants/labels.
handle_relocations(Relocs, Data, Fun) ->
RelocsList = relocs_to_list(Relocs),
- %% Seperate Relocations according to their type
+ %% Separate Relocations according to their type
{CallList, AtomList, ClosureList, ClosureLabels, SwitchList} =
seperate_relocs(RelocsList),
%% Create code to declare atoms
@@ -1474,7 +1474,7 @@ handle_relocations(Relocs, Data, Fun) ->
LocalVariables = AtomLoad ++ ClosureLoad ++ ConstLoad,
{Relocs4, ExternalDeclarations, LocalVariables}.
-%% @doc Seperate relocations according to their type.
+%% @doc Separate relocations according to their type.
seperate_relocs(Relocs) ->
seperate_relocs(Relocs, [], [], [], [], []).
diff --git a/lib/hipe/main/hipe.erl b/lib/hipe/main/hipe.erl
index 90ef84ca51..fff397b060 100644
--- a/lib/hipe/main/hipe.erl
+++ b/lib/hipe/main/hipe.erl
@@ -441,7 +441,7 @@ compile(Name, File, Opts0) when is_atom(Name) ->
?error_msg("Cannot get Core Erlang code from BEAM binary.",[]),
?EXIT({cant_compile_core_from_binary});
true ->
- case filename:find_src(filename:rootname(File, ".beam")) of
+ case filelib:find_source(filename:rootname(File,".beam") ++ ".beam") of
{error, _} ->
?error_msg("Cannot find source code for ~p.", [File]),
?EXIT({cant_find_source_code});
@@ -655,7 +655,7 @@ run_compiler_1(Name, DisasmFun, IcodeFun, Options) ->
case proplists:get_bool(to_llvm, Opts0) andalso
not llvm_support_available() of
true ->
- ?error_msg("No LLVM version 3.4 or greater "
+ ?error_msg("No LLVM version 3.9 or greater "
"found in $PATH; aborting "
"native code compilation.\n", []),
?EXIT(cant_find_required_llvm_version);
@@ -1585,7 +1585,7 @@ check_options(Opts) ->
-spec llvm_support_available() -> boolean().
llvm_support_available() ->
- get_llvm_version() >= {3,4}.
+ get_llvm_version() >= {3,9}.
-type llvm_version() :: {Major :: integer(), Minor :: integer()}.
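A hedged sketch of the replacement lookup (filelib:find_source/1 resolves a source file from an object file path using the kernel application's source search rules; the wrapper below is illustrative only):

    find_src(BeamFile) ->
        %% returns {ok, SrcFile} or {error, not_found}
        case filelib:find_source(filename:rootname(BeamFile, ".beam") ++ ".beam") of
            {ok, SrcFile}  -> {ok, SrcFile};
            {error, _} = E -> E
        end.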
diff --git a/lib/hipe/opt/hipe_schedule.erl b/lib/hipe/opt/hipe_schedule.erl
index 531690f885..0f25940e3d 100644
--- a/lib/hipe/opt/hipe_schedule.erl
+++ b/lib/hipe/opt/hipe_schedule.erl
@@ -1337,10 +1337,10 @@ cd([{N,I}|Xs], DAG, PrevBr, PrevUnsafe, PrevOthers) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Function : cd_branch_to_other_deps
%% Argument : N - index of branch
-%% Ms - list of indexes of "others" preceeding instrs
+%% Ms - list of indexes of "others" preceding instrs
%% DAG - dependence graph
%% Returns : DAG - new graph
-%% Description : Makes preceeding instrs fixed so they don't bypass a branch
+%% Description : Makes preceding instrs fixed so they don't bypass a branch
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
cd_branch_to_other_deps(_, [], DAG) ->
DAG;
diff --git a/lib/hipe/opt/hipe_spillmin_color.erl b/lib/hipe/opt/hipe_spillmin_color.erl
index 50e073a467..41f1972df7 100644
--- a/lib/hipe/opt/hipe_spillmin_color.erl
+++ b/lib/hipe/opt/hipe_spillmin_color.erl
@@ -119,7 +119,7 @@ color_heuristic(IG, Min, Max, Safe, MaxNodes, Target, MaxDepth) ->
end;
_ ->
%% This can be increased from 2, and by this the heuristic can be
- %% exited earlier, but the same can be achived by decreasing the
+ %% exited earlier, but the same can be achieved by decreasing the
%% recursion depth. This should not be decreased below 2.
case (Max - Min) < 2 of
true ->
diff --git a/lib/hipe/regalloc/hipe_amd64_specific_sse2.erl b/lib/hipe/regalloc/hipe_amd64_specific_sse2.erl
index 9c94539bc6..9682d37520 100644
--- a/lib/hipe/regalloc/hipe_amd64_specific_sse2.erl
+++ b/lib/hipe/regalloc/hipe_amd64_specific_sse2.erl
@@ -126,8 +126,8 @@ temp0(_) ->
all_precoloured(Ctx) ->
allocatable(Ctx).
-is_precoloured(Reg, Ctx) ->
- lists:member(Reg,all_precoloured(Ctx)).
+is_precoloured(Reg, _) ->
+ hipe_amd64_registers:is_precoloured_sse2(Reg).
physical_name(Reg, _) ->
Reg.
diff --git a/lib/hipe/rtl/hipe_icode2rtl.erl b/lib/hipe/rtl/hipe_icode2rtl.erl
index 82970f04ab..6da8a76d34 100644
--- a/lib/hipe/rtl/hipe_icode2rtl.erl
+++ b/lib/hipe/rtl/hipe_icode2rtl.erl
@@ -532,8 +532,12 @@ gen_cond(CondOp, Args, TrueLbl, FalseLbl, Pred) ->
FalseLbl, Pred)];
'=:=' ->
[Arg1, Arg2] = Args,
+ TypeTestLbl = hipe_rtl:mk_new_label(),
[hipe_rtl:mk_branch(Arg1, eq, Arg2, TrueLbl,
- hipe_rtl:label_name(GenLbl), Pred),
+ hipe_rtl:label_name(TypeTestLbl), Pred),
+ TypeTestLbl,
+ hipe_tagscheme:test_either_immed(Arg1, Arg2, FalseLbl,
+ hipe_rtl:label_name(GenLbl)),
GenLbl,
hipe_rtl:mk_call([Tmp], op_exact_eqeq_2, Args,
TestRetName, [], not_remote),
@@ -546,8 +550,12 @@ gen_cond(CondOp, Args, TrueLbl, FalseLbl, Pred) ->
TrueLbl, 1-Pred)];
'=/=' ->
[Arg1, Arg2] = Args,
+ TypeTestLbl = hipe_rtl:mk_new_label(),
[hipe_rtl:mk_branch(Arg1, eq, Arg2, FalseLbl,
- hipe_rtl:label_name(GenLbl), 1-Pred),
+ hipe_rtl:label_name(TypeTestLbl), 1-Pred),
+ TypeTestLbl,
+ hipe_tagscheme:test_either_immed(Arg1, Arg2, TrueLbl,
+ hipe_rtl:label_name(GenLbl)),
GenLbl,
hipe_rtl:mk_call([Tmp], op_exact_eqeq_2, Args,
TestRetName, [], not_remote),
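A small model, under the tag-scheme assumptions asserted in hipe_tagscheme.erl (primary tag 2#00 reserved, immediates tagged 2#11), of the shortcut the new TypeTestLbl branch relies on; the function name is invented for illustration:

    %% If two tagged words differ and at least one is an immediate, the terms
    %% cannot be exactly equal, so the op_exact_eqeq_2 call can be skipped.
    %% Bit 1 of (W - 1) is set exactly for tags 2#11 (and the reserved 2#00),
    %% which is what test_either_immed/4 tests after or-ing the two words.
    either_is_immed(W1, W2) when is_integer(W1), is_integer(W2) ->
        (((W1 - 1) bor (W2 - 1)) band 2) =/= 0.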
diff --git a/lib/hipe/rtl/hipe_rtl_binary_construct.erl b/lib/hipe/rtl/hipe_rtl_binary_construct.erl
index fd0d1f1223..52ea5db382 100644
--- a/lib/hipe/rtl/hipe_rtl_binary_construct.erl
+++ b/lib/hipe/rtl/hipe_rtl_binary_construct.erl
@@ -137,43 +137,6 @@ gen_rtl(BsOP, Dst, Args, TrueLblName, FalseLblName, SystemLimitLblName, ConstTab
end
end;
- {bs_put_integer, Size, Flags, ConstInfo} ->
- Aligned = aligned(Flags),
- LittleEndian = littleendian(Flags),
- [NewOffset] = get_real(Dst),
- case is_illegal_const(Size) of
- true ->
- [hipe_rtl:mk_goto(FalseLblName)];
- false ->
- case ConstInfo of
- fail ->
- [hipe_rtl:mk_goto(FalseLblName)];
- _ ->
- case Args of
- [Src, Base, Offset] ->
- CCode = static_int_c_code(NewOffset, Src,
- Base, Offset, Size,
- Flags, TrueLblName,
- FalseLblName),
- put_static_int(NewOffset, Src, Base, Offset, Size,
- CCode, Aligned, LittleEndian, TrueLblName);
- [Src, Bits, Base, Offset] ->
- {SizeCode, SizeReg} =
- hipe_rtl_binary:make_size(Size, Bits,
- SystemLimitLblName,
- FalseLblName),
- CCode = int_c_code(NewOffset, Src, Base,
- Offset, SizeReg, Flags,
- TrueLblName, FalseLblName),
- InCode =
- put_dynamic_int(NewOffset, Src, Base, Offset,
- SizeReg, CCode, Aligned,
- LittleEndian, TrueLblName),
- SizeCode ++ InCode
- end
- end
- end;
-
{unsafe_bs_put_integer, 0, _Flags, _ConstInfo} ->
[NewOffset] = get_real(Dst),
case Args of
@@ -186,44 +149,12 @@ gen_rtl(BsOP, Dst, Args, TrueLblName, FalseLblName, SystemLimitLblName, ConstTab
end;
{unsafe_bs_put_integer, Size, Flags, ConstInfo} ->
- case is_illegal_const(Size) of
- true ->
- [hipe_rtl:mk_goto(FalseLblName)];
- false ->
- Aligned = aligned(Flags),
- LittleEndian = littleendian(Flags),
- [NewOffset] = get_real(Dst),
- case ConstInfo of
- fail ->
- [hipe_rtl:mk_goto(FalseLblName)];
- _ ->
- case Args of
- [Src, Base, Offset] ->
- CCode = static_int_c_code(NewOffset, Src,
- Base, Offset, Size,
- Flags, TrueLblName,
- FalseLblName),
- put_unsafe_static_int(NewOffset, Src, Base,
- Offset, Size,
- CCode, Aligned, LittleEndian,
- TrueLblName);
- [Src, Bits, Base, Offset] ->
- {SizeCode, SizeReg} =
- hipe_rtl_binary:make_size(Size, Bits,
- SystemLimitLblName,
- FalseLblName),
- CCode = int_c_code(NewOffset, Src, Base,
- Offset, SizeReg, Flags,
- TrueLblName, FalseLblName),
- InCode =
- put_unsafe_dynamic_int(NewOffset, Src, Base,
- Offset, SizeReg, CCode,
- Aligned, LittleEndian,
- TrueLblName),
- SizeCode ++ InCode
- end
- end
- end;
+ do_bs_put_integer(Dst, Args, Size, Flags, ConstInfo, true,
+ TrueLblName, FalseLblName, SystemLimitLblName);
+
+ {bs_put_integer, Size, Flags, ConstInfo} ->
+ do_bs_put_integer(Dst, Args, Size, Flags, ConstInfo, false,
+ TrueLblName, FalseLblName, SystemLimitLblName);
bs_utf8_size ->
case Dst of
@@ -360,6 +291,40 @@ gen_rtl(BsOP, Dst, Args, TrueLblName, FalseLblName, SystemLimitLblName, ConstTab
{Code, ConstTab}
end.
+%% Common implementation of bs_put_integer and unsafe_bs_put_integer
+do_bs_put_integer(Dst, Args, Size, Flags, ConstInfo, SrcUnsafe,
+ TrueLblName, FalseLblName, SystemLimitLblName) ->
+ case is_illegal_const(Size) of
+ true ->
+ [hipe_rtl:mk_goto(FalseLblName)];
+ false ->
+ Aligned = aligned(Flags),
+ LittleEndian = littleendian(Flags),
+ [NewOffset] = get_real(Dst),
+ case ConstInfo of
+ fail ->
+ [hipe_rtl:mk_goto(FalseLblName)];
+ _ ->
+ case Args of
+ [Src, Base, Offset] ->
+ CCode = static_int_c_code(NewOffset, Src, Base, Offset, Size,
+ Flags, TrueLblName, FalseLblName),
+ put_static_int(NewOffset, Src, Base, Offset, Size, CCode, Aligned,
+ LittleEndian, SrcUnsafe, TrueLblName);
+ [Src, Bits, Base, Offset] ->
+ {SizeCode, SizeReg} =
+ hipe_rtl_binary:make_size(Size, Bits, SystemLimitLblName,
+ FalseLblName),
+ CCode = int_c_code(NewOffset, Src, Base, Offset, SizeReg, Flags,
+ TrueLblName, FalseLblName),
+ InCode = put_dynamic_int(NewOffset, Src, Base, Offset, SizeReg,
+ CCode, Aligned, LittleEndian, SrcUnsafe,
+ TrueLblName),
+ SizeCode ++ InCode
+ end
+ end
+ end.
+
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%
%% Code that is used in the append and init writeable functions
@@ -807,28 +772,8 @@ put_float(_NewOffset, _Src, _Base, _Offset, _Size, CCode, _Aligned,
CCode.
put_static_int(NewOffset, Src, Base, Offset, Size, CCode, Aligned,
- LittleEndian, TrueLblName) ->
- {Init, End, UntaggedSrc} = make_init_end(Src, CCode, TrueLblName),
- case {Aligned, LittleEndian} of
- {true, true} ->
- Init ++
- copy_int_little(Base, Offset, NewOffset, Size, UntaggedSrc) ++
- End;
- {true, false} ->
- Init ++
- copy_int_big(Base, Offset, NewOffset, Size, UntaggedSrc) ++
- End;
- {false, true} ->
- CCode;
- {false, false} ->
- Init ++
- copy_offset_int_big(Base, Offset, NewOffset, Size, UntaggedSrc) ++
- End
- end.
-
-put_unsafe_static_int(NewOffset, Src, Base, Offset, Size, CCode, Aligned,
- LittleEndian, TrueLblName) ->
- {Init, End, UntaggedSrc} = make_init_end(Src, TrueLblName),
+ LittleEndian, SrcUnsafe, TrueLblName) ->
+ {Init, End, UntaggedSrc} = make_init_end(Src, CCode, SrcUnsafe, TrueLblName),
case {Aligned, LittleEndian} of
{true, true} ->
Init ++
@@ -847,27 +792,8 @@ put_unsafe_static_int(NewOffset, Src, Base, Offset, Size, CCode, Aligned,
end.
put_dynamic_int(NewOffset, Src, Base, Offset, SizeReg, CCode, Aligned,
- LittleEndian, TrueLblName) ->
- {Init, End, UntaggedSrc} = make_init_end(Src, CCode, TrueLblName),
- case Aligned of
- true ->
- case LittleEndian of
- true ->
- Init ++
- copy_int_little(Base, Offset, NewOffset, SizeReg, UntaggedSrc) ++
- End;
- false ->
- Init ++
- copy_int_big(Base, Offset, NewOffset, SizeReg, UntaggedSrc) ++
- End
- end;
- false ->
- CCode
- end.
-
-put_unsafe_dynamic_int(NewOffset, Src, Base, Offset, SizeReg, CCode, Aligned,
- LittleEndian, TrueLblName) ->
- {Init, End, UntaggedSrc} = make_init_end(Src, TrueLblName),
+ LittleEndian, SrcUnsafe, TrueLblName) ->
+ {Init, End, UntaggedSrc} = make_init_end(Src, CCode, SrcUnsafe, TrueLblName),
case Aligned of
true ->
case LittleEndian of
@@ -884,14 +810,13 @@ put_unsafe_dynamic_int(NewOffset, Src, Base, Offset, SizeReg, CCode, Aligned,
CCode
end.
-
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%
%% Help functions used by the above
%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-make_init_end(Src, CCode, TrueLblName) ->
+make_init_end(Src, CCode, false, TrueLblName) ->
[CLbl, SuccessLbl] = create_lbls(2),
[UntaggedSrc] = create_regs(1),
Init = [hipe_tagscheme:test_fixnum(Src, hipe_rtl:label_name(SuccessLbl),
@@ -899,9 +824,8 @@ make_init_end(Src, CCode, TrueLblName) ->
SuccessLbl,
hipe_tagscheme:untag_fixnum(UntaggedSrc,Src)],
End = [hipe_rtl:mk_goto(TrueLblName), CLbl| CCode],
- {Init, End, UntaggedSrc}.
-
-make_init_end(Src, TrueLblName) ->
+ {Init, End, UntaggedSrc};
+make_init_end(Src, _CCode, true, TrueLblName) ->
[UntaggedSrc] = create_regs(1),
Init = [hipe_tagscheme:untag_fixnum(UntaggedSrc,Src)],
End = [hipe_rtl:mk_goto(TrueLblName)],
diff --git a/lib/hipe/rtl/hipe_tagscheme.erl b/lib/hipe/rtl/hipe_tagscheme.erl
index 35d1e7c8a4..68cbe75e85 100644
--- a/lib/hipe/rtl/hipe_tagscheme.erl
+++ b/lib/hipe/rtl/hipe_tagscheme.erl
@@ -40,6 +40,7 @@
fixnum_gt/5, fixnum_lt/5, fixnum_ge/5, fixnum_le/5, fixnum_val/1,
fixnum_mul/4, fixnum_addsub/5, fixnum_andorxor/4, fixnum_not/2,
fixnum_bsr/3, fixnum_bsl/3]).
+-export([test_either_immed/4]).
-export([unsafe_car/2, unsafe_cdr/2,
unsafe_constant_element/3, unsafe_update_element/3, element/6]).
-export([unsafe_closure_element/3]).
@@ -363,14 +364,17 @@ test_matchstate(X, TrueLab, FalseLab, Pred) ->
mask_and_compare(Tmp, ?TAG_HEADER_MASK, ?TAG_HEADER_BIN_MATCHSTATE,
TrueLab, FalseLab, Pred)].
+test_bitstr_header(HdrTmp, TrueLab, FalseLab, Pred) ->
+ Mask = ?TAG_HEADER_MASK - ?BINARY_XXX_MASK,
+ mask_and_compare(HdrTmp, Mask, ?TAG_HEADER_REFC_BIN, TrueLab, FalseLab, Pred).
+
test_bitstr(X, TrueLab, FalseLab, Pred) ->
Tmp = hipe_rtl:mk_new_reg_gcsafe(),
HalfTrueLab = hipe_rtl:mk_new_label(),
- Mask = ?TAG_HEADER_MASK - ?BINARY_XXX_MASK,
[test_is_boxed(X, hipe_rtl:label_name(HalfTrueLab), FalseLab, Pred),
HalfTrueLab,
get_header(Tmp, X),
- mask_and_compare(Tmp, Mask, ?TAG_HEADER_REFC_BIN, TrueLab, FalseLab, Pred)].
+ test_bitstr_header(Tmp, TrueLab, FalseLab, Pred)].
test_binary(X, TrueLab, FalseLab, Pred) ->
Tmp1 = hipe_rtl:mk_new_reg_gcsafe(),
@@ -378,12 +382,10 @@ test_binary(X, TrueLab, FalseLab, Pred) ->
IsBoxedLab = hipe_rtl:mk_new_label(),
IsBitStrLab = hipe_rtl:mk_new_label(),
IsSubBinLab = hipe_rtl:mk_new_label(),
- Mask = ?TAG_HEADER_MASK - ?BINARY_XXX_MASK,
[test_is_boxed(X, hipe_rtl:label_name(IsBoxedLab), FalseLab, Pred),
IsBoxedLab,
get_header(Tmp1, X),
- mask_and_compare(Tmp1, Mask, ?TAG_HEADER_REFC_BIN,
- hipe_rtl:label_name(IsBitStrLab), FalseLab, Pred),
+ test_bitstr_header(Tmp1, hipe_rtl:label_name(IsBitStrLab), FalseLab, Pred),
IsBitStrLab,
mask_and_compare(Tmp1, ?TAG_HEADER_MASK, ?TAG_HEADER_SUB_BIN,
hipe_rtl:label_name(IsSubBinLab), TrueLab, 0.5),
@@ -453,6 +455,10 @@ test_fixnums_1([Arg1, Arg2|Args], Acc) ->
Tmp = hipe_rtl:mk_new_reg_gcsafe(),
test_fixnums_1([Tmp|Args], [hipe_rtl:mk_alu(Tmp, Arg1, 'and', Arg2)|Acc]).
+test_two_fixnums(Arg, Arg, FalseLab) ->
+ TrueLab = hipe_rtl:mk_new_label(),
+ [test_fixnum(Arg, hipe_rtl:label_name(TrueLab), FalseLab, 0.99),
+ TrueLab];
test_two_fixnums(Arg1, Arg2, FalseLab) ->
TrueLab = hipe_rtl:mk_new_label(),
case hipe_rtl:is_imm(Arg1) orelse hipe_rtl:is_imm(Arg2) of
@@ -567,8 +573,8 @@ fixnum_andorxor(AluOp, Arg1, Arg2, Res) ->
case AluOp of
'xor' ->
Tmp = hipe_rtl:mk_new_reg_gcsafe(),
- [hipe_rtl:mk_alu(Tmp, Arg1, 'xor', Arg2), % clears tag :-(
- hipe_rtl:mk_alu(Res, Tmp, 'or', hipe_rtl:mk_imm(?TAG_IMMED1_SMALL))];
+ [hipe_rtl:mk_alu(Tmp, Arg1, 'sub', hipe_rtl:mk_imm(?TAG_IMMED1_SMALL)),
+ hipe_rtl:mk_alu(Res, Tmp, 'xor', Arg2)];
_ -> hipe_rtl:mk_alu(Res, Arg1, AluOp, Arg2)
end.
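A short derivation of why the rewritten 'xor' case needs no re-tagging step, assuming the usual HiPE fixnum encoding (?TAG_IMMED1_SMALL = 2#1111 in the four low bits; a sketch only, not part of the patch):

    %% tag(V) = (V bsl 4) bor 2#1111.  For tagged fixnums A and B:
    %%   (A - 2#1111) bxor B  =  ((Av bxor Bv) bsl 4) bor 2#1111
    %% i.e. already a correctly tagged fixnum, so no trailing 'or' is needed.
    tag_fixnum(V) -> (V bsl 4) bor 2#1111.
    fixnum_bxor_model(A, B) -> (A - 2#1111) bxor B.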
@@ -595,6 +601,21 @@ fixnum_bsl(Arg1, Arg2, Res) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Test if either of two values is immediate (primary tag IMMED1, 0x3)
+test_either_immed(Arg1, Arg2, TrueLab, FalseLab) ->
+ %% This test assumes primary tag 0x0 is reserved and immed has tag 0x3
+ 16#0 = ?TAG_PRIMARY_HEADER,
+ 16#3 = ?TAG_PRIMARY_IMMED1,
+ Tmp1 = hipe_rtl:mk_new_reg_gcsafe(),
+ Tmp2 = hipe_rtl:mk_new_reg_gcsafe(),
+ [hipe_rtl:mk_alu(Tmp1, Arg1, 'sub', hipe_rtl:mk_imm(1)),
+ hipe_rtl:mk_alu(Tmp2, Arg2, 'sub', hipe_rtl:mk_imm(1)),
+ hipe_rtl:mk_alu(Tmp2, Tmp2, 'or', Tmp1),
+ hipe_rtl:mk_branch(Tmp2, 'and', hipe_rtl:mk_imm(2), eq,
+ FalseLab, TrueLab, 0.01)].
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
unsafe_car(Dst, Arg) ->
hipe_rtl:mk_load(Dst, Arg, hipe_rtl:mk_imm(-(?TAG_PRIMARY_LIST))).
@@ -631,14 +652,13 @@ unsafe_update_element(Tuple, Index, Value) -> % Index is an immediate
element(Dst, Index, Tuple, FailLabName, {tuple, A}, IndexInfo) ->
FixnumOkLab = hipe_rtl:mk_new_label(),
IndexOkLab = hipe_rtl:mk_new_label(),
- Ptr = hipe_rtl:mk_new_reg(), % offset from Tuple
UIndex = hipe_rtl:mk_new_reg_gcsafe(),
Arity = hipe_rtl:mk_imm(A),
- InvIndex = hipe_rtl:mk_new_reg_gcsafe(),
- Offset = hipe_rtl:mk_new_reg_gcsafe(),
case IndexInfo of
valid ->
%% This is no branch, 1 load and 3 alus = 4 instr
+ Offset = hipe_rtl:mk_new_reg_gcsafe(),
+ Ptr = hipe_rtl:mk_new_reg(), % offset from Tuple
[untag_fixnum(UIndex, Index),
hipe_rtl:mk_alu(Ptr, Tuple, 'sub', hipe_rtl:mk_imm(?TAG_PRIMARY_BOXED)),
hipe_rtl:mk_alu(Offset, UIndex, 'sll',
@@ -647,72 +667,56 @@ element(Dst, Index, Tuple, FailLabName, {tuple, A}, IndexInfo) ->
fixnums ->
%% This is 1 branch, 1 load and 4 alus = 6 instr
[untag_fixnum(UIndex, Index),
- hipe_rtl:mk_alu(Ptr, Tuple, 'sub',hipe_rtl:mk_imm(?TAG_PRIMARY_BOXED))|
- gen_element_tail(Dst, Ptr, InvIndex, Arity, Offset, UIndex,
- FailLabName, IndexOkLab)];
+ gen_element_tail(Dst, Tuple, Arity, UIndex, FailLabName, IndexOkLab)];
_ ->
%% This is 3 branches, 1 load and 5 alus = 9 instr
[test_fixnum(Index, hipe_rtl:label_name(FixnumOkLab),
FailLabName, 0.99),
FixnumOkLab,
untag_fixnum(UIndex, Index),
- hipe_rtl:mk_alu(Ptr, Tuple, 'sub',hipe_rtl:mk_imm(?TAG_PRIMARY_BOXED))|
- gen_element_tail(Dst, Ptr, InvIndex, Arity, Offset, UIndex,
- FailLabName, IndexOkLab)]
+ gen_element_tail(Dst, Tuple, Arity, UIndex, FailLabName, IndexOkLab)]
end;
element(Dst, Index, Tuple, FailLabName, tuple, IndexInfo) ->
FixnumOkLab = hipe_rtl:mk_new_label(),
IndexOkLab = hipe_rtl:mk_new_label(),
- Ptr = hipe_rtl:mk_new_reg(), % offset from Tuple
Header = hipe_rtl:mk_new_reg_gcsafe(),
UIndex = hipe_rtl:mk_new_reg_gcsafe(),
Arity = hipe_rtl:mk_new_reg_gcsafe(),
- InvIndex = hipe_rtl:mk_new_reg_gcsafe(),
- Offset = hipe_rtl:mk_new_reg_gcsafe(),
case IndexInfo of
fixnums ->
%% This is 1 branch, 2 loads and 5 alus = 8 instr
- [hipe_rtl:mk_alu(Ptr, Tuple, 'sub', hipe_rtl:mk_imm(?TAG_PRIMARY_BOXED)),
- hipe_rtl:mk_load(Header, Ptr, hipe_rtl:mk_imm(0)),
+ [get_header(Header, Tuple),
untag_fixnum(UIndex, Index),
hipe_rtl:mk_alu(Arity,Header,'srl',hipe_rtl:mk_imm(?HEADER_ARITY_OFFS))|
- gen_element_tail(Dst, Ptr, InvIndex, Arity, Offset, UIndex,
- FailLabName, IndexOkLab)];
+ gen_element_tail(Dst, Tuple, Arity, UIndex, FailLabName, IndexOkLab)];
Num when is_integer(Num) ->
%% This is 1 branch, 1 load and 3 alus = 5 instr
- [hipe_rtl:mk_alu(Ptr, Tuple, 'sub', hipe_rtl:mk_imm(?TAG_PRIMARY_BOXED))|
- gen_element_tail(Dst, Ptr, InvIndex, hipe_rtl:mk_imm(Num),
- Offset, UIndex, FailLabName, IndexOkLab)];
+ gen_element_tail(Dst, Tuple, hipe_rtl:mk_imm(Num), UIndex, FailLabName,
+ IndexOkLab);
_ ->
%% This is 2 branches, 2 loads and 6 alus = 10 instr
[test_fixnum(Index, hipe_rtl:label_name(FixnumOkLab), FailLabName, 0.99),
FixnumOkLab,
- hipe_rtl:mk_alu(Ptr, Tuple, 'sub', hipe_rtl:mk_imm(?TAG_PRIMARY_BOXED)),
- hipe_rtl:mk_load(Header, Ptr, hipe_rtl:mk_imm(0)),
+ get_header(Header, Tuple),
untag_fixnum(UIndex, Index),
hipe_rtl:mk_alu(Arity,Header,'srl',hipe_rtl:mk_imm(?HEADER_ARITY_OFFS))|
- gen_element_tail(Dst, Ptr, InvIndex, Arity, Offset, UIndex,
- FailLabName, IndexOkLab)]
+ gen_element_tail(Dst, Tuple, Arity, UIndex, FailLabName, IndexOkLab)]
end;
element(Dst, Index, Tuple, FailLabName, unknown, IndexInfo) ->
FixnumOkLab = hipe_rtl:mk_new_label(),
BoxedOkLab = hipe_rtl:mk_new_label(),
TupleOkLab = hipe_rtl:mk_new_label(),
IndexOkLab = hipe_rtl:mk_new_label(),
- Ptr = hipe_rtl:mk_new_reg(), % offset from Tuple
Header = hipe_rtl:mk_new_reg_gcsafe(),
UIndex = hipe_rtl:mk_new_reg_gcsafe(),
Arity = hipe_rtl:mk_new_reg_gcsafe(),
- InvIndex = hipe_rtl:mk_new_reg_gcsafe(),
- Offset = hipe_rtl:mk_new_reg_gcsafe(),
case IndexInfo of
fixnums ->
%% This is 3 branches, 2 loads and 5 alus = 10 instr
[test_is_boxed(Tuple, hipe_rtl:label_name(BoxedOkLab),
FailLabName, 0.99),
BoxedOkLab,
- hipe_rtl:mk_alu(Ptr, Tuple, 'sub', hipe_rtl:mk_imm(?TAG_PRIMARY_BOXED)),
- hipe_rtl:mk_load(Header, Ptr, hipe_rtl:mk_imm(0)),
+ get_header(Header, Tuple),
hipe_rtl:mk_branch(Header, 'and',
hipe_rtl:mk_imm(?TAG_HEADER_MASK), 'eq',
hipe_rtl:label_name(TupleOkLab), FailLabName, 0.99),
@@ -720,23 +724,21 @@ element(Dst, Index, Tuple, FailLabName, unknown, IndexInfo) ->
untag_fixnum(UIndex, Index),
hipe_rtl:mk_alu(Arity, Header, 'srl',
hipe_rtl:mk_imm(?HEADER_ARITY_OFFS))|
- gen_element_tail(Dst, Ptr, InvIndex, Arity, Offset,
- UIndex, FailLabName, IndexOkLab)];
+ gen_element_tail(Dst, Tuple, Arity, UIndex, FailLabName, IndexOkLab)];
Num when is_integer(Num) ->
%% This is 3 branches, 2 loads and 4 alus = 9 instr
[test_is_boxed(Tuple, hipe_rtl:label_name(BoxedOkLab),
FailLabName, 0.99),
BoxedOkLab,
- hipe_rtl:mk_alu(Ptr, Tuple, 'sub', hipe_rtl:mk_imm(?TAG_PRIMARY_BOXED)),
- hipe_rtl:mk_load(Header, Ptr, hipe_rtl:mk_imm(0)),
+ get_header(Header, Tuple),
hipe_rtl:mk_branch(Header, 'and',
hipe_rtl:mk_imm(?TAG_HEADER_MASK), 'eq',
hipe_rtl:label_name(TupleOkLab), FailLabName, 0.99),
TupleOkLab,
hipe_rtl:mk_alu(Arity, Header, 'srl',
hipe_rtl:mk_imm(?HEADER_ARITY_OFFS))|
- gen_element_tail(Dst, Ptr, InvIndex, Arity, Offset,
- hipe_rtl:mk_imm(Num), FailLabName, IndexOkLab)];
+ gen_element_tail(Dst, Tuple, Arity, hipe_rtl:mk_imm(Num), FailLabName,
+ IndexOkLab)];
_ ->
%% This is 4 branches, 2 loads, and 6 alus = 12 instr :(
[test_fixnum(Index, hipe_rtl:label_name(FixnumOkLab),
@@ -745,8 +747,7 @@ element(Dst, Index, Tuple, FailLabName, unknown, IndexInfo) ->
test_is_boxed(Tuple, hipe_rtl:label_name(BoxedOkLab),
FailLabName, 0.99),
BoxedOkLab,
- hipe_rtl:mk_alu(Ptr, Tuple, 'sub', hipe_rtl:mk_imm(?TAG_PRIMARY_BOXED)),
- hipe_rtl:mk_load(Header, Ptr, hipe_rtl:mk_imm(0)),
+ get_header(Header, Tuple),
hipe_rtl:mk_branch(Header, 'and',
hipe_rtl:mk_imm(?TAG_HEADER_MASK), 'eq',
hipe_rtl:label_name(TupleOkLab), FailLabName, 0.99),
@@ -754,20 +755,21 @@ element(Dst, Index, Tuple, FailLabName, unknown, IndexInfo) ->
untag_fixnum(UIndex, Index),
hipe_rtl:mk_alu(Arity, Header, 'srl',
hipe_rtl:mk_imm(?HEADER_ARITY_OFFS))|
- gen_element_tail(Dst, Ptr, InvIndex, Arity, Offset,
- UIndex, FailLabName, IndexOkLab)]
+ gen_element_tail(Dst, Tuple, Arity, UIndex, FailLabName, IndexOkLab)]
end.
-gen_element_tail(Dst, Ptr, InvIndex, Arity, Offset,
- UIndex, FailLabName, IndexOkLab) ->
+gen_element_tail(Dst, Tuple, Arity, UIndex, FailLabName, IndexOkLab) ->
+ ZeroIndex = hipe_rtl:mk_new_reg_gcsafe(),
+ Offset = hipe_rtl:mk_new_reg_gcsafe(),
+ Ptr = hipe_rtl:mk_new_reg(), % offset from Tuple
%% now check that 1 <= UIndex <= Arity
- %% if UIndex < 1, then (Arity - UIndex) >= Arity
- %% if UIndex > Arity, then (Arity - UIndex) < 0, which is >=u Arity
- %% otherwise, 0 <= (Arity - UIndex) < Arity
- [hipe_rtl:mk_alu(InvIndex, Arity, 'sub', UIndex),
- hipe_rtl:mk_branch(InvIndex, 'geu', Arity, FailLabName,
+ %% by checking the equivalent (except for when Arity>=2^(WordSize-1))
+ %% (UIndex - 1) <u Arity
+ [hipe_rtl:mk_alu(ZeroIndex, UIndex, 'sub', hipe_rtl:mk_imm(1)),
+ hipe_rtl:mk_branch(ZeroIndex, 'geu', Arity, FailLabName,
hipe_rtl:label_name(IndexOkLab), 0.01),
IndexOkLab,
+ hipe_rtl:mk_alu(Ptr, Tuple, 'sub', hipe_rtl:mk_imm(?TAG_PRIMARY_BOXED)),
hipe_rtl:mk_alu(Offset, UIndex, 'sll',
hipe_rtl:mk_imm(hipe_rtl_arch:log2_word_size())),
hipe_rtl:mk_load(Dst, Ptr, Offset)].
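A standalone model of the single unsigned comparison gen_element_tail/6 now performs for the 1 =< UIndex =< Arity check (64-bit word size and helper name assumed for illustration):

    %% (UIndex - 1), read as an unsigned word, is below Arity exactly when
    %% 1 =< UIndex andalso UIndex =< Arity, provided Arity < 2^63, so a single
    %% 'geu' branch replaces the old Arity - UIndex computation.
    -define(WORD_MASK, 16#FFFFFFFFFFFFFFFF).
    index_ok(UIndex, Arity) ->
        ((UIndex - 1) band ?WORD_MASK) < Arity.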
diff --git a/lib/hipe/test/basic_SUITE_data/basic_tuples.erl b/lib/hipe/test/basic_SUITE_data/basic_tuples.erl
index 94c187e364..96e39d565a 100644
--- a/lib/hipe/test/basic_SUITE_data/basic_tuples.erl
+++ b/lib/hipe/test/basic_SUITE_data/basic_tuples.erl
@@ -55,6 +55,8 @@ test_element(T0, T1, T2, N) ->
List = lists:seq(1, N),
Tuple = list_to_tuple(List),
ok = get_elements(List, Tuple, 1),
+ %% element/2 of larger tuple with omitted bounds test
+ true = lists:all(fun(I) -> I * I =:= square(I) end, lists:seq(1, 20)),
%% some cases that throw exceptions
{'EXIT', _} = (catch my_element(0, T2)),
{'EXIT', _} = (catch my_element(3, T2)),
@@ -73,6 +75,18 @@ get_elements([Element|Rest], Tuple, Pos) ->
get_elements([], _Tuple, _Pos) ->
ok.
+squares() ->
+ {1*1, 2*2, 3*3, 4*4, 5*5, 6*6, 7*7, 8*8, 9*9, 10*10,
+ 11*11, 12*12, 13*13, 14*14, 15*15, 16*16, 17*17, 18*18, 19*19, 20*20}.
+
+square(N) when is_integer(N), N >= 1, N =< 20 ->
+ %% The guard tests let the range analysis conclude that N is an integer in the
+ %% 1..20 range. 20-1=19 is bigger than ?SET_LIMIT in erl_types.erl, and will
+ %% thus be represented by an ?int_range() rather than an ?int_set().
+ %% Because of the range analysis, the bounds test of this element/2 call
+ %% should be omitted.
+ element(N, squares()).
+
%%--------------------------------------------------------------------
%% Tests set_element/3.
diff --git a/lib/hipe/test/maps_SUITE_data/maps_warn_pair_key_overloaded.erl b/lib/hipe/test/maps_SUITE_data/maps_warn_pair_key_overloaded.erl
index 76b2a91f94..cce91530f4 100644
--- a/lib/hipe/test/maps_SUITE_data/maps_warn_pair_key_overloaded.erl
+++ b/lib/hipe/test/maps_SUITE_data/maps_warn_pair_key_overloaded.erl
@@ -14,7 +14,6 @@ test() ->
"hi2" => lists:subtract([1,2],[1]),
"hi3" => +3,
"hi1" => erlang:min(1,2),
- "hi1" => erlang:hash({1,2},35),
"hi1" => erlang:phash({1,2},33),
"hi1" => erlang:phash2({1,2},34),
"hi1" => erlang:integer_to_binary(1337),
diff --git a/lib/hipe/x86/hipe_rtl_to_x86.erl b/lib/hipe/x86/hipe_rtl_to_x86.erl
index 29cad6ca51..31e4f6e4ac 100644
--- a/lib/hipe/x86/hipe_rtl_to_x86.erl
+++ b/lib/hipe/x86/hipe_rtl_to_x86.erl
@@ -124,7 +124,6 @@ conv_insn(I, Map, Data) ->
hipe_rtl:call_continuation(I),
hipe_rtl:call_fail(I),
hipe_rtl:call_type(I)),
- %% XXX Fixme: this ++ is probably inefficient.
{FixArgs++I2, Map2, Data};
#comment{} ->
I2 = [hipe_x86:mk_comment(hipe_rtl:comment_text(I))],
diff --git a/lib/hipe/x86/hipe_x86_assemble.erl b/lib/hipe/x86/hipe_x86_assemble.erl
index ef9c32ef41..fb0beba293 100644
--- a/lib/hipe/x86/hipe_x86_assemble.erl
+++ b/lib/hipe/x86/hipe_x86_assemble.erl
@@ -148,6 +148,8 @@ insn_size(I) ->
translate_insn(I, Context, Options) ->
case I of
+ #alu{aluop='xor', src=#x86_temp{reg=Reg}=Src, dst=#x86_temp{reg=Reg}=Dst} ->
+ [{'xor', {temp_to_reg32(Dst), temp_to_rm32(Src)}, I}];
#alu{} ->
Arg = resolve_alu_args(hipe_x86:alu_src(I), hipe_x86:alu_dst(I), Context),
[{hipe_x86:alu_op(I), Arg, I}];
@@ -228,11 +230,11 @@ translate_insn(I, Context, Options) ->
#move64{} ->
translate_move64(I, Context);
#movsx{} ->
- Arg = resolve_movx_args(hipe_x86:movsx_src(I), hipe_x86:movsx_dst(I)),
- [{movsx, Arg, I}];
+ Src = resolve_movx_src(hipe_x86:movsx_src(I)),
+ [{movsx, {temp_to_regArch(hipe_x86:movsx_dst(I)), Src}, I}];
#movzx{} ->
- Arg = resolve_movx_args(hipe_x86:movzx_src(I), hipe_x86:movzx_dst(I)),
- [{movzx, Arg, I}];
+ Src = resolve_movx_src(hipe_x86:movzx_src(I)),
+ [{movzx, {temp_to_reg32(hipe_x86:movzx_dst(I)), Src}, I}];
%% pseudo_call: eliminated before assembly
%% pseudo_jcc: eliminated before assembly
%% pseudo_tailcall: eliminated before assembly
@@ -845,16 +847,15 @@ translate_move64(I, _Context) -> exit({?MODULE, I}).
-endif.
%%% mov{s,z}x
-resolve_movx_args(Src=#x86_mem{type=Type}, Dst=#x86_temp{}) ->
- {temp_to_regArch(Dst),
- case Type of
- byte ->
- mem_to_rm8(Src);
- int16 ->
- mem_to_rm16(Src);
- int32 ->
- mem_to_rm32(Src)
- end}.
+resolve_movx_src(Src=#x86_mem{type=Type}) ->
+ case Type of
+ byte ->
+ mem_to_rm8(Src);
+ int16 ->
+ mem_to_rm16(Src);
+ int32 ->
+ mem_to_rm32(Src)
+ end.
%%% alu/cmp (_not_ test)
resolve_alu_args(Src, Dst, Context) ->
diff --git a/lib/hipe/x86/hipe_x86_postpass.erl b/lib/hipe/x86/hipe_x86_postpass.erl
index b84e9bed91..925054dd68 100644
--- a/lib/hipe/x86/hipe_x86_postpass.erl
+++ b/lib/hipe/x86/hipe_x86_postpass.erl
@@ -57,9 +57,10 @@ postpass(#defun{code=Code0}=Defun, Options) ->
peephole_optimization(Insns) ->
peep(Insns, [], []).
-%% MoveSelf related peep-opts
+
+%% MoveSelf related peep-opts
%% ------------------------------
-peep([#fmove{src=Src, dst=Src} | Insns], Res,Lst) ->
+peep([#fmove{src=Src, dst=Src} | Insns], Res,Lst) ->
peep(Insns, Res, [moveSelf1|Lst]);
peep([I=#fmove{src=Src, dst=Dst},
#fmove{src=Dst, dst=Src} | Insns], Res,Lst) ->
@@ -159,8 +160,7 @@ peep([#jcc{label=Lab}, I=#label{label=Lab}|Insns], Res, Lst) ->
%% ElimSet0
%% --------
-peep([#move{src=#x86_imm{value=0},dst=Dst}|Insns],Res,Lst)
-when (Dst==#x86_temp{}) ->
+peep([#move{src=#x86_imm{value=0},dst=Dst=#x86_temp{}}|Insns],Res,Lst) ->
peep(Insns, [#alu{aluop='xor', src=Dst, dst=Dst}|Res], [elimSet0|Lst]);
%% ElimMDPow2
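The ElimSet0 hunk above replaces a guard that effectively never held. A tiny illustration, using a hypothetical record, of the difference between the two forms:

    -record(t, {f}).
    old_way(X) -> X == #t{}.       %% only true for the all-default #t{f = undefined}
    new_way(#t{}) -> true;         %% true for any #t{} record
    new_way(_)    -> false.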
diff --git a/lib/inets/doc/src/notes.xml b/lib/inets/doc/src/notes.xml
index 398fc7e5b6..1d8432ee35 100644
--- a/lib/inets/doc/src/notes.xml
+++ b/lib/inets/doc/src/notes.xml
@@ -33,7 +33,22 @@
<file>notes.xml</file>
</header>
- <section><title>Inets 6.3.4</title>
+ <section><title>Inets 6.3.5</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ Correct mistakes in ftp client introduced in inets-6.3.4</p>
+ <p>
+ Own Id: OTP-14203 Aux Id: OTP-13982 </p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Inets 6.3.4</title>
<section><title>Fixed Bugs and Malfunctions</title>
<list>
@@ -698,7 +713,7 @@
<list>
<item>
<p>
- Gracefully handle invalid content-lenght headers instead
+ Gracefully handle invalid content-length headers instead
of crashing in list_to_integer.</p>
<p>
Own Id: OTP-12429</p>
diff --git a/lib/inets/src/ftp/ftp.erl b/lib/inets/src/ftp/ftp.erl
index 911f5b71a7..23d6483291 100644
--- a/lib/inets/src/ftp/ftp.erl
+++ b/lib/inets/src/ftp/ftp.erl
@@ -1477,10 +1477,7 @@ handle_info({Trpt, Socket, Data}, #state{dsock = {Trpt,Socket}} = State0) when T
handle_info({Cls, Socket}, #state{dsock = {Trpt,Socket},
caller = {recv_file, Fd}} = State)
when {Cls,Trpt}=={tcp_closed,tcp} ; {Cls,Trpt}=={ssl_closed,ssl} ->
- case file_close(Fd) of
- ok -> ok;
- {error,einval} -> ok
- end,
+ file_close(Fd),
progress_report({transfer_size, 0}, State),
activate_ctrl_connection(State),
{noreply, State#state{dsock = undefined, data = <<>>}};
@@ -2066,10 +2063,7 @@ handle_ctrl_result({pos_prel, _}, #state{caller = {recv_file, _}} = State0) ->
end;
handle_ctrl_result({Status, _}, #state{caller = {recv_file, Fd}} = State) ->
- case file_close(Fd) of
- ok -> ok;
- {error, einval} -> ok
- end,
+ file_close(Fd),
close_data_connection(State),
ctrl_result_response(Status, State#state{dsock = undefined},
{error, epath});
@@ -2345,7 +2339,7 @@ accept_data_connection(#state{mode = passive} = State) ->
send_ctrl_message(_S=#state{csock = Socket, verbose = Verbose}, Message) ->
verbose(lists:flatten(Message),Verbose,send),
?DBG('<--ctrl ~p ---- ~s~p~n',[Socket,Message,_S]),
- ok = send_message(Socket, Message).
+ _ = send_message(Socket, Message).
send_data_message(_S=#state{dsock = Socket}, Message) ->
?DBG('<==data ~p ==== ~s~n~p~n',[Socket,Message,_S]),
@@ -2366,37 +2360,44 @@ send_message({tcp, Socket}, Message) ->
send_message({ssl, Socket}, Message) ->
ssl:send(Socket, Message).
-activate_ctrl_connection(#state{csock = Socket, ctrl_data = {<<>>, _, _}}) ->
- ok = activate_connection(Socket);
-activate_ctrl_connection(#state{csock = Socket}) ->
- ok = activate_connection(Socket),
+activate_ctrl_connection(#state{csock = CSock, ctrl_data = {<<>>, _, _}}) ->
+ activate_connection(CSock);
+activate_ctrl_connection(#state{csock = CSock}) ->
+ activate_connection(CSock),
%% We have already received at least part of the next control message,
%% that has been saved in ctrl_data, process this first.
- self() ! {socket_type(Socket), unwrap_socket(Socket), <<>>},
+ self() ! {socket_type(CSock), unwrap_socket(CSock), <<>>},
ok.
+activate_data_connection(#state{dsock = DSock} = State) ->
+ activate_connection(DSock),
+ State.
+
+activate_connection(Socket) ->
+ ignore_return_value(
+ case socket_type(Socket) of
+ tcp -> inet:setopts(unwrap_socket(Socket), [{active, once}]);
+ ssl -> ssl:setopts(unwrap_socket(Socket), [{active, once}])
+ end).
+
+
+ignore_return_value(_) -> ok.
+
unwrap_socket({tcp,Socket}) -> Socket;
unwrap_socket({ssl,Socket}) -> Socket.
socket_type({tcp,_Socket}) -> tcp;
socket_type({ssl,_Socket}) -> ssl.
-activate_data_connection(#state{dsock = Socket} = State) ->
- ok = activate_connection(Socket),
- State.
-
-activate_connection({tcp, Socket}) -> inet:setopts(Socket, [{active, once}]);
-activate_connection({ssl, Socket}) -> ssl:setopts(Socket, [{active, once}]).
-
close_ctrl_connection(#state{csock = undefined}) -> ok;
close_ctrl_connection(#state{csock = Socket}) -> close_connection(Socket).
close_data_connection(#state{dsock = undefined}) -> ok;
close_data_connection(#state{dsock = Socket}) -> close_connection(Socket).
-close_connection({lsock,Socket}) -> gen_tcp:close(Socket);
-close_connection({tcp, Socket}) -> gen_tcp:close(Socket);
-close_connection({ssl, Socket}) -> ssl:close(Socket).
+close_connection({lsock,Socket}) -> ignore_return_value( gen_tcp:close(Socket) );
+close_connection({tcp, Socket}) -> ignore_return_value( gen_tcp:close(Socket) );
+close_connection({ssl, Socket}) -> ignore_return_value( ssl:close(Socket) ).
%% ------------ FILE HANDLING ----------------------------------------
send_file(#state{tls_upgrading_data_connection = {true, CTRL, _}} = State, Fd) ->
@@ -2408,7 +2409,7 @@ send_file(State, Fd) ->
progress_report({binary, Bin}, State),
send_file(State, Fd);
{ok, _, _} ->
- ok = file_close(Fd),
+ file_close(Fd),
close_data_connection(State),
progress_report({transfer_size, 0}, State),
activate_ctrl_connection(State),
@@ -2423,7 +2424,7 @@ file_open(File, Option) ->
file:open(File, [raw, binary, Option]).
file_close(Fd) ->
- file:close(Fd).
+ ignore_return_value( file:close(Fd) ).
file_read(Fd) ->
case file:read(Fd, ?FILE_BUFSIZE) of
diff --git a/lib/inets/src/http_client/httpc_response.erl b/lib/inets/src/http_client/httpc_response.erl
index 0fd5faa466..d24705a845 100644
--- a/lib/inets/src/http_client/httpc_response.erl
+++ b/lib/inets/src/http_client/httpc_response.erl
@@ -434,7 +434,7 @@ format_response({StatusLine, Headers, Body}) ->
Length = list_to_integer(Headers#http_response_h.'content-length'),
{NewBody, Data} =
case Length of
- -1 -> % When no lenght indicator is provided
+ -1 -> % When no length indicator is provided
{Body, <<>>};
Length when (Length =< size(Body)) ->
<<BodyThisReq:Length/binary, Next/binary>> = Body,
diff --git a/lib/inets/src/http_server/httpd_request_handler.erl b/lib/inets/src/http_server/httpd_request_handler.erl
index 7e20a9ba67..82273c8c74 100644
--- a/lib/inets/src/http_server/httpd_request_handler.erl
+++ b/lib/inets/src/http_server/httpd_request_handler.erl
@@ -241,9 +241,9 @@ handle_info({tcp_closed, _}, State) ->
handle_info({ssl_closed, _}, State) ->
{stop, normal, State};
handle_info({tcp_error, _, _} = Reason, State) ->
- {stop, Reason, State};
+ {stop, {shutdown, Reason}, State};
handle_info({ssl_error, _, _} = Reason, State) ->
- {stop, Reason, State};
+ {stop, {shutdown, Reason}, State};
%% Timeouts
handle_info(timeout, #state{mfa = {_, parse, _}} = State) ->
diff --git a/lib/inets/src/inets_app/inets.appup.src b/lib/inets/src/inets_app/inets.appup.src
index 3a31daeb20..d28d4cd766 100644
--- a/lib/inets/src/inets_app/inets.appup.src
+++ b/lib/inets/src/inets_app/inets.appup.src
@@ -18,10 +18,14 @@
%% %CopyrightEnd%
{"%VSN%",
[
+ {<<"6.2.4">>, [{load_module, httpd_request_handler,
+ soft_purge, soft_purge, []}]},
{<<"6\\..*">>,[{restart_application, inets}]},
{<<"5\\..*">>,[{restart_application, inets}]}
],
[
+ {<<"6.2.4">>, [{load_module, httpd_request_handler,
+ soft_purge, soft_purge, []}]},
{<<"6\\..*">>,[{restart_application, inets}]},
{<<"5\\..*">>,[{restart_application, inets}]}
]
diff --git a/lib/inets/test/httpd_1_1.erl b/lib/inets/test/httpd_1_1.erl
index 3755ed117b..2b5968ca12 100644
--- a/lib/inets/test/httpd_1_1.erl
+++ b/lib/inets/test/httpd_1_1.erl
@@ -405,11 +405,11 @@ getRangeSize(Head)->
{multiPart, BoundaryString};
_X1 ->
case re:run(Head, ?CONTENT_RANGE "bytes=.*\r\n", [{capture, first}]) of
- {match, [{Start, Lenght}]} ->
+ {match, [{Start, Length}]} ->
%% Get the range data remove the fieldname and the
%% end of line.
RangeInfo = string:substr(Head, Start + 1 + 20,
- Lenght - (20 +2)),
+ Length - (20 +2)),
rangeSize(string:strip(RangeInfo));
_X2 ->
error
diff --git a/lib/inets/test/httpd_test_data/server_root/conf/httpd.conf b/lib/inets/test/httpd_test_data/server_root/conf/httpd.conf
index 3f9fde03b5..ec05fc6714 100644
--- a/lib/inets/test/httpd_test_data/server_root/conf/httpd.conf
+++ b/lib/inets/test/httpd_test_data/server_root/conf/httpd.conf
@@ -128,7 +128,7 @@ SecurityDiskLogSize 200000 10
MaxClients 50
-# KeepAlive set the flag for persistent connections. For peristent connections
+# KeepAlive set the flag for persistent connections. For persistent connections
# set KeepAlive to on. To use One request per connection set the flag to off
# Note: The value has changed since previous version of INETS.
KeepAlive on
diff --git a/lib/inets/test/old_httpd_SUITE_data/server_root/conf/httpd.conf b/lib/inets/test/old_httpd_SUITE_data/server_root/conf/httpd.conf
index 3f9fde03b5..ec05fc6714 100644
--- a/lib/inets/test/old_httpd_SUITE_data/server_root/conf/httpd.conf
+++ b/lib/inets/test/old_httpd_SUITE_data/server_root/conf/httpd.conf
@@ -128,7 +128,7 @@ SecurityDiskLogSize 200000 10
MaxClients 50
-# KeepAlive set the flag for persistent connections. For peristent connections
+# KeepAlive set the flag for persistent connections. For persistent connections
# set KeepAlive to on. To use One request per connection set the flag to off
# Note: The value has changed since previous version of INETS.
KeepAlive on
diff --git a/lib/inets/vsn.mk b/lib/inets/vsn.mk
index eef5abd610..9591ab22ed 100644
--- a/lib/inets/vsn.mk
+++ b/lib/inets/vsn.mk
@@ -19,6 +19,6 @@
# %CopyrightEnd%
APPLICATION = inets
-INETS_VSN = 6.3.4
+INETS_VSN = 6.3.5
PRE_VSN =
APP_VSN = "$(APPLICATION)-$(INETS_VSN)$(PRE_VSN)"
diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractConnection.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractConnection.java
index 7891871e76..b9b4223155 100644
--- a/lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractConnection.java
+++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractConnection.java
@@ -30,7 +30,7 @@ import java.util.Random;
* received from the peer.
*
* <p>
- * This abstract class provides the neccesary methods to maintain the actual
+ * This abstract class provides the necessary methods to maintain the actual
* connection and encode the messages and headers in the proper format according
* to the Erlang distribution protocol. Subclasses can use these methods to
* provide a more or less transparent communication channel as desired.
diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpMbox.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpMbox.java
index 70c9e6db4a..bd3a3f4ad3 100644
--- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpMbox.java
+++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpMbox.java
@@ -38,7 +38,7 @@ package com.ericsson.otp.erlang;
* <p>
* Mailboxes can be named, either at creation or later. Messages can be sent to
* named mailboxes and named Erlang processes without knowing the
- * {@link OtpErlangPid pid} that identifies the mailbox. This is neccessary in
+ * {@link OtpErlangPid pid} that identifies the mailbox. This is necessary in
* order to set up initial communication between parts of an application. Each
* mailbox can have at most one name.
* </p>
diff --git a/lib/kernel/doc/src/code.xml b/lib/kernel/doc/src/code.xml
index f881fd76fd..878a450f0f 100644
--- a/lib/kernel/doc/src/code.xml
+++ b/lib/kernel/doc/src/code.xml
@@ -258,7 +258,7 @@ zip:create("mnesia-4.4.7.ez",
both strings and atoms, but a future release will probably only allow
the arguments that are documented.</p>
- <p>As from Erlang/OTP R12B, functions in this module generally fail with an
+ <p>Functions in this module generally fail with an
exception if they are passed an incorrect type (for example, an integer or a tuple
where an atom is expected). An error tuple is returned if the argument type
is correct, but there are some other errors (for example, a non-existing directory
diff --git a/lib/kernel/doc/src/config.xml b/lib/kernel/doc/src/config.xml
index c5f37fd036..c10f11b187 100644
--- a/lib/kernel/doc/src/config.xml
+++ b/lib/kernel/doc/src/config.xml
@@ -77,8 +77,8 @@
to update the application configurations.</p>
<p>This means that specifying another <c>.config</c> file, or more
<c>.config</c> files, leads to inconsistent update of application
- configurations. Therefore, in Erlang 5.4/OTP R10B, the syntax of
- <c>sys.config</c> was extended to allow pointing out other
+ configurations. There is, however, a syntax for
+ <c>sys.config</c> that allows pointing out other
<c>.config</c> files:</p>
<code type="none">
[{Application, [{Par, Val}]} | File].</code>
diff --git a/lib/kernel/doc/src/kernel_app.xml b/lib/kernel/doc/src/kernel_app.xml
index df681a505f..b342fff0d3 100644
--- a/lib/kernel/doc/src/kernel_app.xml
+++ b/lib/kernel/doc/src/kernel_app.xml
@@ -11,7 +11,7 @@
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-
+
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
@@ -19,7 +19,7 @@
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-
+
</legalnotice>
<title>kernel</title>
@@ -58,6 +58,60 @@
</section>
<section>
+ <title>OS Signal Event Handler</title>
+ <p>Asynchronous OS signals may be subscribed to via the Kernel application's event manager
+ (see <seealso marker="doc/design_principles:des_princ">OTP Design Principles</seealso> and
+ <seealso marker="stdlib:gen_event"><c>gen_event(3)</c></seealso>) registered as <c>erl_signal_server</c>.
+ A default signal handler is installed which handles the following signals:</p>
+ <taglist>
+ <tag><c>sigusr1</c></tag>
+ <item><p>The default handler will halt Erlang and produce a crashdump
+ with slogan "Received SIGUSR1".
+ This is equivalent to calling <c>erlang:halt("Received SIGUSR1")</c>.
+ </p></item>
+
+ <tag><c>sigquit</c></tag>
+ <item><p>The default handler will halt Erlang immediately.
+ This is equivalent to calling <c>erlang:halt()</c>.
+ </p></item>
+
+ <tag><c>sigterm</c></tag>
+ <item><p>The default handler will terminate Erlang normally.
+ This is equivalent to calling <c>init:stop()</c>.
+ </p></item>
+ </taglist>
+
+ <section>
+ <title>Events</title>
+ <p>Any event handler added to <c>erl_signal_server</c> must handle the following events.</p>
+ <taglist>
+ <tag><c>sighup</c></tag>
+ <item><p>Hangup detected on controlling terminal or death of controlling process</p></item>
+ <tag><c>sigquit</c></tag>
+ <item><p>Quit from keyboard</p></item>
+ <tag><c>sigabrt</c></tag>
+ <item><p>Abort signal from abort</p></item>
+ <tag><c>sigalrm</c></tag>
+ <item><p>Timer signal from alarm</p></item>
+ <tag><c>sigterm</c></tag>
+ <item><p>Termination signal</p></item>
+ <tag><c>sigusr1</c></tag>
+ <item><p>User-defined signal 1</p></item>
+ <tag><c>sigusr2</c></tag>
+ <item><p>User-defined signal 2</p></item>
+ <tag><c>sigchld</c></tag>
+ <item><p>Child process stopped or terminated</p></item>
+ <tag><c>sigstop</c></tag>
+ <item><p>Stop process</p></item>
+ <tag><c>sigtstp</c></tag>
+ <item><p>Stop typed at terminal</p></item>
+ </taglist>
+
+ <p>Setting OS signals is described in <seealso marker="os#set_signal/2"><c>os:set_signal/2</c></seealso>.</p>
+ </section>
+ </section>
+
+ <section>
<title>Configuration</title>
<p>The following configuration parameters are defined for the Kernel
application. For more information about configuration parameters,
@@ -379,6 +433,28 @@ MaxT = TickTime + TickTime / 4</code>
return as soon as possible for <c>application_controller</c>
to terminate properly.</p>
</item>
+ <tag><c>source_search_rules = [DirRule] | [SuffixRule] </c></tag>
+ <item>
+ <marker id="source_search_rules"></marker>
+ <p>Where:</p>
+ <list type="bulleted">
+ <item><c>DirRule = {ObjDirSuffix,SrcDirSuffix}</c></item>
+ <item><c>SuffixRule = {ObjSuffix,SrcSuffix,[DirRule]}</c></item>
+ <item><c>ObjDirSuffix = string()</c></item>
+ <item><c>SrcDirSuffix = string()</c></item>
+ <item><c>ObjSuffix = string()</c></item>
+ <item><c>SrcSuffix = string()</c></item>
+ </list>
+ <p>Specifies a list of rules for use by <c>filelib:find_file/2</c> and
+ <c>filelib:find_source/2</c>. If this is set to some other value
+ than the empty list, it replaces the default rules. Rules can be
+ simple pairs of directory suffixes, such as <c>{"ebin",
+ "src"}</c>, which are used by <c>filelib:find_file/2</c>, or
+ triples specifying separate directory suffix rules depending on
+ file name extensions, for example <c>[{".beam", ".erl", [{"ebin",
+ "src"}]}</c>, which are used by <c>filelib:find_source/2</c>. Both
+ kinds of rules can be mixed in the list.</p>
+ </item>
</taglist>
</section>
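A hedged sys.config fragment using the parameter documented above (the directory names are invented for illustration):

    [{kernel,
      [{source_search_rules,
        [{".beam", ".erl", [{"ebin", "src"}, {"ebin", "esrc"}]},
         {"ebin", "src"}]}]}].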
@@ -405,4 +481,3 @@ MaxT = TickTime + TickTime / 4</code>
<seealso marker="stdlib:timer"><c>timer(3)</c></seealso></p>
</section>
</appref>
-
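A hedged sketch (module name and handler body are illustrative, not part of the patch) of a user-supplied handler for the erl_signal_server event manager documented in the OS Signal Event Handler section above:

    -module(my_signal_handler).
    -behaviour(gen_event).
    -export([init/1, handle_event/2, handle_call/2, handle_info/2,
             terminate/2, code_change/3]).

    init(_Args) -> {ok, []}.

    handle_event(sigusr2, State) ->
        error_logger:info_msg("Received SIGUSR2~n"),
        {ok, State};
    handle_event(_Signal, State) ->
        {ok, State}.

    handle_call(_Request, State) -> {ok, ok, State}.
    handle_info(_Info, State)    -> {ok, State}.
    terminate(_Reason, _State)   -> ok.
    code_change(_OldVsn, State, _Extra) -> {ok, State}.

    %% Usage sketch: enable delivery of the signal, then add the handler.
    %%   ok = os:set_signal(sigusr2, handle),
    %%   ok = gen_event:add_handler(erl_signal_server, my_signal_handler, []).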
diff --git a/lib/kernel/doc/src/notes.xml b/lib/kernel/doc/src/notes.xml
index 9277c2d353..d80c4f077c 100644
--- a/lib/kernel/doc/src/notes.xml
+++ b/lib/kernel/doc/src/notes.xml
@@ -108,7 +108,7 @@
<item>
<p>
Close stdin of commands run in os:cmd. This is a
- backwards compatiblity fix that restores the behaviour of
+ backwards compatibility fix that restores the behaviour of
pre 19.0 os:cmd.</p>
<p>
Own Id: OTP-13867 Aux Id: seq13178 </p>
@@ -1445,7 +1445,7 @@
dependent, so applications aiming to be portable should
consider using <c>{ipv6_v6only,true}</c> when creating an
<c>inet6</c> listening/destination socket, and if
- neccesary also create an <c>inet</c> socket on the same
+ necessary also create an <c>inet</c> socket on the same
port for IPv4 traffic. See the documentation.</p>
<p>
Own Id: OTP-8928 Aux Id: kunagi-193 [104] </p>
diff --git a/lib/kernel/doc/src/os.xml b/lib/kernel/doc/src/os.xml
index 739ac35d2a..6ba69d12a3 100644
--- a/lib/kernel/doc/src/os.xml
+++ b/lib/kernel/doc/src/os.xml
@@ -11,7 +11,7 @@
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-
+
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
@@ -19,7 +19,7 @@
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-
+
</legalnotice>
<title>os</title>
@@ -156,6 +156,32 @@ DirOut = os:cmd("dir"), % on Win32 platform</code>
</func>
<func>
+ <name name="set_signal" arity="2"/>
+ <fsummary>Enables or disables handling of OS signals.</fsummary>
+ <desc>
+ <p>Enables or disables OS signals.</p>
+ <p>Each signal may be set to one of the following options:</p>
+ <taglist>
+ <tag><c>ignore</c></tag>
+ <item>
+ This signal will be ignored.
+ </item>
+
+ <tag><c>default</c></tag>
+ <item>
+ This signal will use the default signal handler for the operating system.
+ </item>
+
+ <tag><c>handle</c></tag>
+ <item>
+ This signal will notify <c>erl_signal_server</c> when it is received by
+ the Erlang runtime system.
+ </item>
+ </taglist>
+ </desc>
+ </func>
+
+ <func>
<name name="system_time" arity="0"/>
<fsummary>Current OS system time.</fsummary>
<desc>
@@ -296,4 +322,3 @@ calendar:now_to_universal_time(TS),
</func>
</funcs>
</erlref>
-
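Hedged usage lines for the three options described above (the particular signals chosen are illustrative):

    ok = os:set_signal(sigterm, handle),   %% deliver SIGTERM to erl_signal_server
    ok = os:set_signal(sigchld, ignore),   %% drop SIGCHLD silently
    ok = os:set_signal(sigquit, default).  %% restore the OS default action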
diff --git a/lib/kernel/doc/src/seq_trace.xml b/lib/kernel/doc/src/seq_trace.xml
index ba7259219d..b80e87c118 100644
--- a/lib/kernel/doc/src/seq_trace.xml
+++ b/lib/kernel/doc/src/seq_trace.xml
@@ -427,12 +427,6 @@ prev_cnt := tcurr</code>
built with <c>Erl_Interface</c> only maintains one trace token, which
means that the C-node appears as one process from
the sequential tracing point of view.</p>
- <p>To be able to perform sequential tracing between
- distributed Erlang nodes, the distribution protocol has been
- extended (in a backward compatible way). An Erlang node
- supporting sequential tracing can communicate with an older
- (Erlang/OTP R3B) node but messages passed within that node can
- not be traced.</p>
</section>
<section>
diff --git a/lib/kernel/include/inet.hrl b/lib/kernel/include/inet.hrl
index b39df8c3f2..df788aca08 100644
--- a/lib/kernel/include/inet.hrl
+++ b/lib/kernel/include/inet.hrl
@@ -22,7 +22,7 @@
-record(hostent,
{
- h_name :: inet:hostname(), %% offical name of host
+ h_name :: inet:hostname(), %% official name of host
h_aliases = [] :: [inet:hostname()], %% alias list
h_addrtype :: 'inet' | 'inet6', %% host address type
h_length :: non_neg_integer(), %% length of address
diff --git a/lib/kernel/src/Makefile b/lib/kernel/src/Makefile
index 2b72f78dcf..2a89faaf13 100644
--- a/lib/kernel/src/Makefile
+++ b/lib/kernel/src/Makefile
@@ -71,6 +71,7 @@ MODULES = \
erl_distribution \
erl_epmd \
erl_reply \
+ erl_signal_handler \
erts_debug \
error_handler \
error_logger \
diff --git a/lib/kernel/src/code.erl b/lib/kernel/src/code.erl
index 5a7ca493cc..2a06d0cb15 100644
--- a/lib/kernel/src/code.erl
+++ b/lib/kernel/src/code.erl
@@ -489,13 +489,13 @@ prepare_check_uniq_1([], [_|_]=Errors) ->
{error,Errors}.
partition_on_load(Prep) ->
- P = fun({_,{Bin,_,_}}) ->
- erlang:has_prepared_code_on_load(Bin)
+ P = fun({_,{PC,_,_}}) ->
+ erlang:has_prepared_code_on_load(PC)
end,
lists:partition(P, Prep).
verify_prepared([{M,{Prep,Name,_Native}}|T])
- when is_atom(M), is_binary(Prep), is_list(Name) ->
+ when is_atom(M), is_list(Name) ->
try erlang:has_prepared_code_on_load(Prep) of
false ->
verify_prepared(T);
@@ -562,10 +562,10 @@ prepare_loading_fun() ->
GetNative = get_native_fun(),
fun(Mod, FullName, Beam) ->
case erlang:prepare_loading(Mod, Beam) of
- Prepared when is_binary(Prepared) ->
- {ok,{Prepared,FullName,GetNative(Beam)}};
{error,_}=Error ->
- Error
+ Error;
+ Prepared ->
+ {ok,{Prepared,FullName,GetNative(Beam)}}
end
end.
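For context, the relaxed guards above reflect that erlang:prepare_loading/2 may now return an opaque handle (a magic reference, as the multi_load_SUITE change further down also assumes) rather than a binary. A minimal sketch of the call sequence, where my_mod is a hypothetical module that is compiled but not yet loaded:

    {my_mod, Beam, _File} = code:get_object_code(my_mod),
    Prepared = erlang:prepare_loading(my_mod, Beam),    %% opaque handle, not necessarily a binary
    false = erlang:has_prepared_code_on_load(Prepared),
    ok = erlang:finish_loading([Prepared]).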
diff --git a/lib/kernel/src/dist_ac.erl b/lib/kernel/src/dist_ac.erl
index 6c2fa0b6b1..e63c969b79 100644
--- a/lib/kernel/src/dist_ac.erl
+++ b/lib/kernel/src/dist_ac.erl
@@ -123,7 +123,7 @@ load_application(AppName, DistNodes) ->
gen_server:call(?DIST_AC, {load_application, AppName, DistNodes}, infinity).
takeover_application(AppName, RestartType) ->
- case validRestartType(RestartType) of
+ case valid_restart_type(RestartType) of
true ->
wait_for_sync_dacs(),
Nodes = get_nodes(AppName),
@@ -1514,10 +1514,10 @@ dist_del_node(Appls, Node) ->
Appl#appl{run = NRun}
end, Appls).
-validRestartType(permanent) -> true;
-validRestartType(temporary) -> true;
-validRestartType(transient) -> true;
-validRestartType(_RestartType) -> false.
+valid_restart_type(permanent) -> true;
+valid_restart_type(temporary) -> true;
+valid_restart_type(transient) -> true;
+valid_restart_type(_RestartType) -> false.
dist_mismatch(AppName, Node) ->
error_msg("Distribution mismatch for application \"~p\" on nodes ~p and ~p~n",
diff --git a/lib/kernel/src/erl_signal_handler.erl b/lib/kernel/src/erl_signal_handler.erl
new file mode 100644
index 0000000000..8f924d2adc
--- /dev/null
+++ b/lib/kernel/src/erl_signal_handler.erl
@@ -0,0 +1,57 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1996-2013. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-module(erl_signal_handler).
+-behaviour(gen_event).
+-export([init/1, format_status/2,
+ handle_event/2, handle_call/2, handle_info/2,
+ terminate/2, code_change/3]).
+
+-record(state,{}).
+
+init(_Args) ->
+ {ok, #state{}}.
+
+handle_event(sigusr1, S) ->
+ erlang:halt("Received SIGUSR1"),
+ {ok, S};
+handle_event(sigquit, S) ->
+ erlang:halt(),
+ {ok, S};
+handle_event(sigterm, S) ->
+ error_logger:info_msg("SIGTERM received - shutting down~n"),
+ ok = init:stop(),
+ {ok, S};
+handle_event(_SignalMsg, S) ->
+ {ok, S}.
+
+handle_info(_Info, S) ->
+ {ok, S}.
+
+handle_call(_Request, S) ->
+ {ok, ok, S}.
+
+format_status(_Opt, [_Pdict,_S]) ->
+ ok.
+
+code_change(_OldVsn, S, _Extra) ->
+ {ok, S}.
+
+terminate(_Args, _S) ->
+ ok.
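The handler above is installed by kernel at start-up (see the kernel.erl change below) and can be swapped out at runtime, for example to log a signal instead of halting. A sketch, where my_signal_handler is a hypothetical gen_event callback module:

    ok = gen_event:swap_handler(erl_signal_server,
                                {erl_signal_handler, []},
                                {my_signal_handler, []}).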
diff --git a/lib/kernel/src/error_logger.erl b/lib/kernel/src/error_logger.erl
index 3523f680a3..3ee8e2c6e6 100644
--- a/lib/kernel/src/error_logger.erl
+++ b/lib/kernel/src/error_logger.erl
@@ -360,8 +360,12 @@ init(Max) when is_integer(Max) ->
%% go back.
init({go_back, _PostState}) ->
{ok, {?buffer_size, 0, []}};
-init(_) -> %% Start and just relay to other
- {ok, []}. %% node if node(GLeader) =/= node().
+init(_) ->
+ %% The error logger process may receive a huge amount of
+ %% messages. Make sure that they are stored off heap to
+ %% avoid excessive GCs.
+ process_flag(message_queue_data, off_heap),
+ {ok, []}.
-spec handle_event(term(), state()) -> {'ok', state()}.
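The message_queue_data flag used above is not specific to the error logger; any process that expects a long mailbox can opt in. A minimal sketch (loop/0 stands for a hypothetical receive loop):

    Pid = spawn_opt(fun() -> loop() end,
                    [{message_queue_data, off_heap}]),
    %% or, from inside an already running process:
    process_flag(message_queue_data, off_heap).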
diff --git a/lib/kernel/src/erts_debug.erl b/lib/kernel/src/erts_debug.erl
index 7b3f1e313a..ad92aafc2f 100644
--- a/lib/kernel/src/erts_debug.erl
+++ b/lib/kernel/src/erts_debug.erl
@@ -35,7 +35,8 @@
dump_monitors/1, dump_links/1, flat_size/1,
get_internal_state/1, instructions/0, lock_counters/1,
map_info/1, same/2, set_internal_state/2,
- size_shared/1, copy_shared/1]).
+ size_shared/1, copy_shared/1, dirty_cpu/2, dirty_io/2,
+ dirty/3]).
-spec breakpoint(MFA, Flag) -> non_neg_integer() when
MFA :: {Module :: module(),
@@ -182,6 +183,28 @@ same(_, _) ->
set_internal_state(_, _) ->
erlang:nif_error(undef).
+-spec dirty_cpu(Term1, Term2) -> term() when
+ Term1 :: term(),
+ Term2 :: term().
+
+dirty_cpu(_, _) ->
+ erlang:nif_error(undef).
+
+-spec dirty_io(Term1, Term2) -> term() when
+ Term1 :: term(),
+ Term2 :: term().
+
+dirty_io(_, _) ->
+ erlang:nif_error(undef).
+
+-spec dirty(Term1, Term2, Term3) -> term() when
+ Term1 :: term(),
+ Term2 :: term(),
+ Term3 :: term().
+
+dirty(_, _, _) ->
+ erlang:nif_error(undef).
+
%%% End of BIFs
%% size(Term)
diff --git a/lib/kernel/src/file.erl b/lib/kernel/src/file.erl
index 1971df9038..79e72cdc6d 100644
--- a/lib/kernel/src/file.erl
+++ b/lib/kernel/src/file.erl
@@ -1424,7 +1424,7 @@ path_open_first([Path|Rest], Name, Mode, LastError) ->
case open(FileName, Mode) of
{ok, Fd} ->
{ok, Fd, FileName};
- {error, enoent} ->
+ {error, Reason} when Reason =:= enoent; Reason =:= enotdir ->
path_open_first(Rest, Name, Mode, LastError);
Error ->
Error
diff --git a/lib/kernel/src/inet_parse.erl b/lib/kernel/src/inet_parse.erl
index b0a3ee3008..9b47199e08 100644
--- a/lib/kernel/src/inet_parse.erl
+++ b/lib/kernel/src/inet_parse.erl
@@ -701,8 +701,8 @@ dup(N, E, L) when is_integer(N), N >= 1 ->
-%% Convert IPv4 adress to ascii
-%% Convert IPv6 / IPV4 adress to ascii (plain format)
+%% Convert IPv4 address to ascii
+%% Convert IPv6 / IPV4 address to ascii (plain format)
ntoa({A,B,C,D}) ->
integer_to_list(A) ++ "." ++ integer_to_list(B) ++ "." ++
integer_to_list(C) ++ "." ++ integer_to_list(D);
diff --git a/lib/kernel/src/inet_udp.erl b/lib/kernel/src/inet_udp.erl
index 8a8aa8ecca..c69791b9aa 100644
--- a/lib/kernel/src/inet_udp.erl
+++ b/lib/kernel/src/inet_udp.erl
@@ -113,7 +113,7 @@ fdopen(Fd, Opts) ->
%% Here's how:
%% Reverse the list.
%% For each head option go through the tail and remove
-%% all occurences of the same option from the tail.
+%% all occurrences of the same option from the tail.
%% Store that head option and iterate using the new tail.
%% Return the list of stored head options.
optuniquify(List) ->
@@ -122,8 +122,8 @@ optuniquify(List) ->
optuniquify([], Result) ->
Result;
optuniquify([Opt | Tail], Result) ->
- %% Remove all occurences of Opt in Tail,
- %% prepend Opt to Result,
+ %% Remove all occurrences of Opt in Tail,
+ %% prepend Opt to Result,
%% then iterate back here.
optuniquify(Opt, Tail, [], Result).
diff --git a/lib/kernel/src/kernel.app.src b/lib/kernel/src/kernel.app.src
index 4d08a55c7c..25e4ddd95c 100644
--- a/lib/kernel/src/kernel.app.src
+++ b/lib/kernel/src/kernel.app.src
@@ -34,6 +34,7 @@
erl_boot_server,
erl_distribution,
erl_reply,
+ erl_signal_handler,
error_handler,
error_logger,
file,
diff --git a/lib/kernel/src/kernel.appup.src b/lib/kernel/src/kernel.appup.src
index b505524471..2dc90e2b3e 100644
--- a/lib/kernel/src/kernel.appup.src
+++ b/lib/kernel/src/kernel.appup.src
@@ -18,7 +18,7 @@
%% %CopyrightEnd%
{"%VSN%",
%% Up from - max one major revision back
- [{<<"5\\.[0-1](\\.[0-9]+)*">>,[restart_new_emulator]}], % OTP-19.*
+ [{<<"5\\.[0-2](\\.[0-9]+)*">>,[restart_new_emulator]}], % OTP-19.*
%% Down to - max one major revision back
- [{<<"5\\.[0-1](\\.[0-9]+)*">>,[restart_new_emulator]}] % OTP-19.*
+ [{<<"5\\.[0-2](\\.[0-9]+)*">>,[restart_new_emulator]}] % OTP-19.*
}.
diff --git a/lib/kernel/src/kernel.erl b/lib/kernel/src/kernel.erl
index 3d0ef81318..59eca242b1 100644
--- a/lib/kernel/src/kernel.erl
+++ b/lib/kernel/src/kernel.erl
@@ -32,6 +32,14 @@
start(_, []) ->
case supervisor:start_link({local, kernel_sup}, kernel, []) of
{ok, Pid} ->
+ %% add signal handler
+ case whereis(erl_signal_server) of
+ %% in case of minimal mode
+ undefined -> ok;
+ _ ->
+ ok = gen_event:add_handler(erl_signal_server, erl_signal_handler, [])
+ end,
+ %% add error handler
Type = get_error_logger_type(),
case error_logger:swap_handler(Type) of
ok -> {ok, Pid, []};
@@ -131,6 +139,9 @@ init([]) ->
permanent, 2000, worker, [inet_db]},
NetSup = {net_sup, {erl_distribution, start_link, []},
permanent, infinity, supervisor,[erl_distribution]},
+ SigSrv = #{id => erl_signal_server,
+ start => {gen_event, start_link, [{local, erl_signal_server}]},
+ type => worker, restart => permanent, shutdown => 2000, modules => dynamic},
DistAC = start_dist_ac(),
Timer = start_timer(),
@@ -141,7 +152,7 @@ init([]) ->
permanent, infinity, supervisor, [?MODULE]},
{ok, {SupFlags,
[Code, Rpc, Global, InetDb | DistAC] ++
- [NetSup, Glo_grp, File,
+ [NetSup, Glo_grp, File, SigSrv,
StdError, User, Config, SafeSupervisor] ++ Timer}}
end;
init(safe) ->
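The SigSrv entry above uses the map form of a child specification. For comparison, an equivalent old-style tuple (written here for illustration, not part of the patch) would be:

    {erl_signal_server,
     {gen_event, start_link, [{local, erl_signal_server}]},
     permanent, 2000, worker, dynamic}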
diff --git a/lib/kernel/src/os.erl b/lib/kernel/src/os.erl
index f8519d3a5e..7e83b17add 100644
--- a/lib/kernel/src/os.erl
+++ b/lib/kernel/src/os.erl
@@ -29,7 +29,7 @@
-export([getenv/0, getenv/1, getenv/2, getpid/0,
perf_counter/0, perf_counter/1,
- putenv/2, system_time/0, system_time/1,
+ putenv/2, set_signal/2, system_time/0, system_time/1,
timestamp/0, unsetenv/1]).
-spec getenv() -> [string()].
@@ -104,6 +104,15 @@ timestamp() ->
unsetenv(_) ->
erlang:nif_error(undef).
+-spec set_signal(Signal, Option) -> 'ok' when
+ Signal :: 'sighup' | 'sigquit' | 'sigabrt' | 'sigalrm' |
+ 'sigterm' | 'sigusr1' | 'sigusr2' | 'sigchld' |
+ 'sigstop' | 'sigtstp',
+ Option :: 'default' | 'handle' | 'ignore'.
+
+set_signal(_Signal, _Option) ->
+ erlang:nif_error(undef).
+
%%% End of BIFs
-spec type() -> {Osfamily, Osname} when
@@ -289,12 +298,11 @@ get_data(Port, MonRef, Eot, Sofar) ->
more ->
get_data(Port, MonRef, Eot, [Sofar,Bytes]);
Last ->
- Port ! {self(), close},
- flush_until_closed(Port),
- flush_exit(Port),
+ catch port_close(Port),
+ flush_until_down(Port, MonRef),
iolist_to_binary([Sofar, Last])
end;
- {'DOWN', MonRef, _, _ , _} ->
+ {'DOWN', MonRef, _, _, _} ->
flush_exit(Port),
iolist_to_binary(Sofar)
end.
@@ -308,18 +316,25 @@ eot(Bs, Eot) ->
binary:part(Bs,{0, Pos})
end.
-flush_until_closed(Port) ->
+%% When port_close returns we know that all the
+%% messages sent have been sent and that the
+%% DOWN message is after them all.
+flush_until_down(Port, MonRef) ->
receive
{Port, {data, _Bytes}} ->
- flush_until_closed(Port);
- {Port, closed} ->
- true
+ flush_until_down(Port, MonRef);
+ {'DOWN', MonRef, _, _, _} ->
+ flush_exit(Port)
end.
+%% The exit signal is always delivered before
+%% the down signal, so we can be sure that if there
+%% was an exit message sent, it will be in the
+%% mailbox now.
flush_exit(Port) ->
receive
{'EXIT', Port, _} ->
ok
- after 1 -> % force context switch
+ after 0 ->
ok
end.
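Condensed, the ordering argument in the comments above amounts to the following pattern (a sketch only; Port is assumed to be a port opened, and therefore linked, by the calling process):

    close_and_flush(Port) ->
        MonRef = erlang:monitor(port, Port),
        catch erlang:port_close(Port),
        flush_until_down(Port, MonRef).

    flush_until_down(Port, MonRef) ->
        receive
            {Port, {data, _}}         -> flush_until_down(Port, MonRef);
            {'DOWN', MonRef, _, _, _} -> ok   %% all port messages have been consumed
        end.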
diff --git a/lib/kernel/src/rpc.erl b/lib/kernel/src/rpc.erl
index 21bff02214..bd6ea26678 100644
--- a/lib/kernel/src/rpc.erl
+++ b/lib/kernel/src/rpc.erl
@@ -67,17 +67,27 @@
%%------------------------------------------------------------------------
+
+%% The rex server may receive a huge amount of
+%% messages. Make sure that they are stored off heap to
+%% avoid excessive GCs.
+
+-define(SPAWN_OPTS, [{spawn_opt,[{message_queue_data,off_heap}]}]).
+
%% Remote execution and broadcasting facility
-spec start() -> {'ok', pid()} | 'ignore' | {'error', term()}.
start() ->
- gen_server:start({local,?NAME}, ?MODULE, [], []).
+ gen_server:start({local,?NAME}, ?MODULE, [], ?SPAWN_OPTS).
-spec start_link() -> {'ok', pid()} | 'ignore' | {'error', term()}.
start_link() ->
- gen_server:start_link({local,?NAME}, ?MODULE, [], []).
+ %% The rex server process may receive a huge amount of
+ %% messages. Make sure that they are stored off heap to
+ %% avoid excessive GCs.
+ gen_server:start_link({local,?NAME}, ?MODULE, [], ?SPAWN_OPTS).
-spec stop() -> term().
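The ?SPAWN_OPTS macro is passed straight through as gen_server start options, so the same pattern can be reused for any server expected to receive a large message volume (my_server is a hypothetical callback module):

    gen_server:start_link({local, my_server}, my_server, [],
                          [{spawn_opt, [{message_queue_data, off_heap}]}]).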
diff --git a/lib/kernel/test/application_SUITE.erl b/lib/kernel/test/application_SUITE.erl
index 81407e9d96..b4cf31b210 100644
--- a/lib/kernel/test/application_SUITE.erl
+++ b/lib/kernel/test/application_SUITE.erl
@@ -1498,7 +1498,7 @@ otp_5363(Conf) when is_list(Conf) ->
%% Ticket: OTP-5606
%% Slogan: Problems with starting a distributed application
%%-----------------------------------------------------------------
-%% Test of several processes simultanously starting the same
+%% Test of several processes simultaneously starting the same
%% distributed application.
otp_5606(Conf) when is_list(Conf) ->
diff --git a/lib/kernel/test/code_SUITE.erl b/lib/kernel/test/code_SUITE.erl
index 4914ce9e4c..19d36a7613 100644
--- a/lib/kernel/test/code_SUITE.erl
+++ b/lib/kernel/test/code_SUITE.erl
@@ -323,7 +323,7 @@ load_abs(Config) when is_list(Config) ->
{error, nofile} = code:load_abs(TestDir ++ "/duuuumy_mod"),
{error, badfile} = code:load_abs(TestDir ++ "/code_a_test"),
{'EXIT', _} = (catch code:load_abs({})),
- {'EXIT', _} = (catch code:load_abs("Non-latin-имя-файла")),
+ {error, nofile} = code:load_abs("Non-latin-имя-файла"),
{module, code_b_test} = code:load_abs(TestDir ++ "/code_b_test"),
code:stick_dir(TestDir),
{error, sticky_directory} = code:load_abs(TestDir ++ "/code_b_test"),
@@ -621,20 +621,28 @@ sticky_compiler(Files, PrivDir) ->
[R || R <- Rets, R =/= ok].
do_sticky_compile(Mod, Dir) ->
- %% Make sure that the module is loaded. A module being sticky
- %% only prevents it from begin reloaded, not from being loaded
- %% from the wrong place to begin with.
- Mod = Mod:module_info(module),
- File = filename:append(Dir, atom_to_list(Mod)),
- Src = io_lib:format("-module(~s).\n"
- "-export([test/1]).\n"
- "test(me) -> fail.\n", [Mod]),
- ok = file:write_file(File++".erl", Src),
- case c:c(File, [{outdir,Dir}]) of
- {ok,Module} ->
- Module:test(me);
- {error,sticky_directory} ->
- ok
+ case code:is_sticky(Mod) of
+ true ->
+ %% Make sure that the module is loaded. A module being sticky
+ %% only prevents it from being reloaded, not from being loaded
+ %% from the wrong place to begin with.
+ Mod = Mod:module_info(module),
+ File = filename:append(Dir, atom_to_list(Mod)),
+ Src = io_lib:format("-module(~s).\n"
+ "-export([test/1]).\n"
+ "test(me) -> fail.\n", [Mod]),
+ ok = file:write_file(File++".erl", Src),
+ case c:c(File, [{outdir,Dir}]) of
+ {ok,Module} ->
+ Module:test(me);
+ {error,sticky_directory} ->
+ ok
+ end;
+ false ->
+ %% For some reason the module is not sticky.
+ %% It could be that the .erlang file has
+ %% unstuck it.
+ {Mod, is_not_sticky}
end.
%% Test that the -pa and -pz options work as expected.
@@ -1352,9 +1360,8 @@ create_big_boot(Config) ->
%% corresponding beam file (if hipe is not enabled).
filter_app("hipe",_) -> false;
-%% Dialyzer and typer depends on hipe
+%% Dialyzer depends on hipe
filter_app("dialyzer",_) -> false;
-filter_app("typer",_) -> false;
%% Orber requires explicit configuration
filter_app("orber",_) -> false;
diff --git a/lib/kernel/test/erl_distribution_SUITE.erl b/lib/kernel/test/erl_distribution_SUITE.erl
index f630896e03..09c80a0956 100644
--- a/lib/kernel/test/erl_distribution_SUITE.erl
+++ b/lib/kernel/test/erl_distribution_SUITE.erl
@@ -233,7 +233,7 @@ time_ping(Node) ->
erlang:convert_time_unit(T1 - T0, native, millisecond).
%% Keep the connection with the client node up.
-%% This is neccessary as the client node runs with much shorter
+%% This is necessary as the client node runs with much shorter
%% tick time !!
keep_conn(Node) ->
sleep(1),
@@ -1059,7 +1059,7 @@ monitor_nodes_otp_6481_test(Config, TestType) when is_list(Config) ->
RemotePid = spawn(Node,
fun () ->
receive after 1500 -> ok end,
- %% infinit loop of msgs
+ %% infinite loop of msgs
%% we want an endless stream of messages and the kill
%% the node mercilessly.
%% We then want to ensure that the nodedown message arrives
diff --git a/lib/kernel/test/erl_distribution_wb_SUITE.erl b/lib/kernel/test/erl_distribution_wb_SUITE.erl
index 6a23ad0d11..61aa3b32ee 100644
--- a/lib/kernel/test/erl_distribution_wb_SUITE.erl
+++ b/lib/kernel/test/erl_distribution_wb_SUITE.erl
@@ -30,7 +30,7 @@
%% 1)
%%
-%% Connections are now always set up symetrically with respect to
+%% Connections are now always set up symmetrically with respect to
%% publication. If connecting node doesn't send DFLAG_PUBLISHED
%% the other node wont send DFLAG_PUBLISHED. If the connecting
%% node send DFLAG_PUBLISHED but the other node doesn't send
diff --git a/lib/kernel/test/error_logger_SUITE.erl b/lib/kernel/test/error_logger_SUITE.erl
index b6e7551741..bb01c2384d 100644
--- a/lib/kernel/test/error_logger_SUITE.erl
+++ b/lib/kernel/test/error_logger_SUITE.erl
@@ -30,6 +30,7 @@
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_group/2,end_per_group/2,
+ off_heap/1,
error_report/1, info_report/1, error/1, info/1,
emulator/1, tty/1, logfile/1, add/1, delete/1]).
@@ -45,7 +46,7 @@ suite() ->
{timetrap,{minutes,1}}].
all() ->
- [error_report, info_report, error, info, emulator, tty,
+ [off_heap, error_report, info_report, error, info, emulator, tty,
logfile, add, delete].
groups() ->
@@ -66,6 +67,16 @@ end_per_group(_GroupName, Config) ->
%%-----------------------------------------------------------------
+off_heap(_Config) ->
+ %% The error_logger process may receive a huge amount of
+ %% messages. Make sure that they are stored off heap to
+ %% avoid excessive GCs.
+ MQD = message_queue_data,
+ {MQD,off_heap} = process_info(whereis(error_logger), MQD),
+ ok.
+
+%%-----------------------------------------------------------------
+
error_report(Config) when is_list(Config) ->
error_logger:add_report_handler(?MODULE, self()),
Rep1 = [{tag1,"data1"},{tag2,data2},{tag3,3}],
diff --git a/lib/kernel/test/file_SUITE.erl b/lib/kernel/test/file_SUITE.erl
index f2094431d8..b402f01758 100644
--- a/lib/kernel/test/file_SUITE.erl
+++ b/lib/kernel/test/file_SUITE.erl
@@ -18,7 +18,7 @@
%% %CopyrightEnd%
%%
-%% This is a developement feature when developing a new file module,
+%% This is a development feature when developing a new file module,
%% ugly but practical.
-ifndef(FILE_MODULE).
-define(FILE_MODULE, file).
diff --git a/lib/kernel/test/file_SUITE_data/realmen.html b/lib/kernel/test/file_SUITE_data/realmen.html
index c810a5d088..92e13f23b8 100644
--- a/lib/kernel/test/file_SUITE_data/realmen.html
+++ b/lib/kernel/test/file_SUITE_data/realmen.html
@@ -237,7 +237,7 @@ destroy most of the interesting uses for EQUIVALENCE, and make it
impossible to modify the operating system code with negative
subscripts. Worst of all, bounds checking is inefficient.
-<LI> Source code maintainance systems. A Real Programmer keeps his
+<LI> Source code maintenance systems. A Real Programmer keeps his
code locked up in a card file, because it implies that its owner
cannot leave his important programs unguarded [5].
@@ -396,7 +396,7 @@ double stuff Oreos for special occasions.
<LI> Underneath the Oreos is a flow-charting template, left there by
the previous occupant of the office. (Real Programmers write programs,
-not documentation. Leave that to the maintainence people.)
+not documentation. Leave that to the maintenance people.)
</UL> <P>
diff --git a/lib/kernel/test/multi_load_SUITE.erl b/lib/kernel/test/multi_load_SUITE.erl
index 369e25ac64..920839f4f9 100644
--- a/lib/kernel/test/multi_load_SUITE.erl
+++ b/lib/kernel/test/multi_load_SUITE.erl
@@ -144,14 +144,14 @@ prep_magic([H|T]) ->
prep_magic(Tuple) when is_tuple(Tuple) ->
L = prep_magic(tuple_to_list(Tuple)),
list_to_tuple(L);
-prep_magic(Bin) when is_binary(Bin) ->
- try erlang:has_prepared_code_on_load(Bin) of
+prep_magic(Ref) when is_reference(Ref) ->
+ try erlang:has_prepared_code_on_load(Ref) of
false ->
- %% Create a different kind of magic binary.
+ %% Create a different kind of magic ref.
ets:match_spec_compile([{'_',[true],['$_']}])
catch
_:_ ->
- Bin
+ Ref
end;
prep_magic(Other) ->
Other.
diff --git a/lib/kernel/test/rpc_SUITE.erl b/lib/kernel/test/rpc_SUITE.erl
index 1c72ddc87f..d76c4097d8 100644
--- a/lib/kernel/test/rpc_SUITE.erl
+++ b/lib/kernel/test/rpc_SUITE.erl
@@ -21,7 +21,8 @@
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_group/2,end_per_group/2]).
--export([call/1, block_call/1, multicall/1, multicall_timeout/1,
+-export([off_heap/1,
+ call/1, block_call/1, multicall/1, multicall_timeout/1,
multicall_dies/1, multicall_node_dies/1,
called_dies/1, called_node_dies/1,
called_throws/1, call_benchmark/1, async_call/1]).
@@ -35,7 +36,7 @@ suite() ->
{timetrap,{minutes,2}}].
all() ->
- [call, block_call, multicall, multicall_timeout,
+ [off_heap, call, block_call, multicall, multicall_timeout,
multicall_dies, multicall_node_dies, called_dies,
called_node_dies, called_throws, call_benchmark,
async_call].
@@ -55,6 +56,13 @@ init_per_group(_GroupName, Config) ->
end_per_group(_GroupName, Config) ->
Config.
+off_heap(_Config) ->
+ %% The rex server process may receive a huge amount of
+ %% messages. Make sure that they are stored off heap to
+ %% avoid excessive GCs.
+ MQD = message_queue_data,
+ {MQD,off_heap} = process_info(whereis(rex), MQD),
+ ok.
%% Test different rpc calls.
diff --git a/lib/megaco/src/text/megaco_text_gen_prev3a.hrl b/lib/megaco/src/text/megaco_text_gen_prev3a.hrl
index ae4a990779..9c75ee5926 100644
--- a/lib/megaco/src/text/megaco_text_gen_prev3a.hrl
+++ b/lib/megaco/src/text/megaco_text_gen_prev3a.hrl
@@ -424,7 +424,7 @@ enc_TransactionReply(#'TransactionReply'{transactionId = Tid,
transactionResult = Res,
%% These fields are actually not
%% supported in this implementation,
- %% but because the messanger module
+ %% but because the messenger module
%% cannot see any diff between the
%% various v3 implementations...
segmentNumber = asn1_NOVALUE,
diff --git a/lib/megaco/src/text/megaco_text_gen_prev3b.hrl b/lib/megaco/src/text/megaco_text_gen_prev3b.hrl
index e7fb85d137..7e85be4d64 100644
--- a/lib/megaco/src/text/megaco_text_gen_prev3b.hrl
+++ b/lib/megaco/src/text/megaco_text_gen_prev3b.hrl
@@ -424,7 +424,7 @@ enc_TransactionReply(#'TransactionReply'{transactionId = Tid,
transactionResult = Res,
%% These fields are actually not
%% supported in this implementation,
- %% but because the messanger module
+ %% but because the messenger module
%% cannot see any diff between the
%% various v3 implementations...
segmentNumber = asn1_NOVALUE,
diff --git a/lib/megaco/src/text/megaco_text_gen_prev3c.hrl b/lib/megaco/src/text/megaco_text_gen_prev3c.hrl
index 722e97a743..700392efe2 100644
--- a/lib/megaco/src/text/megaco_text_gen_prev3c.hrl
+++ b/lib/megaco/src/text/megaco_text_gen_prev3c.hrl
@@ -434,7 +434,7 @@ enc_TransactionReply(#'TransactionReply'{transactionId = Tid,
transactionResult = Res,
%% These fields are actually not
%% supported in this implementation,
- %% but because the messanger module
+ %% but because the messenger module
%% cannot see any diff between the
%% various v3 implementations...
segmentNumber = asn1_NOVALUE,
diff --git a/lib/mnesia/doc/src/Mnesia_chap5.xmlsrc b/lib/mnesia/doc/src/Mnesia_chap5.xmlsrc
index a83d1d77d2..62759c624b 100644
--- a/lib/mnesia/doc/src/Mnesia_chap5.xmlsrc
+++ b/lib/mnesia/doc/src/Mnesia_chap5.xmlsrc
@@ -362,11 +362,6 @@ ok
<seealso marker="mnesia_frag_hash">mnesia_frag_hash</seealso>
callback behavior. This property can explicitly be set at
table creation. Default is <c>mnesia_frag_hash</c>.</p>
- <p>Older tables, that were created before the concept of
- user-defined hash modules was introduced, use module
- <c>mnesia_frag_old_hash</c> to be backwards compatible.
- <c>mnesia_frag_old_hash</c> still uses the poor
- deprecated function <c>erlang:hash/1</c>.</p>
</item>
<tag><c>{hash_state, Term}</c></tag>
<item>
diff --git a/lib/mnesia/doc/src/notes.xml b/lib/mnesia/doc/src/notes.xml
index 51c98d0d3e..9f59759cb6 100644
--- a/lib/mnesia/doc/src/notes.xml
+++ b/lib/mnesia/doc/src/notes.xml
@@ -39,7 +39,23 @@
thus constitutes one section in this document. The title of each
section is the version number of Mnesia.</p>
- <section><title>Mnesia 4.14.2</title>
+ <section><title>Mnesia 4.14.3</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ Fixed crash in checkpoint handling when table was deleted
+ during backup.</p>
+ <p>
+ Own Id: OTP-14167</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Mnesia 4.14.2</title>
<section><title>Fixed Bugs and Malfunctions</title>
<list>
diff --git a/lib/mnesia/src/Makefile b/lib/mnesia/src/Makefile
index 5206e469a5..b68fc7d3d0 100644
--- a/lib/mnesia/src/Makefile
+++ b/lib/mnesia/src/Makefile
@@ -55,7 +55,6 @@ MODULES= \
mnesia_ext_sup \
mnesia_frag \
mnesia_frag_hash \
- mnesia_frag_old_hash \
mnesia_index \
mnesia_kernel_sup \
mnesia_late_loader \
diff --git a/lib/mnesia/src/mnesia.app.src b/lib/mnesia/src/mnesia.app.src
index af14826c90..a5d74d2d36 100644
--- a/lib/mnesia/src/mnesia.app.src
+++ b/lib/mnesia/src/mnesia.app.src
@@ -15,7 +15,6 @@
mnesia_ext_sup,
mnesia_frag,
mnesia_frag_hash,
- mnesia_frag_old_hash,
mnesia_index,
mnesia_kernel_sup,
mnesia_late_loader,
diff --git a/lib/mnesia/src/mnesia.erl b/lib/mnesia/src/mnesia.erl
index 6de7214776..dece995d39 100644
--- a/lib/mnesia/src/mnesia.erl
+++ b/lib/mnesia/src/mnesia.erl
@@ -316,7 +316,6 @@ ms() ->
mnesia_loader,
mnesia_frag,
mnesia_frag_hash,
- mnesia_frag_old_hash,
mnesia_index,
mnesia_kernel_sup,
mnesia_late_loader,
diff --git a/lib/mnesia/src/mnesia.hrl b/lib/mnesia/src/mnesia.hrl
index 0716dd87c8..da7e662288 100644
--- a/lib/mnesia/src/mnesia.hrl
+++ b/lib/mnesia/src/mnesia.hrl
@@ -49,12 +49,12 @@
%% It's important that counter is first, since we compare tid's
--record(tid,
+-record(tid,
{counter, %% serial no for tid
pid}). %% owner of tid
--record(tidstore,
+-record(tidstore,
{store, %% current ets table for tid
up_stores = [], %% list of upper layer stores for nested trans
level = 1}). %% transaction level
@@ -128,5 +128,4 @@
mnesia_lib:eval_debug_fun(I, C, ?FILE, ?LINE)).
-else.
-define(eval_debug_fun(I, C), ok).
--endif.
-
+-endif.
diff --git a/lib/mnesia/src/mnesia_checkpoint.erl b/lib/mnesia/src/mnesia_checkpoint.erl
index 9eb939e8d3..fc626940b4 100644
--- a/lib/mnesia/src/mnesia_checkpoint.erl
+++ b/lib/mnesia/src/mnesia_checkpoint.erl
@@ -909,7 +909,7 @@ retainer_loop(Cp = #checkpoint_args{name=Name}) ->
retainer_loop(Cp2);
{From, {iter_end, Iter}} ->
- retainer_fixtable(Iter#iter.oid_tab, false),
+ ?SAFE(retainer_fixtable(Iter#iter.oid_tab, false)),
Iters = Cp#checkpoint_args.iterators -- [Iter],
reply(From, Name, ok),
retainer_loop(Cp#checkpoint_args{iterators = Iters});
@@ -971,7 +971,8 @@ do_stop(Cp) ->
unset({checkpoint, Name}),
lists:foreach(fun deactivate_tab/1, Cp#checkpoint_args.retainers),
Iters = Cp#checkpoint_args.iterators,
- lists:foreach(fun(I) -> retainer_fixtable(I#iter.oid_tab, false) end, Iters).
+ [?SAFE(retainer_fixtable(Tab, false)) || #iter{main_tab=Tab} <- Iters],
+ ok.
deactivate_tab(R) ->
Name = R#retainer.cp_name,
@@ -1151,7 +1152,7 @@ do_change_copy(Cp, Tab, FromType, ToType) ->
Cp#checkpoint_args{retainers = Rs, nodes = writers(Rs)}.
check_iter(From, Iter) when Iter#iter.pid == From ->
- retainer_fixtable(Iter#iter.oid_tab, false),
+ ?SAFE(retainer_fixtable(Iter#iter.oid_tab, false)),
false;
check_iter(_From, _Iter) ->
true.
diff --git a/lib/mnesia/src/mnesia_event.erl b/lib/mnesia/src/mnesia_event.erl
index 7320d381ea..6f7531245f 100644
--- a/lib/mnesia/src/mnesia_event.erl
+++ b/lib/mnesia/src/mnesia_event.erl
@@ -114,7 +114,8 @@ handle_table_event({Oper, Record, TransId}, State) ->
handle_system_event({mnesia_checkpoint_activated, _Checkpoint}, State) ->
{ok, State};
-handle_system_event({mnesia_checkpoint_deactivated, _Checkpoint}, State) ->
+handle_system_event({mnesia_checkpoint_deactivated, Checkpoint}, State) ->
+ report_error("Checkpoint '~p' has been deactivated, last table copy deleted.\n",[Checkpoint]),
{ok, State};
handle_system_event({mnesia_up, Node}, State) ->
diff --git a/lib/mnesia/src/mnesia_frag.erl b/lib/mnesia/src/mnesia_frag.erl
index c6e812b36d..c39f30e140 100644
--- a/lib/mnesia/src/mnesia_frag.erl
+++ b/lib/mnesia/src/mnesia_frag.erl
@@ -58,9 +58,7 @@
-include("mnesia.hrl").
--define(OLD_HASH_MOD, mnesia_frag_old_hash).
-define(DEFAULT_HASH_MOD, mnesia_frag_hash).
-%%-define(DEFAULT_HASH_MOD, ?OLD_HASH_MOD). %% BUGBUG: New should be default
-record(frag_state,
{foreign_key,
@@ -80,7 +78,7 @@
lock(ActivityId, Opaque, {table , Tab}, LockKind) ->
case frag_names(Tab) of
[Tab] ->
- mnesia:lock(ActivityId, Opaque, {table, Tab}, LockKind);
+ mnesia:lock(ActivityId, Opaque, {table, Tab}, LockKind);
Frags ->
DeepNs = [mnesia:lock(ActivityId, Opaque, {table, F}, LockKind) ||
F <- Frags],
@@ -321,7 +319,7 @@ init_select(Tid,Opaque,Tab,Pat,Limit,LockKind) ->
{'EXIT', _} ->
mnesia:select(Tid, Opaque, Tab, Pat, Limit,LockKind);
FH ->
- FragNumbers = verify_numbers(FH,Pat),
+ FragNumbers = verify_numbers(FH,Pat),
Fun = fun(Num) ->
Name = n_to_frag_name(Tab, Num),
Node = val({Name, where_to_read}),
@@ -336,19 +334,19 @@ init_select(Tid,Opaque,Tab,Pat,Limit,LockKind) ->
end.
select_cont(_Tid,_,{frag_cont, '$end_of_table', [],_}) -> '$end_of_table';
-select_cont(Tid,Ts,{frag_cont, '$end_of_table', [{Tab,Node,Type}|Rest],Args}) ->
+select_cont(Tid,Ts,{frag_cont, '$end_of_table', [{Tab,Node,Type}|Rest],Args}) ->
{Spec,LockKind,Limit} = Args,
InitFun = fun(FixedSpec) -> mnesia:dirty_sel_init(Node,Tab,FixedSpec,Limit,Type) end,
Res = mnesia:fun_select(Tid,Ts,Tab,Spec,LockKind,Tab,InitFun,Limit,Node,Type),
frag_sel_cont(Res, Rest, Args);
-select_cont(Tid,Ts,{frag_cont, Cont, TabL, Args}) ->
+select_cont(Tid,Ts,{frag_cont, Cont, TabL, Args}) ->
frag_sel_cont(mnesia:select_cont(Tid,Ts,Cont),TabL,Args);
select_cont(Tid,Ts,Else) -> %% Not a fragmented table
mnesia:select_cont(Tid,Ts,Else).
frag_sel_cont('$end_of_table', [],_) ->
'$end_of_table';
-frag_sel_cont('$end_of_table', TabL,Args) ->
+frag_sel_cont('$end_of_table', TabL,Args) ->
{[], {frag_cont, '$end_of_table', TabL,Args}};
frag_sel_cont({Recs,Cont}, TabL,Args) ->
{Recs, {frag_cont, Cont, TabL,Args}}.
@@ -358,9 +356,9 @@ do_select(ActivityId, Opaque, Tab, MatchSpec, LockKind) ->
{'EXIT', _} ->
mnesia:select(ActivityId, Opaque, Tab, MatchSpec, LockKind);
FH ->
- FragNumbers = verify_numbers(FH,MatchSpec),
+ FragNumbers = verify_numbers(FH,MatchSpec),
Fun = fun(Num) ->
- Name = n_to_frag_name(Tab, Num),
+ Name = n_to_frag_name(Tab, Num),
Node = val({Name, where_to_read}),
mnesia:lock(ActivityId, Opaque, {table, Name}, LockKind),
{Name, Node}
@@ -398,7 +396,7 @@ do_select(ActivityId, Opaque, Tab, MatchSpec, LockKind) ->
verify_numbers(FH,MatchSpec) ->
HashState = FH#frag_state.hash_state,
- FragNumbers =
+ FragNumbers =
case FH#frag_state.hash_module of
HashMod when HashMod == ?DEFAULT_HASH_MOD ->
?DEFAULT_HASH_MOD:match_spec_to_frag_numbers(HashState, MatchSpec);
@@ -434,7 +432,7 @@ local_select(ReplyTo, Ref, RemoteNameNodes, MatchSpec) ->
end,
unlink(ReplyTo),
exit(normal).
-
+
remote_select(ReplyTo, Ref, NameNodes, MatchSpec) ->
do_remote_select(ReplyTo, Ref, NameNodes, MatchSpec).
@@ -805,22 +803,22 @@ make_deactivate(Tab) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Add a fragment to a fragmented table and fill it with half of
%% the records from one of the old fragments
-
+
make_multi_add_frag(Tab, SortedNs) when is_list(SortedNs) ->
verify_multi(Tab),
Ops = make_add_frag(Tab, SortedNs),
%% Propagate to foreigners
MoreOps = [make_add_frag(T, SortedNs) || T <- lookup_foreigners(Tab)],
- [Ops | MoreOps];
+ [Ops | MoreOps];
make_multi_add_frag(Tab, SortedNs) ->
mnesia:abort({bad_type, Tab, SortedNs}).
verify_multi(Tab) ->
FH = lookup_frag_hash(Tab),
ForeignKey = FH#frag_state.foreign_key,
- mnesia_schema:verify(undefined, ForeignKey,
- {combine_error, Tab,
+ mnesia_schema:verify(undefined, ForeignKey,
+ {combine_error, Tab,
"Op only allowed via foreign table",
{foreign_key, ForeignKey}}).
@@ -839,7 +837,7 @@ make_frag_names_and_acquire_locks(Tab, N, FragIndecies, DoNotLockN) ->
end,
FragNames = erlang:make_tuple(N, undefined),
lists:foldl(Fun, FragNames, FragIndecies).
-
+
make_add_frag(Tab, SortedNs) ->
Cs = mnesia_schema:incr_version(val({Tab, cstruct})),
mnesia_schema:ensure_active(Cs),
@@ -849,8 +847,8 @@ make_add_frag(Tab, SortedNs) ->
FragNames = make_frag_names_and_acquire_locks(Tab, N, WriteIndecies, true),
NewFrag = element(N, FragNames),
- NR = length(Cs#cstruct.ram_copies),
- ND = length(Cs#cstruct.disc_copies),
+ NR = length(Cs#cstruct.ram_copies),
+ ND = length(Cs#cstruct.disc_copies),
NDO = length(Cs#cstruct.disc_only_copies),
NExt = length(Cs#cstruct.external_copies),
NewCs = Cs#cstruct{name = NewFrag,
@@ -859,7 +857,7 @@ make_add_frag(Tab, SortedNs) ->
disc_copies = [],
disc_only_copies = [],
external_copies = []},
-
+
{NewCs2, _, _} = set_frag_nodes(NR, ND, NDO, NExt, NewCs, SortedNs, []),
[NewOp] = mnesia_schema:make_create_table(NewCs2),
@@ -944,7 +942,7 @@ do_split(FH, OldN, FragNames, [Rec | Recs], Ops) ->
Key = element(2, Rec),
NewOid = {NewFrag, Key},
OldOid = {OldFrag, Key},
- Ops2 = [{op, rec, unknown, {NewOid, [Rec], write}},
+ Ops2 = [{op, rec, unknown, {NewOid, [Rec], write}},
{op, rec, unknown, {OldOid, [OldOid], delete}} | Ops],
do_split(FH, OldN, FragNames, Recs, Ops2);
_NewFrag ->
@@ -958,7 +956,7 @@ do_split(_FH, _OldN, _FragNames, [], Ops) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Delete a fragment from a fragmented table
%% and merge its records with another fragment
-
+
make_multi_del_frag(Tab) ->
verify_multi(Tab),
Ops = make_del_frag(Tab),
@@ -1064,7 +1062,7 @@ do_merge(FH, OldN, FragNames, [Rec | Recs], Ops) ->
Key = element(2, Rec),
NewOid = {NewFrag, Key},
OldOid = {OldFrag, Key},
- Ops2 = [{op, rec, unknown, {NewOid, [Rec], write}},
+ Ops2 = [{op, rec, unknown, {NewOid, [Rec], write}},
{op, rec, unknown, {OldOid, [OldOid], delete}} | Ops],
do_merge(FH, OldN, FragNames, Recs, Ops2);
_NewFrag ->
@@ -1077,7 +1075,7 @@ do_merge(FH, OldN, FragNames, [Rec | Recs], Ops) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Add a node to the node pool of a fragmented table
-
+
make_multi_add_node(Tab, Node) ->
verify_multi(Tab),
Ops = make_add_node(Tab, Node),
@@ -1085,7 +1083,7 @@ make_multi_add_node(Tab, Node) ->
%% Propagate to foreigners
MoreOps = [make_add_node(T, Node) || T <- lookup_foreigners(Tab)],
[Ops | MoreOps].
-
+
make_add_node(Tab, Node) when is_atom(Node) ->
Pool = lookup_prop(Tab, node_pool),
case lists:member(Node, Pool) of
@@ -1114,7 +1112,7 @@ make_multi_del_node(Tab, Node) ->
%% Propagate to foreigners
MoreOps = [make_del_node(T, Node) || T <- lookup_foreigners(Tab)],
[Ops | MoreOps].
-
+
make_del_node(Tab, Node) when is_atom(Node) ->
Cs = mnesia_schema:incr_version(val({Tab, cstruct})),
mnesia_schema:ensure_active(Cs),
@@ -1147,8 +1145,8 @@ remove_node(Node, Cs) ->
case lists:member(Node, Pool) of
true ->
Pool2 = Pool -- [Node],
- Props = lists:keyreplace(node_pool, 1,
- Cs#cstruct.frag_properties,
+ Props = lists:keyreplace(node_pool, 1,
+ Cs#cstruct.frag_properties,
{node_pool, Pool2}),
{Cs#cstruct{frag_properties = Props}, true};
false ->
@@ -1180,18 +1178,10 @@ props_to_frag_hash(Tab, Props) ->
T when T == Tab ->
Foreign = mnesia_schema:pick(Tab, foreign_key, Props, must),
N = mnesia_schema:pick(Tab, n_fragments, Props, must),
-
case mnesia_schema:pick(Tab, hash_module, Props, undefined) of
undefined ->
- Split = mnesia_schema:pick(Tab, next_n_to_split, Props, must),
- Doubles = mnesia_schema:pick(Tab, n_doubles, Props, must),
- FH = {frag_hash, Foreign, N, Split, Doubles},
- HashState = ?OLD_HASH_MOD:init_state(Tab, FH),
- #frag_state{foreign_key = Foreign,
- n_fragments = N,
- hash_module = ?OLD_HASH_MOD,
- hash_state = HashState};
- HashMod ->
+ no_hash;
+ HashMod ->
HashState = mnesia_schema:pick(Tab, hash_state, Props, must),
#frag_state{foreign_key = Foreign,
n_fragments = N,
@@ -1216,13 +1206,9 @@ lookup_frag_hash(Tab) ->
case ?catch_val({Tab, frag_hash}) of
FH when is_record(FH, frag_state) ->
FH;
- {frag_hash, K, N, _S, _D} = FH ->
+ {frag_hash, _K, _N, _S, _D} ->
%% Old style. Kept for backwards compatibility.
- HashState = ?OLD_HASH_MOD:init_state(Tab, FH),
- #frag_state{foreign_key = K,
- n_fragments = N,
- hash_module = ?OLD_HASH_MOD,
- hash_state = HashState};
+ mnesia:abort({no_hash, Tab, frag_properties, frag_hash});
{'EXIT', _} ->
mnesia:abort({no_exists, Tab, frag_properties, frag_hash})
end.
@@ -1249,10 +1235,10 @@ key_pos(FH) ->
case FH#frag_state.foreign_key of
undefined ->
2;
- {_ForeignTab, Pos} ->
+ {_ForeignTab, Pos} ->
Pos
end.
-
+
%% Returns name of fragment table
key_to_frag_name({BaseTab, _} = Tab, Key) ->
N = key_to_frag_number(Tab, Key),
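With the old hash module removed, tables that still carry an old-style frag_hash property now abort with no_hash instead of being converted on the fly. New fragmented tables use mnesia_frag_hash by default; spelling out the supported callback explicitly looks like this (a sketch; table name and node pool are illustrative):

    mnesia:create_table(play,
        [{frag_properties, [{n_fragments, 4},
                            {node_pool, [node()]},
                            {hash_module, mnesia_frag_hash}]}]).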
diff --git a/lib/mnesia/src/mnesia_frag_old_hash.erl b/lib/mnesia/src/mnesia_frag_old_hash.erl
deleted file mode 100644
index b246c76236..0000000000
--- a/lib/mnesia/src/mnesia_frag_old_hash.erl
+++ /dev/null
@@ -1,133 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2002-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
-
-%%
-%%%----------------------------------------------------------------------
-%%% Purpose : Implements hashing functionality for fragmented tables
-%%%----------------------------------------------------------------------
-
--module(mnesia_frag_old_hash).
-%%-behaviour(mnesia_frag_hash).
-
--compile({nowarn_deprecated_function, {erlang,hash,2}}).
-
-%% Hashing callback functions
--export([
- init_state/2,
- add_frag/1,
- del_frag/1,
- key_to_frag_number/2,
- match_spec_to_frag_numbers/2
- ]).
-
--record(old_hash_state,
- {n_fragments,
- next_n_to_split,
- n_doubles}).
-
-%% Old style. Kept for backwards compatibility.
--record(frag_hash,
- {foreign_key,
- n_fragments,
- next_n_to_split,
- n_doubles}).
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-init_state(_Tab, InitialState) when InitialState == undefined ->
- #old_hash_state{n_fragments = 1,
- next_n_to_split = 1,
- n_doubles = 0};
-init_state(_Tab, FH) when is_record(FH, frag_hash) ->
- %% Old style. Kept for backwards compatibility.
- #old_hash_state{n_fragments = FH#frag_hash.n_fragments,
- next_n_to_split = FH#frag_hash.next_n_to_split,
- n_doubles = FH#frag_hash.n_doubles}.
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-add_frag(State) when is_record(State, old_hash_state) ->
- SplitN = State#old_hash_state.next_n_to_split,
- P = SplitN + 1,
- L = State#old_hash_state.n_doubles,
- NewN = State#old_hash_state.n_fragments + 1,
- State2 = case trunc(math:pow(2, L)) + 1 of
- P2 when P2 == P ->
- State#old_hash_state{n_fragments = NewN,
- next_n_to_split = 1,
- n_doubles = L + 1};
- _ ->
- State#old_hash_state{n_fragments = NewN,
- next_n_to_split = P}
- end,
- {State2, [SplitN], [NewN]}.
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-del_frag(State) when is_record(State, old_hash_state) ->
- P = State#old_hash_state.next_n_to_split - 1,
- L = State#old_hash_state.n_doubles,
- N = State#old_hash_state.n_fragments,
- if
- P < 1 ->
- L2 = L - 1,
- MergeN = trunc(math:pow(2, L2)),
- State2 = State#old_hash_state{n_fragments = N - 1,
- next_n_to_split = MergeN,
- n_doubles = L2},
- {State2, [N], [MergeN]};
- true ->
- MergeN = P,
- State2 = State#old_hash_state{n_fragments = N - 1,
- next_n_to_split = MergeN},
- {State2, [N], [MergeN]}
- end.
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-key_to_frag_number(State, Key) when is_record(State, old_hash_state) ->
- L = State#old_hash_state.n_doubles,
- A = erlang:hash(Key, trunc(math:pow(2, L))),
- P = State#old_hash_state.next_n_to_split,
- if
- A < P ->
- erlang:hash(Key, trunc(math:pow(2, L + 1)));
- true ->
- A
- end.
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-match_spec_to_frag_numbers(State, MatchSpec) when is_record(State, old_hash_state) ->
- case MatchSpec of
- [{HeadPat, _, _}] when is_tuple(HeadPat), tuple_size(HeadPat) > 2 ->
- KeyPat = element(2, HeadPat),
- case has_var(KeyPat) of
- false ->
- [key_to_frag_number(State, KeyPat)];
- true ->
- lists:seq(1, State#old_hash_state.n_fragments)
- end;
- _ ->
- lists:seq(1, State#old_hash_state.n_fragments)
- end.
-
-has_var(Pat) ->
- mnesia:has_var(Pat).
diff --git a/lib/mnesia/src/mnesia_monitor.erl b/lib/mnesia/src/mnesia_monitor.erl
index ab78c9b13e..ff58974aba 100644
--- a/lib/mnesia/src/mnesia_monitor.erl
+++ b/lib/mnesia/src/mnesia_monitor.erl
@@ -169,7 +169,7 @@ check_protocol([{Node, {accept, Mon, Version, Protocol}} | Tail], Protocols) ->
verbose("Failed to connect with ~p. ~p protocols rejected. "
"expected version = ~p, expected protocol = ~p~n",
[Node, Protocols, Version, Protocol]),
- unlink(Mon), % Get rid of unneccessary link
+ unlink(Mon), % Get rid of unnecessary link
check_protocol(Tail, Protocols)
end;
check_protocol([{Node, {reject, _Mon, Version, Protocol}} | Tail], Protocols) ->
diff --git a/lib/mnesia/src/mnesia_schema.erl b/lib/mnesia/src/mnesia_schema.erl
index 0e4017e4c3..b0d7965886 100644
--- a/lib/mnesia/src/mnesia_schema.erl
+++ b/lib/mnesia/src/mnesia_schema.erl
@@ -1941,7 +1941,7 @@ make_change_table_copy_type(Tab, Node, ToS) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% change index functions ....
-%% Pos is allready added by 1 in both of these functions
+%% Pos is already added by 1 in both of these functions
add_table_index(Tab, Pos) ->
schema_transaction(fun() -> do_add_table_index(Tab, Pos) end).
diff --git a/lib/mnesia/test/mnesia_evil_backup.erl b/lib/mnesia/test/mnesia_evil_backup.erl
index e745ec9b04..044cf501fd 100644
--- a/lib/mnesia/test/mnesia_evil_backup.erl
+++ b/lib/mnesia/test/mnesia_evil_backup.erl
@@ -723,18 +723,18 @@ bup_records(File, Mod) ->
exit(Reason)
end.
-sops_with_checkpoint(doc) ->
+sops_with_checkpoint(doc) ->
["Test schema operations during a checkpoint"];
sops_with_checkpoint(suite) -> [];
sops_with_checkpoint(Config) when is_list(Config) ->
- Ns = ?acquire_nodes(2, Config),
-
+ Ns = [N1,N2] = ?acquire_nodes(2, Config),
+
?match({ok, cp1, Ns}, mnesia:activate_checkpoint([{name, cp1},{max,mnesia:system_info(tables)}])),
- Tab = tab,
+ Tab = tab,
?match({atomic, ok}, mnesia:create_table(Tab, [{disc_copies,Ns}])),
OldRecs = [{Tab, K, -K} || K <- lists:seq(1, 5)],
[mnesia:dirty_write(R) || R <- OldRecs],
-
+
?match({ok, cp2, Ns}, mnesia:activate_checkpoint([{name, cp2},{max,mnesia:system_info(tables)}])),
File1 = "cp1_delete_me.BUP",
?match(ok, mnesia:dirty_write({Tab,6,-6})),
@@ -742,16 +742,16 @@ sops_with_checkpoint(Config) when is_list(Config) ->
?match(ok, mnesia:dirty_write({Tab,7,-7})),
File2 = "cp2_delete_me.BUP",
?match(ok, mnesia:backup_checkpoint(cp2, File2)),
-
+
?match(ok, mnesia:deactivate_checkpoint(cp1)),
?match(ok, mnesia:backup_checkpoint(cp2, File1)),
?match(ok, mnesia:dirty_write({Tab,8,-8})),
-
+
?match({atomic,ok}, mnesia:delete_table(Tab)),
?match({error,_}, mnesia:backup_checkpoint(cp2, File2)),
?match({'EXIT',_}, mnesia:dirty_write({Tab,9,-9})),
- ?match({atomic,_}, mnesia:restore(File1, [{default_op, recreate_tables}])),
+ ?match({atomic,_}, mnesia:restore(File1, [{default_op, recreate_tables}])),
Test = fun(N) when N > 5 -> ?error("To many records in backup ~p ~n", [N]);
(N) -> case mnesia:dirty_read(Tab,N) of
[{Tab,N,B}] when -B =:= N -> ok;
@@ -759,8 +759,29 @@ sops_with_checkpoint(Config) when is_list(Config) ->
end
end,
[Test(N) || N <- mnesia:dirty_all_keys(Tab)],
- ?match({aborted,enoent}, mnesia:restore(File2, [{default_op, recreate_tables}])),
-
+ ?match({aborted,enoent}, mnesia:restore(File2, [{default_op, recreate_tables}])),
+
+ %% Mnesia crashes when deleting a table during backup
+ ?match([], mnesia_test_lib:stop_mnesia([N2])),
+ Tab2 = ram,
+ ?match({atomic, ok}, mnesia:create_table(Tab2, [{ram_copies,[N1]}])),
+ ?match({ok, cp3, _}, mnesia:activate_checkpoint([{name, cp3},
+ {ram_overrides_dump,true},
+ {min,[Tab2]}])),
+ Write = fun Loop (N) ->
+ case N > 0 of
+ true ->
+ mnesia:dirty_write({Tab2, N+100, N+100}),
+ Loop(N-1);
+ false ->
+ ok
+ end
+ end,
+ ok = Write(100000),
+ spawn_link(fun() -> ?match({atomic, ok},mnesia:delete_table(Tab2)) end),
+
+ %% We don't check the result here; it depends on the timing of the call above
+ mnesia:backup_checkpoint(cp3, File2),
file:delete(File1), file:delete(File2),
- ?verify_mnesia(Ns, []).
+ ?verify_mnesia([N1], [N2]).
diff --git a/lib/mnesia/vsn.mk b/lib/mnesia/vsn.mk
index 439b21e58c..e272a469bb 100644
--- a/lib/mnesia/vsn.mk
+++ b/lib/mnesia/vsn.mk
@@ -1 +1 @@
-MNESIA_VSN = 4.14.2
+MNESIA_VSN = 4.14.3
diff --git a/lib/observer/src/cdv_bin_cb.erl b/lib/observer/src/cdv_bin_cb.erl
index 0cea1fdcf0..200c728a62 100644
--- a/lib/observer/src/cdv_bin_cb.erl
+++ b/lib/observer/src/cdv_bin_cb.erl
@@ -58,7 +58,7 @@ binary_to_term_fun(Bin) ->
try binary_to_term(Bin) of
Term -> plain_html(io_lib:format("~p",[Term]))
catch error:badarg ->
- Warning = "This binary can not be coverted to an Erlang term",
+ Warning = "This binary can not be converted to an Erlang term",
observer_html_lib:warning(Warning)
end
end.
diff --git a/lib/observer/src/cdv_detail_wx.erl b/lib/observer/src/cdv_detail_wx.erl
index 44f121f359..5782339183 100644
--- a/lib/observer/src/cdv_detail_wx.erl
+++ b/lib/observer/src/cdv_detail_wx.erl
@@ -55,7 +55,7 @@ init([Id, Data, ParentFrame, Callback, Parent]) ->
end,
{stop,normal};
{info,Info} ->
- observer_lib:display_info_dialog(Info),
+ observer_lib:display_info_dialog(ParentFrame,Info),
{stop,normal}
end.
diff --git a/lib/observer/src/observer_app_wx.erl b/lib/observer/src/observer_app_wx.erl
index 936b2783e2..80a41fdde9 100644
--- a/lib/observer/src/observer_app_wx.erl
+++ b/lib/observer/src/observer_app_wx.erl
@@ -191,8 +191,8 @@ handle_event(#wx{event=#wxMouse{type=Type, x=X0, y=Y0}},
end;
handle_event(#wx{event=#wxCommand{type=command_menu_selected}},
- State = #state{sel=undefined}) ->
- observer_lib:display_info_dialog("Select process first"),
+ State = #state{panel=Panel,sel=undefined}) ->
+ observer_lib:display_info_dialog(Panel,"Select process first"),
{noreply, State};
handle_event(#wx{id=?ID_PROC_INFO, event=#wxCommand{type=command_menu_selected}},
@@ -205,7 +205,7 @@ handle_event(#wx{id=?ID_PROC_MSG, event=#wxCommand{type=command_menu_selected}},
case observer_lib:user_term(Panel, "Enter message", "") of
cancel -> ok;
{ok, Term} -> Pid ! Term;
- {error, Error} -> observer_lib:display_info_dialog(Error)
+ {error, Error} -> observer_lib:display_info_dialog(Panel,Error)
end,
{noreply, State};
@@ -214,7 +214,7 @@ handle_event(#wx{id=?ID_PROC_KILL, event=#wxCommand{type=command_menu_selected}}
case observer_lib:user_term(Panel, "Enter Exit Reason", "kill") of
cancel -> ok;
{ok, Term} -> exit(Pid, Term);
- {error, Error} -> observer_lib:display_info_dialog(Error)
+ {error, Error} -> observer_lib:display_info_dialog(Panel,Error)
end,
{noreply, State};
diff --git a/lib/observer/src/observer_lib.erl b/lib/observer/src/observer_lib.erl
index 1eaba31a3a..47844c1307 100644
--- a/lib/observer/src/observer_lib.erl
+++ b/lib/observer/src/observer_lib.erl
@@ -20,7 +20,7 @@
-module(observer_lib).
-export([get_wx_parent/1,
- display_info_dialog/1, display_yes_no_dialog/1,
+ display_info_dialog/2, display_yes_no_dialog/1,
display_progress_dialog/2, destroy_progress_dialog/0,
wait_for_progress/0, report_progress/1,
user_term/3, user_term_multiline/3,
@@ -105,10 +105,10 @@ setup_timer(Bool, {Timer, Old}) ->
timer:cancel(Timer),
setup_timer(Bool, {false, Old}).
-display_info_dialog(Str) ->
- display_info_dialog("",Str).
-display_info_dialog(Title,Str) ->
- Dlg = wxMessageDialog:new(wx:null(), Str, [{caption,Title}]),
+display_info_dialog(Parent,Str) ->
+ display_info_dialog(Parent,"",Str).
+display_info_dialog(Parent,Title,Str) ->
+ Dlg = wxMessageDialog:new(Parent, Str, [{caption,Title}]),
wxMessageDialog:showModal(Dlg),
wxMessageDialog:destroy(Dlg),
ok.
@@ -724,7 +724,7 @@ progress_loop(Title,PD,Caller) ->
if is_list(Reason) -> Reason;
true -> file:format_error(Reason)
end,
- display_info_dialog("Crashdump Viewer Error",FailMsg),
+ display_info_dialog(PD,"Crashdump Viewer Error",FailMsg),
Caller ! error,
unregister(?progress_handler),
unlink(Caller);
diff --git a/lib/observer/src/observer_port_wx.erl b/lib/observer/src/observer_port_wx.erl
index 53ba3fa607..c21d2705c0 100644
--- a/lib/observer/src/observer_port_wx.erl
+++ b/lib/observer/src/observer_port_wx.erl
@@ -267,10 +267,19 @@ handle_cast(Event, _State) ->
error({unhandled_cast, Event}).
handle_info({portinfo_open, PortIdStr},
- State = #state{grid=Grid, ports=Ports, open_wins=Opened}) ->
- Port = lists:keyfind(PortIdStr,#port.id_str,Ports),
- NewOpened = display_port_info(Grid, Port, Opened),
- {noreply, State#state{open_wins = NewOpened}};
+ State = #state{node=Node, grid=Grid, opt=Opt, open_wins=Opened}) ->
+ Ports0 = get_ports(Node),
+ Ports = update_grid(Grid, Opt, Ports0),
+ Port = lists:keyfind(PortIdStr, #port.id_str, Ports),
+ NewOpened =
+ case Port of
+ false ->
+ self() ! {error,"No such port: " ++ PortIdStr},
+ Opened;
+ _ ->
+ display_port_info(Grid, Port, Opened)
+ end,
+ {noreply, State#state{ports=Ports, open_wins=NewOpened}};
handle_info(refresh_interval, State = #state{node=Node, grid=Grid, opt=Opt,
ports=OldPorts}) ->
@@ -296,8 +305,9 @@ handle_info(not_active, State = #state{timer = Timer0}) ->
Timer = observer_lib:stop_timer(Timer0),
{noreply, State#state{timer=Timer}};
-handle_info({error, Error}, State) ->
- handle_error(Error),
+handle_info({error, Error}, #state{panel=Panel} = State) ->
+ Str = io_lib:format("ERROR: ~s~n",[Error]),
+ observer_lib:display_info_dialog(Panel, Str),
{noreply, State};
handle_info(_Event, State) ->
@@ -501,11 +511,6 @@ filter_monitor_info() ->
[Pid || {process, Pid} <- Ms]
end.
-
-handle_error(Foo) ->
- Str = io_lib:format("ERROR: ~s~n",[Foo]),
- observer_lib:display_info_dialog(Str).
-
update_grid(Grid, Opt, Ports) ->
wx:batch(fun() -> update_grid2(Grid, Opt, Ports) end).
update_grid2(Grid, #opt{sort_key=Sort,sort_incr=Dir}, Ports) ->
diff --git a/lib/observer/src/observer_procinfo.erl b/lib/observer/src/observer_procinfo.erl
index c13b164ff9..21eb9facc5 100644
--- a/lib/observer/src/observer_procinfo.erl
+++ b/lib/observer/src/observer_procinfo.erl
@@ -92,7 +92,7 @@ init([Pid, ParentFrame, Parent]) ->
observer_wx:return_to_localnode(ParentFrame, node(Pid)),
{stop, badrpc};
process_undefined ->
- observer_lib:display_info_dialog("No such alive process"),
+ observer_lib:display_info_dialog(ParentFrame,"No such alive process"),
{stop, normal}
end.
diff --git a/lib/observer/src/observer_tv_wx.erl b/lib/observer/src/observer_tv_wx.erl
index 968a7620aa..4356cb890c 100644
--- a/lib/observer/src/observer_tv_wx.erl
+++ b/lib/observer/src/observer_tv_wx.erl
@@ -238,8 +238,9 @@ handle_info(not_active, State = #state{timer = Timer0}) ->
Timer = observer_lib:stop_timer(Timer0),
{noreply, State#state{timer=Timer}};
-handle_info({error, Error}, #state{opt=Opt}=State) ->
- handle_error(Error),
+handle_info({error, Error}, #state{panel=Panel,opt=Opt}=State) ->
+ Str = io_lib:format("ERROR: ~s~n",[Error]),
+ observer_lib:display_info_dialog(Panel,Str),
case Opt#opt.type of
mnesia -> wxMenuBar:check(observer_wx:get_menubar(), ?ID_ETS, true);
_ -> ok
@@ -365,10 +366,6 @@ list_to_strings([A]) -> integer_to_list(A);
list_to_strings([A|B]) ->
integer_to_list(A) ++ " ," ++ list_to_strings(B).
-handle_error(Foo) ->
- Str = io_lib:format("ERROR: ~s~n",[Foo]),
- observer_lib:display_info_dialog(Str).
-
update_grid(Grid, Opt, Tables) ->
wx:batch(fun() -> update_grid2(Grid, Opt, Tables) end).
update_grid2(Grid, #opt{sort_key=Sort,sort_incr=Dir}, Tables) ->
diff --git a/lib/observer/src/observer_wx.erl b/lib/observer/src/observer_wx.erl
index 5732c12006..83de4fa64c 100644
--- a/lib/observer/src/observer_wx.erl
+++ b/lib/observer/src/observer_wx.erl
@@ -467,10 +467,10 @@ handle_info(_Info, State) ->
stop_servers(#state{node=Node, log=LogOn, sys_panel=Sys, pro_panel=Procs, tv_panel=TVs,
trace_panel=Trace, app_panel=Apps, perf_panel=Perfs,
- allc_panel=Alloc} = _State) ->
+ allc_panel=Alloc, port_panel=Ports} = _State) ->
LogOn andalso rpc:block_call(Node, rb, stop, []),
Me = self(),
- Tabs = [Sys, Procs, TVs, Trace, Apps, Perfs, Alloc],
+ Tabs = [Sys, Procs, Ports, TVs, Trace, Apps, Perfs, Alloc],
Stop = fun() ->
try
_ = [wx_object:stop(Panel) || Panel <- Tabs],
@@ -580,9 +580,10 @@ get_active_pid(#state{notebook=Notebook, pro_panel=Pro, sys_panel=Sys,
pid2panel(Pid, #state{pro_panel=Pro, sys_panel=Sys,
tv_panel=Tv, trace_panel=Trace, app_panel=App,
- perf_panel=Perf, allc_panel=Alloc}) ->
+ perf_panel=Perf, allc_panel=Alloc, port_panel=Port}) ->
case Pid of
Pro -> "Processes";
+ Port -> "Ports";
Sys -> "System";
Tv -> "Table Viewer" ;
Trace -> ?TRACE_STR;
@@ -635,7 +636,8 @@ create_connect_dialog(connect, #state{frame = Frame}) ->
wxWindow:setSizerAndFit(Dialog, VSizer),
wxSizer:setSizeHints(VSizer, Dialog),
- CookiePath = filename:join(os:getenv("HOME"), ".erlang.cookie"),
+ {ok,[[HomeDir]]} = init:get_argument(home),
+ CookiePath = filename:join(HomeDir, ".erlang.cookie"),
DefaultCookie = case filelib:is_file(CookiePath) of
true ->
{ok, Bin} = file:read_file(CookiePath),
diff --git a/lib/observer/test/crashdump_helper.erl b/lib/observer/test/crashdump_helper.erl
index 4239a3d0d1..e57c8162e4 100644
--- a/lib/observer/test/crashdump_helper.erl
+++ b/lib/observer/test/crashdump_helper.erl
@@ -44,7 +44,7 @@ n1_proc(Creator,_N2,Pid2,Port2,_L) ->
Ref = make_ref(),
Pid = self(),
Bin = list_to_binary(lists:seq(1, 255)),
- SubBin = element(1, split_binary(element(2, split_binary(Bin, 8)), 17)),
+ <<_:2,SubBin:17/binary,_/bits>> = Bin,
register(named_port,Port),
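The rewritten line extracts the sub-binary with a single bit-syntax match instead of two split_binary/2 calls. A generic sketch of the same technique (the offsets here are illustrative, not the ones used in the helper):

    Bin = list_to_binary(lists:seq(1, 255)),
    <<_Skip:8/binary, Sub:17/binary, _Rest/binary>> = Bin.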
diff --git a/lib/observer/test/observer_SUITE.erl b/lib/observer/test/observer_SUITE.erl
index 4c882ad951..b5fb027878 100644
--- a/lib/observer/test/observer_SUITE.erl
+++ b/lib/observer/test/observer_SUITE.erl
@@ -34,7 +34,8 @@
%% Test cases
-export([app_file/1, appup_file/1,
- basic/1, process_win/1, table_win/1
+ basic/1, process_win/1, table_win/1,
+ port_win_when_tab_not_initiated/1
]).
%% Default timetrap timeout (set in init_per_testcase)
@@ -49,7 +50,8 @@ groups() ->
[{gui, [],
[basic,
process_win,
- table_win
+ table_win,
+ port_win_when_tab_not_initiated
]
}].
@@ -299,6 +301,17 @@ table_win(Config) when is_list(Config) ->
observer:stop(),
ok.
+%% Test PR-1296/OTP-14151
+%% Clicking a link to a port before the port tab has been activated for
+%% the first time used to crash observer.
+port_win_when_tab_not_initiated(Config) ->
+ {ok,Port} = gen_tcp:listen(0,[]),
+ ok = observer:start(),
+ Notebook = setup_whitebox_testing(),
+ observer ! {open_link,erlang:port_to_list(Port)},
+ timer:sleep(1000),
+ observer:stop(),
+ ok.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
diff --git a/lib/orber/src/cdr_encode.erl b/lib/orber/src/cdr_encode.erl
index f922b330a0..d8d1809f9d 100644
--- a/lib/orber/src/cdr_encode.erl
+++ b/lib/orber/src/cdr_encode.erl
@@ -683,7 +683,7 @@ enc_fixed(_Env, Digits, Scale, Fixed, _Bytes, _Len) ->
orber:dbg("[~p] cdr_encode:enc_fixed(~p, ~p, ~p)~n"
"The supplied fixed type incorrect. Check that the 'digits' and 'scale' field~n"
"match the definition in the IDL-specification. The value field must be~n"
- "a list of Digits lenght.",
+ "a list of Digits length.",
[?LINE, Digits, Scale, Fixed], ?DEBUG_LEVEL),
corba:raise(#'MARSHAL'{completion_status=?COMPLETED_MAYBE}).
diff --git a/lib/orber/src/orber_iiop.hrl b/lib/orber/src/orber_iiop.hrl
index 6bc82fb6d6..1b5d6a84ef 100644
--- a/lib/orber/src/orber_iiop.hrl
+++ b/lib/orber/src/orber_iiop.hrl
@@ -279,8 +279,8 @@
%%----------------------------------------------------------------------
%% Profile Body
%%
-%% iiop_version: describes the version of IIOP that the agent at the
-%% specified adress is prepared to receive.
+%% iiop_version: describes the version of IIOP that the agent at the
+%% specified address is prepared to receive.
%% host: identifies the internet host to which the GIOP messages
%% for the specified object may be sent.
%% port: contains the TCP/IP port number where the target agent is listening
diff --git a/lib/orber/src/orber_initial_references.erl b/lib/orber/src/orber_initial_references.erl
index 738d702088..8caf69a68b 100644
--- a/lib/orber/src/orber_initial_references.erl
+++ b/lib/orber/src/orber_initial_references.erl
@@ -89,7 +89,7 @@ install(Timeout, Options) ->
end,
Wait = mnesia:wait_for_tables([orber_references], Timeout),
- %% Check if any error has occured yet. If there are errors, return them.
+ %% Check if any error has occurred yet. If there are errors, return them.
if
DB_Result == {atomic, ok},
Wait == ok ->
diff --git a/lib/orber/src/orber_objectkeys.erl b/lib/orber/src/orber_objectkeys.erl
index 1233e4e721..3b1851e9b5 100644
--- a/lib/orber/src/orber_objectkeys.erl
+++ b/lib/orber/src/orber_objectkeys.erl
@@ -344,7 +344,7 @@ install(Timeout, Options) ->
end,
Wait = mnesia:wait_for_tables([orber_objkeys], Timeout),
- %% Check if any error has occured yet. If there are errors, return them.
+ %% Check if any error has occurred yet. If there are errors, return them.
if
DB_Result == {atomic, ok},
Wait == ok ->
diff --git a/lib/os_mon/src/memsup.erl b/lib/os_mon/src/memsup.erl
index 4729d090f8..0a9a883390 100644
--- a/lib/os_mon/src/memsup.erl
+++ b/lib/os_mon/src/memsup.erl
@@ -701,6 +701,7 @@ get_os_wordsize_with_uname() ->
"sparc64" -> 64;
"amd64" -> 64;
"ppc64" -> 64;
+ "s390x" -> 64;
_ -> 32
end.
diff --git a/lib/parsetools/src/leex.erl b/lib/parsetools/src/leex.erl
index 602e47404d..e0f37ae9df 100644
--- a/lib/parsetools/src/leex.erl
+++ b/lib/parsetools/src/leex.erl
@@ -1264,7 +1264,7 @@ pack_dfa([], _, Rs, PDFA) -> {PDFA,Rs}.
%% {Action, AcceptLength, CurrTokLen, RestChars, Line, State}.
%% The return CurrTokLen is always the current number of characters
-%% scanned in the current token. The returns have the follwoing
+%% scanned in the current token. The returns have the following
%% meanings:
%% {Action, AcceptLength, RestChars, Line} -
%% The scanner has reached an accepting end-state, for example after
@@ -1281,7 +1281,7 @@ pack_dfa([], _, Rs, PDFA) -> {PDFA,Rs}.
%%
%% {reject, AcceptLength, CurrTokLen, RestChars, Line, State} -
%% {Action, AcceptLength, CurrTokLen, RestChars, Line, State} -
-%% The scanner has reached a non-accepting transistion state. If
+%% The scanner has reached a non-accepting transition state. If
%% RestChars == [] we need to get more characters to continue.
%% Otherwise, if 'reject', then no accepting state has been reached and it
%% is an error. If we have an Action and AcceptLength then these are
diff --git a/lib/percept/AUTHORS b/lib/percept/AUTHORS
deleted file mode 100644
index f6c040ae76..0000000000
--- a/lib/percept/AUTHORS
+++ /dev/null
@@ -1,4 +0,0 @@
-Original Authors and Contributors:
-
-Björn-Egil Dahlberg
-Magnus Thoäng
diff --git a/lib/percept/Makefile b/lib/percept/Makefile
deleted file mode 100644
index 1f51bd2fef..0000000000
--- a/lib/percept/Makefile
+++ /dev/null
@@ -1,35 +0,0 @@
-#
-# %CopyrightBegin%
-#
-# Copyright Ericsson AB 2007-2016. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# %CopyrightEnd%
-#
-include $(ERL_TOP)/make/target.mk
-include $(ERL_TOP)/make/$(TARGET)/otp.mk
-
-
-# ----------------------------------------------------
-# Common Macros
-# ----------------------------------------------------
-
-SUB_DIRECTORIES = src priv doc/src
-
-SPECIAL_TARGETS =
-
-# ----------------------------------------------------
-# Default Subdir Targets
-# ----------------------------------------------------
-include $(ERL_TOP)/make/otp_subdir.mk
diff --git a/lib/percept/c_src/.gitignore b/lib/percept/c_src/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/lib/percept/c_src/.gitignore
+++ /dev/null
diff --git a/lib/percept/doc/html/.gitignore b/lib/percept/doc/html/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/lib/percept/doc/html/.gitignore
+++ /dev/null
diff --git a/lib/percept/doc/man3/.gitignore b/lib/percept/doc/man3/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/lib/percept/doc/man3/.gitignore
+++ /dev/null
diff --git a/lib/percept/doc/pdf/.gitignore b/lib/percept/doc/pdf/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/lib/percept/doc/pdf/.gitignore
+++ /dev/null
diff --git a/lib/percept/doc/src/Makefile b/lib/percept/doc/src/Makefile
deleted file mode 100644
index 2f84d61cbc..0000000000
--- a/lib/percept/doc/src/Makefile
+++ /dev/null
@@ -1,190 +0,0 @@
-#
-# %CopyrightBegin%
-#
-# Copyright Ericsson AB 2007-2016. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# %CopyrightEnd%
-#
-
-include $(ERL_TOP)/make/target.mk
-include $(ERL_TOP)/make/$(TARGET)/otp.mk
-
-# ----------------------------------------------------
-# Application version
-# ----------------------------------------------------
-include ../../vsn.mk
-VSN=$(PERCEPT_VSN)
-APPLICATION=percept
-
-# ----------------------------------------------------
-# Release directory specification
-# ----------------------------------------------------
-RELSYSDIR = $(RELEASE_PATH)/lib/$(APPLICATION)-$(VSN)
-
-# ----------------------------------------------------
-# Help application directory specification
-# ----------------------------------------------------
-
-EDOC_DIR = $(ERL_TOP)/lib/edoc
-
-# ----------------------------------------------------
-# Target Specs
-# ----------------------------------------------------
-PERCEPT_DIR = $(ERL_TOP)/lib/$(APPLICATION)/src
-RUNTIME_TOOLS_DIR = $(ERL_TOP)/lib/runtime_tools/src
-
-PERCEPT_MODULES = \
- egd\
- percept
-
-RUNTIME_TOOLS_MODULES = \
- percept_profile
-
-XML_APPLICATION_FILES = \
- ref_man.xml
-
-PERCEPT_XML_FILES = $(PERCEPT_MODULES:=.xml)
-
-RUNTIME_TOOLS_XML_FILES = $(RUNTIME_TOOLS_MODULES:=.xml)
-
-MODULE_XML_FILES = $(PERCEPT_XML_FILES) $(RUNTIME_TOOLS_XML_FILES)
-
-XML_REF_MAN = \
- ref_man.xml
-
-XML_REF3_FILES = $(MODULE_XML_FILES)
-
-XML_PART_FILES = \
- part.xml \
- part_notes.xml
-
-XML_REF6_FILES =
-
-XML_CHAPTER_FILES = \
- notes.xml \
- egd_ug.xml \
- percept_ug.xml
-
-GEN_XML = \
- egd_ug.xml \
- percept_ug.xml
-
-BOOK_FILES = book.xml
-
-XML_FILES = \
- $(BOOK_FILES) $(XML_CHAPTER_FILES) \
- $(XML_PART_FILES) $(XML_REF3_FILES) $(XML_REF_MAN)
-
-HTML_EXAMPLE_FILES = \
- percept_examples.html
-
-HTML_STYLESHEET_FILES = \
- ../stylesheet.css
-
-
-GIF_FILES = \
- test1.gif \
- test2.gif \
- test3.gif \
- test4.gif \
- percept_overview.gif \
- percept_processes.gif \
- percept_processinfo.gif \
- percept_compare.gif \
- img_esi_result.gif
-
-# ----------------------------------------------------
-INFO_FILE = ../../info
-
-HTML_FILES = \
- $(XML_REF_MAN:%.xml=$(HTMLDIR)/%.html) \
- $(XML_PART_FILES:%.xml=$(HTMLDIR)/%.html)
-
-MAN3_FILES = $(XML_REF3_FILES:%.xml=$(MAN3DIR)/%.3)
-MAN6_FILES = $(XML_REF6_FILES:%_app.xml=$(MAN6DIR)/%.6)
-
-
-HTML_REF_MAN_FILE = $(HTMLDIR)/index.html
-
-TOP_PDF_FILE = $(PDFDIR)/$(APPLICATION)-$(VSN).pdf
-
-
-# ----------------------------------------------------
-# FLAGS
-# ----------------------------------------------------
-XML_FLAGS +=
-
-# ----------------------------------------------------
-# Targets
-# ----------------------------------------------------
-$(HTMLDIR)/%.gif: %.gif
- $(INSTALL_DATA) $< $@
-
-docs: pdf html man
-
-$(TOP_PDF_FILE): $(XML_FILES)
-
-pdf: $(TOP_PDF_FILE)
-
-html: gifs $(HTML_REF_MAN_FILE)
-
-clean clean_docs:
- rm -f $(MODULE_XML_FILES) $(GEN_XML)
- rm -rf $(HTMLDIR)/*
- rm -f $(MAN3DIR)/*
- rm -f $(TOP_PDF_FILE) $(TOP_PDF_FILE:%.pdf=%.fo)
- rm -f errs core *~
-
-man: $(MAN3_FILES) $(MAN6_FILES)
-
-gifs: $(GIF_FILES:%=$(HTMLDIR)/%)
-
-xml: $(MODULE_XML_FILES)
-
-$(PERCEPT_XML_FILES):
- escript $(DOCGEN)/priv/bin/xml_from_edoc.escript $(PERCEPT_DIR)/$(@:%.xml=%.erl)
-
-$(RUNTIME_TOOLS_XML_FILES):
- escript $(DOCGEN)/priv/bin/xml_from_edoc.escript $(RUNTIME_TOOLS_DIR)/$(@:%.xml=%.erl)
-
-info:
- @echo "XML_PART_FILES: $(XML_PART_FILES)"
- @echo "XML_APPLICATION_FILES: $(XML_APPLICATION_FILES)"
- @echo "PERCEPT_XML_FILES: $(MODULE_XML_FILES)"
- @echo "PERCEPT_MODULES: $(PERCEPT_MODULES)"
- @echo "HTML_FILES: $(HTML_FILES)"
- @echo "HTMLDIR: $(HTMLDIR)"
-
-
-debug opt:
-
-# ----------------------------------------------------
-# Release Target
-# ----------------------------------------------------
-include $(ERL_TOP)/make/otp_release_targets.mk
-
-release_docs_spec: docs
- $(INSTALL_DIR) "$(RELSYSDIR)/doc/pdf"
- $(INSTALL_DATA) $(TOP_PDF_FILE) "$(RELSYSDIR)/doc/pdf"
- $(INSTALL_DIR) "$(RELSYSDIR)/doc/html"
- $(INSTALL_DATA) $(HTML_EXAMPLE_FILES) $(HTML_STYLESHEET_FILES) \
- $(HTMLDIR)/* \
- "$(RELSYSDIR)/doc/html"
- $(INSTALL_DATA) $(INFO_FILE) "$(RELSYSDIR)"
- $(INSTALL_DIR) "$(RELEASE_PATH)/man/man3"
- $(INSTALL_DATA) $(MAN3DIR)/* "$(RELEASE_PATH)/man/man3"
-
-release_spec:
-
diff --git a/lib/percept/doc/src/book.xml b/lib/percept/doc/src/book.xml
deleted file mode 100644
index 5acba1f214..0000000000
--- a/lib/percept/doc/src/book.xml
+++ /dev/null
@@ -1,52 +0,0 @@
-<?xml version="1.0" encoding="utf-8" ?>
-<!DOCTYPE book SYSTEM "book.dtd">
-
-<book xmlns:xi="http://www.w3.org/2001/XInclude">
- <header titlestyle="normal">
- <copyright>
- <year>2007</year>
- <year>2016</year>
- <holder>Ericsson AB, All Rights Reserved</holder>
- </copyright>
- <legalnotice>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- The Initial Developer of the Original Code is Ericsson AB.
- </legalnotice>
-
- <title>Percept</title>
- <prepared>Björn-Egil Dahlberg</prepared>
- <docno></docno>
- <date>2007-11-02</date>
- <rev>0.5.0</rev>
- <file>book.xml</file>
- </header>
- <insidecover>
- </insidecover>
- <pagetext>Percept</pagetext>
- <preamble>
- <contents level="2"></contents>
- </preamble>
- <parts lift="no">
- <xi:include href="part.xml"/>
- </parts>
- <applications>
- <xi:include href="ref_man.xml"/>
- </applications>
- <releasenotes>
- <xi:include href="notes.xml"/>
- </releasenotes>
- <listofterms></listofterms>
- <index></index>
-</book>
-
diff --git a/lib/percept/doc/src/egd_ug.xmlsrc b/lib/percept/doc/src/egd_ug.xmlsrc
deleted file mode 100644
index 85d41ada79..0000000000
--- a/lib/percept/doc/src/egd_ug.xmlsrc
+++ /dev/null
@@ -1,90 +0,0 @@
-<?xml version="1.0" encoding="utf-8" ?>
-<!DOCTYPE chapter SYSTEM "chapter.dtd">
-
-<chapter>
- <header>
- <copyright>
- <year>2007</year>
- <year>2016</year>
- <holder>Ericsson AB, All Rights Reserved</holder>
- </copyright>
- <legalnotice>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- The Initial Developer of the Original Code is Ericsson AB.
- </legalnotice>
-
- <title>egd</title>
- <prepared>Björn-Egil Dahlberg</prepared>
- <docno></docno>
- <date>2007-11-03</date>
- <rev>A</rev>
- <file>egd_ug.xml</file>
- </header>
- <section>
- <title>Introduction</title>
- <p>
- The egd module is an interface for 2d-image rendering and is used by
- Percept to generate dynamic graphs to its web pages. All code is pure
- erlang, no drivers needed.
- </p>
- <p>
- The library is intended for small to medium image sizes with low
- complexity for optimal performance. The library handles horizontal
- lines better then vertical lines.
- </p>
- <p>
- The foremost purpose for this module is to enable users to
- generate images from erlang code and/or datasets and to
- send these images to either files or web servers.
- </p>
- </section>
- <section>
- <title>File example</title>
- <p>Drawing examples:</p>
- <codeinclude file="img.erl" tag="" type="none"></codeinclude>
- <p> First save. </p>
- <image file="test1.gif">
- <icaption>test1.png</icaption>
- </image>
-
- <p> Second save. </p>
- <image file="test2.gif">
- <icaption>test2.png</icaption>
- </image>
-
- <p> Third save. </p>
- <image file="test3.gif">
- <icaption>test3.png</icaption>
- </image>
-
- <p> Fourth save. </p>
- <image file="test4.gif">
- <icaption>test4.png</icaption>
- </image>
- </section>
- <section>
- <title>ESI example</title>
- <p>Using egd with inets ESI to generate images on the fly:</p>
- <codeinclude file="img_esi.erl" tag="" type="none"></codeinclude>
- <image file="img_esi_result.gif">
- <icaption>Example of result.</icaption>
- </image>
- <p>
- For more information regarding ESI, please see inets application
- <seealso marker="inets:mod_esi">mod_esi</seealso>.
- </p>
- </section>
-</chapter>
-
-
diff --git a/lib/percept/doc/src/fascicules.xml b/lib/percept/doc/src/fascicules.xml
deleted file mode 100644
index 37feca543f..0000000000
--- a/lib/percept/doc/src/fascicules.xml
+++ /dev/null
@@ -1,18 +0,0 @@
-<?xml version="1.0" encoding="utf-8" ?>
-<!DOCTYPE fascicules SYSTEM "fascicules.dtd">
-
-<fascicules>
- <fascicule file="part" href="part_frame.html" entry="no">
- User's Guide
- </fascicule>
- <fascicule file="ref_man" href="ref_man_frame.html" entry="yes">
- Reference Manual
- </fascicule>
- <fascicule file="part_notes" href="part_notes_frame.html" entry="no">
- Release Notes
- </fascicule>
- <fascicule file="" href="../../../../doc/print.html" entry="no">
- Off-Print
- </fascicule>
-</fascicules>
-
diff --git a/lib/percept/doc/src/img.erl b/lib/percept/doc/src/img.erl
deleted file mode 100644
index 8f3bd3839f..0000000000
--- a/lib/percept/doc/src/img.erl
+++ /dev/null
@@ -1,50 +0,0 @@
--module(img).
-
--export([do/0]).
-
-do() ->
- Im = egd:create(200,200),
- Red = egd:color({255,0,0}),
- Green = egd:color({0,255,0}),
- Blue = egd:color({0,0,255}),
- Black = egd:color({0,0,0}),
- Yellow = egd:color({255,255,0}),
-
- % Line and fillRectangle
-
- egd:filledRectangle(Im, {20,20}, {180,180}, Red),
- egd:line(Im, {0,0}, {200,200}, Black),
-
- egd:save(egd:render(Im, png), "/home/egil/test1.png"),
-
- egd:filledEllipse(Im, {45, 60}, {55, 70}, Yellow),
- egd:filledEllipse(Im, {145, 60}, {155, 70}, Blue),
-
- egd:save(egd:render(Im, png), "/home/egil/test2.png"),
-
- R = 80,
- X0 = 99,
- Y0 = 99,
-
- Pts = [ { X0 + trunc(R*math:cos(A*math:pi()*2/360)),
- Y0 + trunc(R*math:sin(A*math:pi()*2/360))
- } || A <- lists:seq(0,359,5)],
- lists:map(
- fun({X,Y}) ->
- egd:rectangle(Im, {X-5, Y-5}, {X+5,Y+5}, Green)
- end, Pts),
-
- egd:save(egd:render(Im, png), "/home/egil/test3.png"),
-
- % Text
- Filename = filename:join([code:priv_dir(percept), "fonts", "6x11_latin1.wingsfont"]),
- Font = egd_font:load(Filename),
- {W,H} = egd_font:size(Font),
- String = "egd says hello",
- Length = length(String),
-
- egd:text(Im, {round(100 - W*Length/2), 200 - H - 5}, Font, String, Black),
-
- egd:save(egd:render(Im, png), "/home/egil/test4.png"),
-
- egd:destroy(Im).
diff --git a/lib/percept/doc/src/img_esi.erl b/lib/percept/doc/src/img_esi.erl
deleted file mode 100644
index e9796819c0..0000000000
--- a/lib/percept/doc/src/img_esi.erl
+++ /dev/null
@@ -1,25 +0,0 @@
--module(img_esi).
-
--export([image/3]).
-
-image(SessionID, _Env, _Input) ->
- mod_esi:deliver(SessionID, header()),
- Binary = my_image(),
- mod_esi:deliver(SessionID, binary_to_list(Binary)).
-
-my_image() ->
- Im = egd:create(300,20),
- Black = egd:color({0,0,0}),
- Red = egd:color({255,0,0}),
- egd:filledRectangle(Im, {30,14}, {270,19}, Red),
- egd:rectangle(Im, {30,14}, {270,19}, Black),
-
- Filename = filename:join([code:priv_dir(percept), "fonts", "6x11_latin1.wingsfont"]),
- Font = egd_font:load(Filename),
- egd:text(Im, {30, 0}, Font, "egd with esi callback", Black),
- Bin = egd:render(Im, png),
- egd:destroy(Im),
- Bin.
-
-header() ->
- "Content-Type: image/png\r\n\r\n".
diff --git a/lib/percept/doc/src/img_esi_result.gif b/lib/percept/doc/src/img_esi_result.gif
deleted file mode 100644
index 6973392998..0000000000
--- a/lib/percept/doc/src/img_esi_result.gif
+++ /dev/null
Binary files differ
diff --git a/lib/percept/doc/src/ipc_tree.erl b/lib/percept/doc/src/ipc_tree.erl
deleted file mode 100644
index 89360379c6..0000000000
--- a/lib/percept/doc/src/ipc_tree.erl
+++ /dev/null
@@ -1,30 +0,0 @@
--module(ipc_tree).
--export([go/1, init/2]).
-
-go(N) ->
- start(N, self()),
- receive {_,stop} -> ok end.
-
-start(Depth, ParentPid) ->
- spawn(?MODULE, init, [Depth, ParentPid]).
-
-init(0, ParentPid) ->
- workload(5000),
- ParentPid ! {self(),stop},
- ok;
-init(Depth, ParentPid) ->
- Pid1 = spawn(?MODULE, init, [Depth - 1, self()]),
- Pid2 = spawn(?MODULE, init, [Depth - 1, self()]),
- main([Pid1,Pid2], ParentPid).
-
-main(Pids, ParentPid) ->
- workload(5000),
- gather(Pids),
- ParentPid ! {self(),stop},
- ok.
-
-gather([]) -> ok;
-gather([Pid|Pids]) -> receive {Pid,stop} -> gather(Pids) end.
-
-workload(0) -> ok;
-workload(N) -> math:sin(2), workload(N - 1).
diff --git a/lib/percept/doc/src/notes.xml b/lib/percept/doc/src/notes.xml
deleted file mode 100644
index c9d5d3ae29..0000000000
--- a/lib/percept/doc/src/notes.xml
+++ /dev/null
@@ -1,495 +0,0 @@
-<?xml version="1.0" encoding="utf-8" ?>
-<!DOCTYPE chapter SYSTEM "chapter.dtd">
-
-<chapter>
- <header>
- <copyright>
- <year>2007</year>
- <year>2016</year>
- <holder>Ericsson AB, All Rights Reserved</holder>
- </copyright>
- <legalnotice>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- The Initial Developer of the Original Code is Ericsson AB.
- </legalnotice>
-
- <title>Percept Release Notes</title>
- <prepared>otp_appnotes</prepared>
- <docno>nil</docno>
- <date>nil</date>
- <rev>nil</rev>
- <file>notes.xml</file>
- </header>
- <p>This document describes the changes made to the Percept application.</p>
-
-<section><title>Percept 0.9</title>
-
- <section><title>Fixed Bugs and Malfunctions</title>
- <list>
- <item>
- <p>
- Remove deprecated <c>erlang:now/0</c> calls</p>
- <p>
- Own Id: OTP-13422</p>
- </item>
- </list>
- </section>
-
-
- <section><title>Improvements and New Features</title>
- <list>
- <item>
- <p>
- Improve line implementation</p>
- <p>
- Add capabilities for line thickness and anti-aliasing.</p>
- <p>
- Own Id: OTP-13598</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.8.11</title>
-
- <section><title>Fixed Bugs and Malfunctions</title>
- <list>
- <item>
- <p>
- Fix http server configuration</p>
- <p>
- Own Id: OTP-12662</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.8.10</title>
-
- <section><title>Fixed Bugs and Malfunctions</title>
- <list>
- <item>
- <p>
- Make sure to install .hrl files when needed</p>
- <p>
- Own Id: OTP-12197</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.8.9</title>
-
- <section><title>Fixed Bugs and Malfunctions</title>
- <list>
- <item>
- <p>
- Application upgrade (appup) files are corrected for the
- following applications: </p>
- <p>
- <c>asn1, common_test, compiler, crypto, debugger,
- dialyzer, edoc, eldap, erl_docgen, et, eunit, gs, hipe,
- inets, observer, odbc, os_mon, otp_mibs, parsetools,
- percept, public_key, reltool, runtime_tools, ssh,
- syntax_tools, test_server, tools, typer, webtool, wx,
- xmerl</c></p>
- <p>
- A new test utility for testing appup files is added to
- test_server. This is now used by most applications in
- OTP.</p>
- <p>
- (Thanks to Tobias Schlager)</p>
- <p>
- Own Id: OTP-11744</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.8.8.2</title>
-
- <section><title>Improvements and New Features</title>
- <list>
- <item>
- <p>
- The encoding of the <c>notes.xml</c> file has been
- changed from latin1 to utf-8 to avoid future merge
- problems.</p>
- <p>
- Own Id: OTP-11310</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.8.8.1</title>
-
- <section><title>Improvements and New Features</title>
- <list>
- <item>
- <p> Postscript files no longer needed for the generation
- of PDF files have been removed. </p>
- <p>
- Own Id: OTP-11016</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.8.8</title>
-
- <section><title>Improvements and New Features</title>
- <list>
- <item>
- <p>
- Misc build updates</p>
- <p>
- Own Id: OTP-10784</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.8.7</title>
-
- <section><title>Fixed Bugs and Malfunctions</title>
- <list>
- <item>
- <p>
- Add missing modules in app-file</p>
- <p>
- Own Id: OTP-10439</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.8.6.1</title>
-
- <section><title>Improvements and New Features</title>
- <list>
- <item>
- <p>
- Miscellaneous documentation build updates</p>
- <p>
- Own Id: OTP-9813</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.8.6</title>
-
- <section><title>Fixed Bugs and Malfunctions</title>
- <list>
- <item>
- <p>
- Fix message handling in select requests</p>
- <p>
- percept_db used to send results in untagged messages, and
- use a non selective receive to extract them. When percept
- is used from the shell process, this can confuse other
- messages with the actual result.</p>
- <p>
- Add a tag to the message to be {result, Result}. Add
- demonitor to avoid keeping DOWN message in the queue fix
- one spec in do_start/0</p>
- <p>
- (Thanks to Ahmed Omar)</p>
- <p>
- Own Id: OTP-9490</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.8.5</title>
-
- <section><title>Fixed Bugs and Malfunctions</title>
- <list>
- <item>
- <p> Fixes a race condition found in percept_db start/1
- function. (Thanks to Ahmed Omar) </p>
- <p>
- Own Id: OTP-9012</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.8.4</title>
-
- <section><title>Fixed Bugs and Malfunctions</title>
- <list>
- <item>
- <p>
- Fix egd_render transparent to use float constants.</p>
- <p>
- The render engine has float guards to enhance beam code
- generation. However, the default case used integers which
- caused the engine to crash. This is now fixed.</p>
- <p>
- Own Id: OTP-8425</p>
- </item>
- </list>
- </section>
-
-
- <section><title>Improvements and New Features</title>
- <list>
- <item>
- <p>The documentation is now possible to build in an open
- source environment after a number of bugs are fixed and
- some features are added in the documentation build
- process. </p>
- <p>- The arity calculation is updated.</p>
- <p>- The module prefix used in the function names for
- bif's are removed in the generated links so the links
- will look like
- "http://www.erlang.org/doc/man/erlang.html#append_element-2"
- instead of
- "http://www.erlang.org/doc/man/erlang.html#erlang:append_element-2".</p>
- <p>- Enhanced the menu positioning in the html
- documentation when a new page is loaded.</p>
- <p>- A number of corrections in the generation of man
- pages (thanks to Sergei Golovan)</p>
- <p>- The legal notice is taken from the xml book file so
- OTP's build process can be used for non OTP
- applications.</p>
- <p>
- Own Id: OTP-8343</p>
- </item>
- <item>
- <p>
- Cleanups suggested by tidier and modernization of types
- and specs.</p>
- <p>
- Own Id: OTP-8455</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.8.3</title>
-
- <section><title>Improvements and New Features</title>
- <list>
- <item>
- <p>
- The documentation is now built with open source tools
- (xsltproc and fop) that exists on most platforms. One
- visible change is that the frames are removed.</p>
- <p>
- Own Id: OTP-8201</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.8.2</title>
-
- <section><title>Improvements and New Features</title>
- <list>
- <item>
- <p>
- Extensions to <c>egd:color/1</c> for using atoms as color
- definition in addition to rgb triplets.</p>
- <p>
- Own Id: OTP-7975</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.8.1</title>
-
- <section><title>Improvements and New Features</title>
- <list>
- <item>
- <p><c>egd</c> now supports encapsulated postscript output
- format.</p>
- <p>
- Own Id: OTP-7923</p>
- </item>
- </list>
- </section>
-
-</section>
-
- <section><title>Percept 0.8</title>
-
- <section><title>Fixed Bugs and Malfunctions</title>
- <list>
- <item>
- <p>A problem with options list to percept causing some
- options to be disregarded unintentionally. This has now
- been fixed.</p> <p>An error in <c>percept_analyzer</c>
- caused calculation of standard deviation to be incorrect.
- This has now been corrected.</p>
- <p>
- Own Id: OTP-7693</p>
- </item>
- </list>
- </section>
-
-
- <section><title>Improvements and New Features</title>
- <list>
- <item>
- <p>Updated css for percept server for enhanced
- viewing.</p> <p>Increased performance of egd render.</p>
- <p>Several graph errors could occur when compacting data
- to decrease graph rendering time causing incorrect
- scalability numbers. These errors have now been
- fixed.</p> <p>Increased viewing width for graphs. The
- viewing width is now dependent on client screen
- resolution.</p>
- <p>
- Own Id: OTP-7696</p>
- </item>
- </list>
- </section>
-
-</section>
-<section><title>Percept 0.7.3</title>
-
- <section><title>Fixed Bugs and Malfunctions</title>
- <list>
- <item>
- <p>External pids caused the webserver to crash. This has
- now been fixed.</p>
- <p>
- Own Id: OTP-7515 Aux Id: seq11004 </p>
- </item>
- <item>
- <p>Fixed a timestamp problem where some events could be
- sent out of order. Minor fixes to presentation of
- data.</p>
- <p>
- Own Id: OTP-7544 Aux Id: otp-7442 </p>
- </item>
- </list>
- </section>
-
-
- <section><title>Improvements and New Features</title>
- <list>
- <item>
- <p>Performance enhancement for the egd render engine
- (Thanks to Magnus Thoäng).</p>
- <p>
- Own Id: OTP-7616</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.7.2</title>
-
- <section><title>Fixed Bugs and Malfunctions</title>
- <list>
- <item>
- <p>Calling <c>egd:destroy/1</c> did not properly remove
- the process holding the image.</p>
- <p>Synchronous calls done via the egd interface could
- erroneous receive messages not intended for egd. Messages
- are now tagged in such a way so this should not
- occur.</p>
- <p>
- Own Id: OTP-7336</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.7.1</title>
-
- <section><title>Fixed Bugs and Malfunctions</title>
- <list>
- <item>
- <p>
- Fixed out of bounds rendering problem in egd which could
- cause the rendering process to crash.</p>
- <p>
- Own Id: OTP-7215</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.7</title>
-
- <section><title>Improvements and New Features</title>
- <list>
- <item>
- <p>Percept no longer depends on external c-libraries. The
- graphical rendering is now done via erlang code.</p>
- <p>
- Own Id: OTP-7162</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.6.2</title>
-
- <section><title>Improvements and New Features</title>
- <list>
- <item>
- <p>
- A new module, percept_profile, can now be used to collect
- profiling data even if the percept application is not
- installed. This should help profiling erlang application
- on target machines without libgd installed.</p>
- <p>
- Own Id: OTP-7126</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section>
- <title>Percept 0.5.0</title>
- <section><title>First Release</title>
- <list>
- <item>
- <p>
- First Release.
- </p>
- <p>Own Id: OTP-6783</p>
- </item>
- </list>
- </section>
- </section>
-</chapter>
-
diff --git a/lib/percept/doc/src/part.xml b/lib/percept/doc/src/part.xml
deleted file mode 100644
index 277d89d45c..0000000000
--- a/lib/percept/doc/src/part.xml
+++ /dev/null
@@ -1,47 +0,0 @@
-<?xml version="1.0" encoding="utf-8" ?>
-<!DOCTYPE part SYSTEM "part.dtd">
-
-<part xmlns:xi="http://www.w3.org/2001/XInclude">
- <header>
- <copyright>
- <year>2007</year>
- <year>2016</year>
- <holder>Ericsson AB, All Rights Reserved</holder>
- </copyright>
- <legalnotice>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- The Initial Developer of the Original Code is Ericsson AB.
- </legalnotice>
-
- <title>Percept User's Guide</title>
- <prepared>Björn-Egil Dahlberg</prepared>
- <docno></docno>
- <date>2007-11-02</date>
- <rev>0.5.0</rev>
- <file>part.xml</file>
- </header>
- <description>
- <p>
- <em>Percept</em> is an acronym for <em>P</em>ercept - <em>er</em>lang
- <em>c</em>oncurr<em>e</em>ncy <em>p</em>rofiling <em>t</em>ool.
- </p>
- <p>
- It is a tool to visualize application level concurrency and
- identify concurrency bottlenecks.
- </p>
- </description>
- <xi:include href="percept_ug.xml"/>
- <xi:include href="egd_ug.xml"/>
-</part>
-
diff --git a/lib/percept/doc/src/part_notes.xml b/lib/percept/doc/src/part_notes.xml
deleted file mode 100644
index f428b4fd81..0000000000
--- a/lib/percept/doc/src/part_notes.xml
+++ /dev/null
@@ -1,41 +0,0 @@
-<?xml version="1.0" encoding="utf-8" ?>
-<!DOCTYPE part SYSTEM "part.dtd">
-
-<part xmlns:xi="http://www.w3.org/2001/XInclude">
- <header>
- <copyright>
- <year>2007</year>
- <year>2016</year>
- <holder>Ericsson AB, All Rights Reserved</holder>
- </copyright>
- <legalnotice>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- The Initial Developer of the Original Code is Ericsson AB.
- </legalnotice>
-
- <title>Percept Release Notes</title>
- <prepared>Björn-Egil Dahlberg</prepared>
- <docno></docno>
- <date>>2007-11-02</date>
- <rev></rev>
- <file>part_notes.xml</file>
- </header>
- <description>
- <p>
- The <em>Percept</em> application.
- </p>
- </description>
- <xi:include href="notes.xml"/>
-</part>
-
diff --git a/lib/percept/doc/src/percept_compare.gif b/lib/percept/doc/src/percept_compare.gif
deleted file mode 100644
index 1c8ccf0186..0000000000
--- a/lib/percept/doc/src/percept_compare.gif
+++ /dev/null
Binary files differ
diff --git a/lib/percept/doc/src/percept_examples.html b/lib/percept/doc/src/percept_examples.html
deleted file mode 100644
index df2f52bdfd..0000000000
--- a/lib/percept/doc/src/percept_examples.html
+++ /dev/null
@@ -1,11 +0,0 @@
-<meta http-equiv="Context-Type" content="text/html; charset=iso-8859-1">
-<?xml version="1.0" encoding="iso-8859-1"?><!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
- "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd ">
-<html xmlns="http://www.w3.org/1999/xhtml" ><head>
-<title>Customization functions</title>
-<link rel="stylesheet" type="text/css" href="stylesheet.css">
-</head>
-<body>
-<h1>Customization functions</h1>
-</body>
-</html>
diff --git a/lib/percept/doc/src/percept_overview.gif b/lib/percept/doc/src/percept_overview.gif
deleted file mode 100644
index 12ac172472..0000000000
--- a/lib/percept/doc/src/percept_overview.gif
+++ /dev/null
Binary files differ
diff --git a/lib/percept/doc/src/percept_processes.gif b/lib/percept/doc/src/percept_processes.gif
deleted file mode 100644
index 640ff50ee2..0000000000
--- a/lib/percept/doc/src/percept_processes.gif
+++ /dev/null
Binary files differ
diff --git a/lib/percept/doc/src/percept_processinfo.gif b/lib/percept/doc/src/percept_processinfo.gif
deleted file mode 100644
index 00cc05f5c9..0000000000
--- a/lib/percept/doc/src/percept_processinfo.gif
+++ /dev/null
Binary files differ
diff --git a/lib/percept/doc/src/percept_ug.xmlsrc b/lib/percept/doc/src/percept_ug.xmlsrc
deleted file mode 100644
index 0d243cdabe..0000000000
--- a/lib/percept/doc/src/percept_ug.xmlsrc
+++ /dev/null
@@ -1,223 +0,0 @@
-<?xml version="1.0" encoding="utf-8" ?>
-<!DOCTYPE chapter SYSTEM "chapter.dtd">
-
-<chapter>
- <header>
- <copyright>
- <year>2007</year>
- <year>2016</year>
- <holder>Ericsson AB, All Rights Reserved</holder>
- </copyright>
- <legalnotice>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- The Initial Developer of the Original Code is Ericsson AB.
- </legalnotice>
-
- <title>Percept</title>
- <prepared>Björn-Egil Dahlberg</prepared>
- <docno></docno>
- <date>2007-11-02</date>
- <rev>A</rev>
- <file>percept_ug.xml</file>
- </header>
- <p>
- Percept, or Percept - Erlang Concurrency Profiling Tool, utilizes trace
- informations and profiler events to form a picture of the processes's and
- ports runnability.
- </p>
-
- <section>
- <title>Introduction</title>
- <p>
- Percept uses <c>erlang:trace/3</c> and <c>erlang:system_profile/2</c> to monitor events from
- process states. Such states are,</p>
- <list>
- <item>waiting</item>
- <item>running</item>
- <item>runnable</item>
- <item>free</item>
- <item>exiting</item>
- </list>
- <p>
- There are some other states too, <c>suspended</c>, <c>hibernating</c>, and
- garbage collecting (<c>gc</c>). The only ignored state is <c>gc</c> and a process is considered to have
- its previous state through out the entire garbage collecting phase. The main reason for this, is that our
- model considers the <c>gc</c> as a third state neither active nor inactive.
- </p>
- <p>
- A waiting or suspended process is considered an inactive process and a running or
- runnable process is considered an active process.
- </p>
- <p>
- Events are collected and stored to a file. The file can be moved and
- analyzed on a different machine than the target machine.
- </p>
- <p>
- Note, even if percept is not installed on your target machine, profiling
- can still be done via the module <seealso marker="percept_profile">percept_profile</seealso>
- located in runtime_tools.
- </p>
- </section>
- <section>
- <title>Getting started</title>
- <section>
- <title>Profiling</title>
- <p>
- There are a few ways to start the profiling of a specific code. The
- command <c>percept:profile/3</c> is a preferred way.
- </p>
- <p>
- The command takes a filename for the data destination file as first
- argument, a callback entry-point as second argument and a
- list of specific profiler options, for instance <c>procs</c>, as third
- argument.
- </p>
- <p>
- Let's say we have a module called example that initializes our
- profiling-test and let it run under some defined manner designed by ourself.
- The module needs a start function, let's call it go and it takes zero arguments.
- The start arguments would look like:
- </p>
- <p><c>percept:profile("test.dat", {test, go, []}, [procs]).</c></p>
- <p>
- For a semi-real example we start a tree of processes that does sorting
- of random numbers. In our model below we use a controller process that
- distributes work to different client processes.
- </p>
- <codeinclude file="sorter.erl" tag="" type="none"></codeinclude>
- <p>We can now start our test using percept:</p>
- <pre>
-Erlang (BEAM) emulator version 5.6 [async-threads:0] [kernel-poll:false]
-
-Eshell V5.6 (abort with ^G)
-1> percept:profile("test.dat", {sorter, go, [5, 2000, 15]}, [procs]).
-Starting profiling.
-ok
- </pre>
- <p>
- Percept sets up the trace and profiling facilities to listen for process
- specific events. It then stores these events to the <c>test.dat</c>
- file. The profiling will go on for the whole duration until
- <c>sorter:go/3</c> returns and the profiling has concluded.
- </p>
- </section>
- <section>
- <title>Data viewing</title>
- <p>
- To analyze this file, use <c>percept:analyze("test.dat")</c>. We can do
- this on any machine with Percept installed. The command will parse the
- data file and insert all events in a RAM database, <c>percept_db</c>. The
- initial command will only prompt how many processes were involved in the
- profile.
- </p>
- <pre>
-2> percept:analyze("test.dat").
-Parsing: "test.dat"
-Parsed 428 entries in 3.81310e-2 s.
- 17 created processes.
- 0 opened ports.
-ok
- </pre>
- <p>
- To view the data we start the web-server using
- <c>percept:start_webserver/1</c>. The command will return the hostname
- and the a port where we should direct our favorite web browser.
- </p>
- <pre>
-3> percept:start_webserver(8888).
-{started,"durin",8888}
-4>
- </pre>
- <section>
- <title>Overview selection</title>
- <p>
- Now we can view our data. The database has its content from
- <c>percept:analyze/1</c> command and the webserver is started.
- </p>
- <p>
- When we click on the <c>overview</c> button in the menu percept will
- generate a graph of the concurrency and send it to our web browser. In this
- view we get no details but rather the big picture. We can see if
- our processes behave in an inefficient manner. Dips in the graph represents
- low concurrency in the erlang system.
- </p>
- <p>
- We can zoom in on different areas of the graph either using the mouse
- to select an area or by specifying min and max ranges in the edit boxes.
- </p>
- <note>
- <p>Measured time is presented in seconds if nothing else is stated.</p>
- </note>
- <image file="percept_overview.gif">
- <icaption>Overview selection</icaption>
- </image>
- </section>
- <section>
- <title>Processes selection</title>
- <p>
- To get a more detailed description we can select the process view by
- clicking the <c>processes</c> button in the menu.
- </p>
- <p>
- The table shows process id's that are click-able and direct you to
- the process information page, a lifetime bar that presents a rough estimate
- in green color about when the process was alive during profiling, an
- entry-point, its registered name if it had one and the process's
- parent id.
- </p>
- <p>
- We can select which processes we want to compare and then hit the
- <c>compare</c> button on the top right of the screen.
- </p>
- <image file="percept_processes.gif">
- <icaption>Processes selection</icaption>
- </image>
- </section>
- <section>
- <title>Compare selection</title>
- <p>
- The activity bar under the concurrency graph shows each process's
- runnability. The color green shows when a process is active (which is
- running or runnable) and the white color represents time when a
- process is inactive (waiting in a receive or is suspended).
- </p>
- <p>
- To inspect a certain process click on the process id button, this will
- direct you to a process information page for that specific process.
- </p>
- <image file="percept_compare.gif">
- <icaption>Processes compare selection</icaption>
- </image>
- </section>
- <section>
- <title>Process information selection</title>
- <p>
- Here we can some general information for the process. Parent and
- children processes, spawn and exit times, entry-point and start arguments.
- </p>
- <p>
- We can also see the process' inactive times. How many times it has
- been waiting, statistical information and most importantly in which
- function.
- </p>
- <p>
- The time percentages presented in process information are of time spent in waiting, not total run time.
- </p>
- <image file="percept_processinfo.gif">
- <icaption>Process information selection</icaption>
- </image>
- </section>
- </section>
- </section>
-</chapter>
diff --git a/lib/percept/doc/src/ref_man.xml b/lib/percept/doc/src/ref_man.xml
deleted file mode 100644
index 143312489b..0000000000
--- a/lib/percept/doc/src/ref_man.xml
+++ /dev/null
@@ -1,48 +0,0 @@
-<?xml version="1.0" encoding="utf-8" ?>
-<!DOCTYPE application SYSTEM "application.dtd">
-
-<application xmlns:xi="http://www.w3.org/2001/XInclude">
- <header>
- <copyright>
- <year>2007</year>
- <year>2016</year>
- <holder>Ericsson AB, All Rights Reserved</holder>
- </copyright>
- <legalnotice>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- The Initial Developer of the Original Code is Ericsson AB.
- </legalnotice>
-
- <title>Percept Reference Manual</title>
- <prepared>Edoc</prepared>
- <docno></docno>
- <date>2007-11-02</date>
- <rev>1.0</rev>
- <file>ref_man.xml</file>
- </header>
- <description>
- <p>
- <em>Percept</em> is an acronym for <em>P</em>ercept - <em>er</em>lang
- <em>c</em>oncurr<em>e</em>ncy <em>p</em>rofiling <em>t</em>ool.
- </p>
- <p>
- It is a tool to visualize application level concurrency and
- identify concurrency bottlenecks.
- </p>
- </description>
- <xi:include href="egd.xml"/>
- <xi:include href="percept.xml"/>
- <xi:include href="percept_profile.xml"/>
-</application>
-
diff --git a/lib/percept/doc/src/sorter.erl b/lib/percept/doc/src/sorter.erl
deleted file mode 100644
index 8d5f2c715c..0000000000
--- a/lib/percept/doc/src/sorter.erl
+++ /dev/null
@@ -1,41 +0,0 @@
--module(sorter).
--export([go/3,loop/0,main/4]).
-
-go(I,N,M) ->
- spawn(?MODULE, main, [I,N,M,self()]),
- receive done -> ok end.
-
-main(I,N,M,Parent) ->
- Pids = lists:foldl(
- fun(_,Ps) ->
- [ spawn(?MODULE,loop, []) | Ps]
- end, [], lists:seq(1,M)),
-
- lists:foreach(
- fun(_) ->
- send_work(N,Pids),
- gather(Pids)
- end, lists:seq(1,I)),
-
- lists:foreach(
- fun(Pid) ->
- Pid ! {self(), quit}
- end, Pids),
-
- gather(Pids), Parent ! done.
-
-send_work(_,[]) -> ok;
-send_work(N,[Pid|Pids]) ->
- Pid ! {self(),sort,N},
- send_work(round(N*1.2),Pids).
-
-loop() ->
- receive
- {Pid, sort, N} -> dummy_sort(N),Pid ! {self(), done},loop();
- {Pid, quit} -> Pid ! {self(), done}
- end.
-
-dummy_sort(N) -> lists:sort([ random:uniform(N) || _ <- lists:seq(1,N)]).
-
-gather([]) -> ok;
-gather([Pid|Pids]) -> receive {Pid, done} -> gather(Pids) end.
diff --git a/lib/percept/doc/src/test1.gif b/lib/percept/doc/src/test1.gif
deleted file mode 100644
index 70a519d8e3..0000000000
--- a/lib/percept/doc/src/test1.gif
+++ /dev/null
Binary files differ
diff --git a/lib/percept/doc/src/test2.gif b/lib/percept/doc/src/test2.gif
deleted file mode 100644
index f18e1f9e58..0000000000
--- a/lib/percept/doc/src/test2.gif
+++ /dev/null
Binary files differ
diff --git a/lib/percept/doc/src/test3.gif b/lib/percept/doc/src/test3.gif
deleted file mode 100644
index c7581f19aa..0000000000
--- a/lib/percept/doc/src/test3.gif
+++ /dev/null
Binary files differ
diff --git a/lib/percept/doc/src/test4.gif b/lib/percept/doc/src/test4.gif
deleted file mode 100644
index e7d52c08a3..0000000000
--- a/lib/percept/doc/src/test4.gif
+++ /dev/null
Binary files differ
diff --git a/lib/percept/doc/stylesheet.css b/lib/percept/doc/stylesheet.css
deleted file mode 100644
index 24d8a02145..0000000000
--- a/lib/percept/doc/stylesheet.css
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * %CopyrightBegin%
- *
- * Copyright Ericsson AB 2007-2016. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * %CopyrightEnd%
- */
-
-BODY {color: #000000;
- background-color: #ffffff;
- margin-left: .4in}
-H1 {margin-left: -.4in}
-H2 {margin-left: -.4in}
-H3 {margin-left: -.2in}
-.logo{float:right;}
-.toc UL {
- list-style-type: none;
- border: solid;
- border-width: thin;
- padding-left: 10px;
- padding-right: 10px;
- padding-top: 5px;
- padding-bottom: 5px;
- background: #f0f0f0;
- letter-spacing: 2px;
- line-height: 20px;
-}
diff --git a/lib/percept/ebin/.gitignore b/lib/percept/ebin/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/lib/percept/ebin/.gitignore
+++ /dev/null
diff --git a/lib/percept/include/.gitignore b/lib/percept/include/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/lib/percept/include/.gitignore
+++ /dev/null
diff --git a/lib/percept/info b/lib/percept/info
deleted file mode 100644
index 07d58d28ae..0000000000
--- a/lib/percept/info
+++ /dev/null
@@ -1,2 +0,0 @@
-group: tools
-short: A concurrency profiler tool.
diff --git a/lib/percept/priv/Makefile b/lib/percept/priv/Makefile
deleted file mode 100644
index a1912edfc0..0000000000
--- a/lib/percept/priv/Makefile
+++ /dev/null
@@ -1,97 +0,0 @@
-#
-# %CopyrightBegin%
-#
-# Copyright Ericsson AB 2007-2016. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# %CopyrightEnd%
-#
-
-include $(ERL_TOP)/make/target.mk
-include $(ERL_TOP)/make/$(TARGET)/otp.mk
-
-# ----------------------------------------------------
-# Application version
-# ----------------------------------------------------
-include ../vsn.mk
-VSN=$(PERCEPT_VSN)
-
-# ----------------------------------------------------
-# Release directory specification
-# ----------------------------------------------------
-RELSYSDIR = $(RELEASE_PATH)/lib/percept-$(VSN)
-
-# ----------------------------------------------------
-# Target Specs
-# ----------------------------------------------------
-CONF_FILES = \
- server_root/conf/mime.types
-
-HTDOCS_FILES = \
- server_root/htdocs/index.html
-
-IMAGE_FILES = \
- server_root/images/nav.png \
- server_root/images/white.png
-
-SCRIPT_FILES = \
- server_root/scripts/percept_area_select.js \
- server_root/scripts/percept_error_handler.js \
- server_root/scripts/percept_select_all.js
-
-CSS_FILES = \
- server_root/css/percept.css
-
-FONT_FILES = \
- fonts/6x11_latin1.wingsfont
-
-# ----------------------------------------------------
-# FLAGS
-# ----------------------------------------------------
-ERL_COMPILE_FLAGS +=
-
-# ----------------------------------------------------
-# Targets
-# ----------------------------------------------------
-
-debug opt:
-
-clean:
-
-docs:
-
-# ----------------------------------------------------
-# Release Target
-# ----------------------------------------------------
-include $(ERL_TOP)/make/otp_release_targets.mk
-
-release_spec: opt
- # Finished
- $(INSTALL_DIR) "$(RELSYSDIR)/priv/logs"
- $(INSTALL_DIR) "$(RELSYSDIR)/priv/server_root"
- $(INSTALL_DIR) "$(RELSYSDIR)/priv/server_root/htdocs"
- $(INSTALL_DATA) $(HTDOCS_FILES) "$(RELSYSDIR)/priv/server_root/htdocs"
- $(INSTALL_DIR) "$(RELSYSDIR)/priv/server_root/conf"
- $(INSTALL_DATA) $(CONF_FILES) "$(RELSYSDIR)/priv/server_root/conf"
- $(INSTALL_DIR) "$(RELSYSDIR)/priv/server_root/scripts"
- $(INSTALL_DATA) $(SCRIPT_FILES) "$(RELSYSDIR)/priv/server_root/scripts"
- $(INSTALL_DIR) "$(RELSYSDIR)/priv/server_root/css"
- $(INSTALL_DATA) $(CSS_FILES) "$(RELSYSDIR)/priv/server_root/css"
- $(INSTALL_DIR) "$(RELSYSDIR)/priv/server_root/images"
- $(INSTALL_DATA) $(IMAGE_FILES) "$(RELSYSDIR)/priv/server_root/images"
- $(INSTALL_DIR) "$(RELSYSDIR)/priv/fonts"
- $(INSTALL_DATA) $(FONT_FILES) "$(RELSYSDIR)/priv/fonts"
-
-release_docs_spec:
-
diff --git a/lib/percept/priv/fonts/6x11_latin1.wingsfont b/lib/percept/priv/fonts/6x11_latin1.wingsfont
deleted file mode 100644
index d1e1c42eef..0000000000
--- a/lib/percept/priv/fonts/6x11_latin1.wingsfont
+++ /dev/null
Binary files differ
diff --git a/lib/percept/priv/logs/.gitignore b/lib/percept/priv/logs/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/lib/percept/priv/logs/.gitignore
+++ /dev/null
diff --git a/lib/percept/priv/obj/.gitignore b/lib/percept/priv/obj/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/lib/percept/priv/obj/.gitignore
+++ /dev/null
diff --git a/lib/percept/priv/server_root/cgi-bin/.gitignore b/lib/percept/priv/server_root/cgi-bin/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/lib/percept/priv/server_root/cgi-bin/.gitignore
+++ /dev/null
diff --git a/lib/percept/priv/server_root/conf/mime.types b/lib/percept/priv/server_root/conf/mime.types
deleted file mode 100644
index 6245efdbd9..0000000000
--- a/lib/percept/priv/server_root/conf/mime.types
+++ /dev/null
@@ -1,462 +0,0 @@
-application/EDI-Consent
-application/EDI-X12
-application/EDIFACT
-application/activemessage
-application/andrew-inset ez
-application/applefile
-application/atomicmail
-application/batch-SMTP
-application/beep+xml
-application/cals-1840
-application/commonground
-application/cybercash
-application/dca-rft
-application/dec-dx
-application/dvcs
-application/eshop
-application/http
-application/hyperstudio
-application/iges
-application/index
-application/index.cmd
-application/index.obj
-application/index.response
-application/index.vnd
-application/iotp
-application/ipp
-application/isup
-application/font-tdpfr
-application/mac-binhex40 hqx
-application/mac-compactpro cpt
-application/macwriteii
-application/marc
-application/mathematica
-application/mathematica-old
-application/msword doc
-application/news-message-id
-application/news-transmission
-application/ocsp-request
-application/ocsp-response
-application/octet-stream bin dms lha lzh exe class so dll
-application/oda oda
-application/parityfec
-application/pdf pdf
-application/pgp-encrypted
-application/pgp-keys
-application/pgp-signature
-application/pkcs10
-application/pkcs7-mime
-application/pkcs7-signature
-application/pkix-cert
-application/pkix-crl
-application/pkixcmp
-application/postscript ai eps ps
-application/prs.alvestrand.titrax-sheet
-application/prs.cww
-application/prs.nprend
-application/qsig
-application/remote-printing
-application/riscos
-application/rtf
-application/sdp
-application/set-payment
-application/set-payment-initiation
-application/set-registration
-application/set-registration-initiation
-application/sgml
-application/sgml-open-catalog
-application/sieve
-application/slate
-application/smil smi smil
-application/timestamp-query
-application/timestamp-reply
-application/vemmi
-application/vnd.3M.Post-it-Notes
-application/vnd.FloGraphIt
-application/vnd.accpac.simply.aso
-application/vnd.accpac.simply.imp
-application/vnd.acucobol
-application/vnd.aether.imp
-application/vnd.anser-web-certificate-issue-initiation
-application/vnd.anser-web-funds-transfer-initiation
-application/vnd.audiograph
-application/vnd.businessobjects
-application/vnd.bmi
-application/vnd.canon-cpdl
-application/vnd.canon-lips
-application/vnd.claymore
-application/vnd.commerce-battelle
-application/vnd.commonspace
-application/vnd.comsocaller
-application/vnd.contact.cmsg
-application/vnd.cosmocaller
-application/vnd.cups-postscript
-application/vnd.cups-raster
-application/vnd.cups-raw
-application/vnd.ctc-posml
-application/vnd.cybank
-application/vnd.dna
-application/vnd.dpgraph
-application/vnd.dxr
-application/vnd.ecdis-update
-application/vnd.ecowin.chart
-application/vnd.ecowin.filerequest
-application/vnd.ecowin.fileupdate
-application/vnd.ecowin.series
-application/vnd.ecowin.seriesrequest
-application/vnd.ecowin.seriesupdate
-application/vnd.enliven
-application/vnd.epson.esf
-application/vnd.epson.msf
-application/vnd.epson.quickanime
-application/vnd.epson.salt
-application/vnd.epson.ssf
-application/vnd.ericsson.quickcall
-application/vnd.eudora.data
-application/vnd.fdf
-application/vnd.ffsns
-application/vnd.framemaker
-application/vnd.fsc.weblaunch
-application/vnd.fujitsu.oasys
-application/vnd.fujitsu.oasys2
-application/vnd.fujitsu.oasys3
-application/vnd.fujitsu.oasysgp
-application/vnd.fujitsu.oasysprs
-application/vnd.fujixerox.ddd
-application/vnd.fujixerox.docuworks
-application/vnd.fujixerox.docuworks.binder
-application/vnd.fut-misnet
-application/vnd.grafeq
-application/vnd.groove-account
-application/vnd.groove-identity-message
-application/vnd.groove-injector
-application/vnd.groove-tool-message
-application/vnd.groove-tool-template
-application/vnd.groove-vcard
-application/vnd.hhe.lesson-player
-application/vnd.hp-HPGL
-application/vnd.hp-PCL
-application/vnd.hp-PCLXL
-application/vnd.hp-hpid
-application/vnd.hp-hps
-application/vnd.httphone
-application/vnd.hzn-3d-crossword
-application/vnd.ibm.afplinedata
-application/vnd.ibm.MiniPay
-application/vnd.ibm.modcap
-application/vnd.informix-visionary
-application/vnd.intercon.formnet
-application/vnd.intertrust.digibox
-application/vnd.intertrust.nncp
-application/vnd.intu.qbo
-application/vnd.intu.qfx
-application/vnd.irepository.package+xml
-application/vnd.is-xpr
-application/vnd.japannet-directory-service
-application/vnd.japannet-jpnstore-wakeup
-application/vnd.japannet-payment-wakeup
-application/vnd.japannet-registration
-application/vnd.japannet-registration-wakeup
-application/vnd.japannet-setstore-wakeup
-application/vnd.japannet-verification
-application/vnd.japannet-verification-wakeup
-application/vnd.koan
-application/vnd.lotus-1-2-3
-application/vnd.lotus-approach
-application/vnd.lotus-freelance
-application/vnd.lotus-notes
-application/vnd.lotus-organizer
-application/vnd.lotus-screencam
-application/vnd.lotus-wordpro
-application/vnd.mcd
-application/vnd.mediastation.cdkey
-application/vnd.meridian-slingshot
-application/vnd.mif mif
-application/vnd.minisoft-hp3000-save
-application/vnd.mitsubishi.misty-guard.trustweb
-application/vnd.mobius.daf
-application/vnd.mobius.dis
-application/vnd.mobius.msl
-application/vnd.mobius.plc
-application/vnd.mobius.txf
-application/vnd.motorola.flexsuite
-application/vnd.motorola.flexsuite.adsi
-application/vnd.motorola.flexsuite.fis
-application/vnd.motorola.flexsuite.gotap
-application/vnd.motorola.flexsuite.kmr
-application/vnd.motorola.flexsuite.ttc
-application/vnd.motorola.flexsuite.wem
-application/vnd.mozilla.xul+xml
-application/vnd.ms-artgalry
-application/vnd.ms-asf
-application/vnd.ms-excel xls
-application/vnd.ms-lrm
-application/vnd.ms-powerpoint ppt
-application/vnd.ms-project
-application/vnd.ms-tnef
-application/vnd.ms-works
-application/vnd.mseq
-application/vnd.msign
-application/vnd.music-niff
-application/vnd.musician
-application/vnd.netfpx
-application/vnd.noblenet-directory
-application/vnd.noblenet-sealer
-application/vnd.noblenet-web
-application/vnd.novadigm.EDM
-application/vnd.novadigm.EDX
-application/vnd.novadigm.EXT
-application/vnd.osa.netdeploy
-application/vnd.palm
-application/vnd.pg.format
-application/vnd.pg.osasli
-application/vnd.powerbuilder6
-application/vnd.powerbuilder6-s
-application/vnd.powerbuilder7
-application/vnd.powerbuilder7-s
-application/vnd.powerbuilder75
-application/vnd.powerbuilder75-s
-application/vnd.previewsystems.box
-application/vnd.publishare-delta-tree
-application/vnd.pvi.ptid1
-application/vnd.pwg-xhtml-print+xml
-application/vnd.rapid
-application/vnd.s3sms
-application/vnd.seemail
-application/vnd.shana.informed.formdata
-application/vnd.shana.informed.formtemplate
-application/vnd.shana.informed.interchange
-application/vnd.shana.informed.package
-application/vnd.sss-cod
-application/vnd.sss-dtf
-application/vnd.sss-ntf
-application/vnd.street-stream
-application/vnd.svd
-application/vnd.swiftview-ics
-application/vnd.triscape.mxs
-application/vnd.trueapp
-application/vnd.truedoc
-application/vnd.tve-trigger
-application/vnd.ufdl
-application/vnd.uplanet.alert
-application/vnd.uplanet.alert-wbxml
-application/vnd.uplanet.bearer-choice-wbxml
-application/vnd.uplanet.bearer-choice
-application/vnd.uplanet.cacheop
-application/vnd.uplanet.cacheop-wbxml
-application/vnd.uplanet.channel
-application/vnd.uplanet.channel-wbxml
-application/vnd.uplanet.list
-application/vnd.uplanet.list-wbxml
-application/vnd.uplanet.listcmd
-application/vnd.uplanet.listcmd-wbxml
-application/vnd.uplanet.signal
-application/vnd.vcx
-application/vnd.vectorworks
-application/vnd.vidsoft.vidconference
-application/vnd.visio
-application/vnd.vividence.scriptfile
-application/vnd.wap.sic
-application/vnd.wap.slc
-application/vnd.wap.wbxml wbxml
-application/vnd.wap.wmlc wmlc
-application/vnd.wap.wmlscriptc wmlsc
-application/vnd.webturbo
-application/vnd.wrq-hp3000-labelled
-application/vnd.wt.stf
-application/vnd.xara
-application/vnd.xfdl
-application/vnd.yellowriver-custom-menu
-application/whoispp-query
-application/whoispp-response
-application/wita
-application/wordperfect5.1
-application/x-bcpio bcpio
-application/x-cdlink vcd
-application/x-chess-pgn pgn
-application/x-compress
-application/x-cpio cpio
-application/x-csh csh
-application/x-director dcr dir dxr
-application/x-dvi dvi
-application/x-futuresplash spl
-application/x-gtar gtar
-application/x-gzip
-application/x-hdf hdf
-application/x-javascript js
-application/x-koan skp skd skt skm
-application/x-latex latex
-application/x-netcdf nc cdf
-application/x-sh sh
-application/x-shar shar
-application/x-shockwave-flash swf
-application/x-stuffit sit
-application/x-sv4cpio sv4cpio
-application/x-sv4crc sv4crc
-application/x-tar tar
-application/x-tcl tcl
-application/x-tex tex
-application/x-texinfo texinfo texi
-application/x-troff t tr roff
-application/x-troff-man man
-application/x-troff-me me
-application/x-troff-ms ms
-application/x-ustar ustar
-application/x-wais-source src
-application/x400-bp
-application/xml
-application/xml-dtd
-application/xml-external-parsed-entity
-application/zip zip
-audio/32kadpcm
-audio/basic au snd
-audio/g.722.1
-audio/l16
-audio/midi mid midi kar
-audio/mp4a-latm
-audio/mpa-robust
-audio/mpeg mpga mp2 mp3
-audio/parityfec
-audio/prs.sid
-audio/telephone-event
-audio/tone
-audio/vnd.cisco.nse
-audio/vnd.cns.anp1
-audio/vnd.cns.inf1
-audio/vnd.digital-winds
-audio/vnd.everad.plj
-audio/vnd.lucent.voice
-audio/vnd.nortel.vbk
-audio/vnd.nuera.ecelp4800
-audio/vnd.nuera.ecelp7470
-audio/vnd.nuera.ecelp9600
-audio/vnd.octel.sbc
-audio/vnd.qcelp
-audio/vnd.rhetorex.32kadpcm
-audio/vnd.vmx.cvsd
-audio/x-aiff aif aiff aifc
-audio/x-mpegurl m3u
-audio/x-pn-realaudio ram rm
-audio/x-pn-realaudio-plugin rpm
-audio/x-realaudio ra
-audio/x-wav wav
-chemical/x-pdb pdb
-chemical/x-xyz xyz
-image/bmp bmp
-image/cgm
-image/g3fax
-image/gif gif
-image/ief ief
-image/jpeg jpeg jpg jpe
-image/naplps
-image/png png
-image/prs.btif
-image/prs.pti
-image/tiff tiff tif
-image/vnd.cns.inf2
-image/vnd.dwg
-image/vnd.dxf
-image/vnd.fastbidsheet
-image/vnd.fpx
-image/vnd.fst
-image/vnd.fujixerox.edmics-mmr
-image/vnd.fujixerox.edmics-rlc
-image/vnd.mix
-image/vnd.net-fpx
-image/vnd.svf
-image/vnd.wap.wbmp wbmp
-image/vnd.xiff
-image/x-cmu-raster ras
-image/x-portable-anymap pnm
-image/x-portable-bitmap pbm
-image/x-portable-graymap pgm
-image/x-portable-pixmap ppm
-image/x-rgb rgb
-image/x-xbitmap xbm
-image/x-xpixmap xpm
-image/x-xwindowdump xwd
-message/delivery-status
-message/disposition-notification
-message/external-body
-message/http
-message/news
-message/partial
-message/rfc822
-message/s-http
-model/iges igs iges
-model/mesh msh mesh silo
-model/vnd.dwf
-model/vnd.flatland.3dml
-model/vnd.gdl
-model/vnd.gs-gdl
-model/vnd.gtw
-model/vnd.mts
-model/vnd.vtu
-model/vrml wrl vrml
-multipart/alternative
-multipart/appledouble
-multipart/byteranges
-multipart/digest
-multipart/encrypted
-multipart/form-data
-multipart/header-set
-multipart/mixed
-multipart/parallel
-multipart/related
-multipart/report
-multipart/signed
-multipart/voice-message
-text/calendar
-text/css css
-text/directory
-text/enriched
-text/html html htm
-text/parityfec
-text/plain asc txt
-text/prs.lines.tag
-text/rfc822-headers
-text/richtext rtx
-text/rtf rtf
-text/sgml sgml sgm
-text/tab-separated-values tsv
-text/t140
-text/uri-list
-text/vnd.DMClientScript
-text/vnd.IPTC.NITF
-text/vnd.IPTC.NewsML
-text/vnd.abc
-text/vnd.curl
-text/vnd.flatland.3dml
-text/vnd.fly
-text/vnd.fmi.flexstor
-text/vnd.in3d.3dml
-text/vnd.in3d.spot
-text/vnd.latex-z
-text/vnd.motorola.reflex
-text/vnd.ms-mediapackage
-text/vnd.wap.si
-text/vnd.wap.sl
-text/vnd.wap.wml wml
-text/vnd.wap.wmlscript wmls
-text/x-setext etx
-text/x-server-parsed-html shtml
-text/xml xml xsl
-text/xml-external-parsed-entity
-video/mp4v-es
-video/mpeg mpeg mpg mpe
-video/parityfec
-video/pointer
-video/quicktime qt mov
-video/vnd.fvt
-video/vnd.motorola.video
-video/vnd.motorola.videop
-video/vnd.mpegurl mxu
-video/vnd.mts
-video/vnd.nokia.interleaved-multimedia
-video/vnd.vivo
-video/x-msvideo avi
-video/x-sgi-movie movie
-x-conference/x-cooltalk ice
-
-
-
diff --git a/lib/percept/priv/server_root/css/percept.css b/lib/percept/priv/server_root/css/percept.css
deleted file mode 100644
index 2d0734b6b6..0000000000
--- a/lib/percept/priv/server_root/css/percept.css
+++ /dev/null
@@ -1,162 +0,0 @@
-/*
- * %CopyrightBegin%
- *
- * Copyright Ericsson AB 2007-2016. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * %CopyrightEnd%
- */
-
-/* Globals */
-html, body {
- margin: 0;
- padding: 0;
- font: 12px Verdana;
- background: #7a83a2;
-}
-
-table {
- border-collapse: collapse;
- /*width: 100%;*/
-}
-
-tr.even {
- background-color: #ffffff; color: black;
-}
-
-tr.odd {
- background-color: #def2ef; color: black;
-}
-
-td {
- vertical-align: top;
- text-align: right;
- font: 14px Verdana;
-}
-
-th {
- letter-spacing: 2px;
- text-align: right;
- padding: 4px 4px 4px 8px;
-}
-
-a {
- color: yellow;
- text-decoration: none;
-}
-
-a:hover {
- text-decoration: underline;
-}
-
-td a {
- color: #101010;
-}
-
-img {
- border: 0;
-}
-
-
-/* Header and footer stuff */
-
-#header {
- font: bold 24px Verdana;
- padding-left: 156px;
- padding-right: 156px;
- padding-top: 10px;
- padding-bottom: 10px;
- height: 74px;
- text-align: right;
- background: #7a83a2;
-}
-
-#footer {
- font: 12px Verdana;
- position: relative;
- padding: 5px;
- border-top: 1px solid black;
- clear:left;
-}
-
-
-/* Content stuff */
-
-#content {
- background: #fefefe;
- position: relative;
- padding: 5px 25px 5px 25px;
- margin: 0px 60px 0px 60px;
- border-top: 1px solid #383a32;
- border-left: 1px solid #383a32;
- border-right: 1px solid #383a32;
- border-bottom: 1px solid #383a32;
-}
-
-.table_header {
- text-decoration: underline;
- width: 100%;
-}
-
-/* Menu */
-
-#menu {
- margin: 0px 60px 0px 60px;
- height: 30px;
- padding-right: 0px;
- background-image: url('../images/nav.png');
- background-repeat: repeat-x;
- padding-top: 0px;
- border-top: 1px solid #383a32;
- border-left: 1px solid #383a32;
- border-right: 1px solid #383a32;
-}
-
-.menu_tabs {
- overflow: hidden;
-}
-
-.menu_tabs ul {
- margin: 0;
- padding: 0;
- font: bold 12px Verdana;
- list-style-type: none;
-}
-
-.menu_tabs li {
- display: inline;
- margin: 0;
- background-repeat: repeat-x;
-}
-
-.menu_tabs li a {
- float: right;
- display: block;
- text-decoration: none;
- margin: 0;
- padding: 8px 7px 8px 3px;
- border-left: 1px solid #777487;
-}
-
-.menu_tabs li a:visited {
- color: black;
-}
-
-.menu_tabs li a:hover {
- background: #cae8ea;
-}
-
-.menu_tabs li a:selected, .menu_tabs li a:active {
- background: yellow;
-}
diff --git a/lib/percept/priv/server_root/htdocs/index.html b/lib/percept/priv/server_root/htdocs/index.html
deleted file mode 100644
index f7322cba89..0000000000
--- a/lib/percept/priv/server_root/htdocs/index.html
+++ /dev/null
@@ -1,41 +0,0 @@
-<!--
- %CopyrightBegin%
-
- Copyright Ericsson AB 2007-2016. All Rights Reserved.
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- %CopyrightEnd%
--->
-<html>
-<head>
- <title>percept</title>
- <meta http-equiv="Content-Type" content="text/html;charset=iso-8859-1" />
- <link href="/css/percept.css" rel="stylesheet" type="text/css">
-</head>
-
-<body>
- <div id="header"><a href=/index.html>percept</a></div>
- <div id="menu" class="menu_tabs">
- <ul>
- <li><a href=/cgi-bin/percept_html/databases_page>databases</a></li>
- <li><a href=/cgi-bin/percept_html/processes_page>processes</a></li>
- <li><a href=/cgi-bin/percept_html/page>overview</a></li>
- </ul>
- </div>
- <div id="content">
- <p>Percept - Erlang Concurrency Profiling Tool</p>
- </div>
-</body>
-</html>
-
diff --git a/lib/percept/priv/server_root/images/nav.png b/lib/percept/priv/server_root/images/nav.png
deleted file mode 100644
index d136e806b1..0000000000
--- a/lib/percept/priv/server_root/images/nav.png
+++ /dev/null
Binary files differ
diff --git a/lib/percept/priv/server_root/images/white.png b/lib/percept/priv/server_root/images/white.png
deleted file mode 100644
index 94381b429d..0000000000
--- a/lib/percept/priv/server_root/images/white.png
+++ /dev/null
Binary files differ
diff --git a/lib/percept/priv/server_root/scripts/percept_area_select.js b/lib/percept/priv/server_root/scripts/percept_area_select.js
deleted file mode 100644
index 83fbb02c92..0000000000
--- a/lib/percept/priv/server_root/scripts/percept_area_select.js
+++ /dev/null
@@ -1,182 +0,0 @@
-/*
- * %CopyrightBegin%
- *
- * Copyright Ericsson AB 2007-2016. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * %CopyrightEnd%
- */
-
-function size_image(img, src) {
- percept_content = document.getElementById("content");
- var width = percept_content.offsetWidth - 120;
- var imgfile = "/cgi-bin/percept_graph/" + src + "&width=" + width;
- img.src = imgfile;
- img.onload = '';
-}
-
-function load_image() {
- var percept_graph = document.getElementById("percept_graph");
- if (percept_graph) {
- percept_content = document.getElementById("content");
- var width = percept_content.offsetWidth - 50;
- var height = max(screen.height - 550, 600);
- var rmin = document.form_area.data_min.value;
- var rmax = document.form_area.data_max.value;
-
- percept_graph.style.backgroundImage = "url('/cgi-bin/percept_graph/graph" +
- "?range_min=" + rmin +
- "&range_max=" + rmax +
- "&width=" + width +
- "&height=" + height + "')";
- percept_graph.style.width = width;
- percept_graph.style.height = height;
- }
-}
-
-function select_image() {
- var Graph = document.getElementById("percept_graph");
- if (Graph) {
- var GraphIndex = document.form_area.graph_select.selectedIndex;
- var GraphSelectValue = document.form_area.graph_select.options[GraphIndex].value;
- Graph.style.backgroundImage = "url('" + GraphSelectValue +"')";
- }
-}
-
-function select_down(event) {
- var Graf = document.getElementById("percept_graph");
- var Area = document.getElementById("percept_areaselect");
- var x = event.offsetX?(event.offsetX):event.pageX-Graf.offsetLeft;
- x = x - 60;
-
- var width = Graf.offsetWidth;
- var height = Graf.offsetHeight;
- var margin = 20;
-
- var Xmin = document.form_area.data_min.value;
- var Xmax = document.form_area.data_max.value;
-
- // Trim edges
-
- if ( x < margin ) {
- x = margin;
- }
-
- if ( x > width - margin ) {
- x = width - margin;
- }
-
- Area.style.left = x;
- Area.style.top = height - margin;
- Area.style.width = 1;
- Area.style.height = margin;
- Area.moving = true;
- Area.bgcolor = "#00ff00";
- Area.style.visibility = "visible";
- Area.style.borderRight = "1px solid #000"
- Area.style.borderLeft = "1px solid #000"
- Area.style.opacity = 0.65;
- Area.style.filter = 'alpha(opacity=65)';
- var RangeMin = convert_image2graph(x, Xmin, Xmax, margin, width - margin);
- if (RangeMin == 0) document.form_area.range_min.value = 0.0;
- else document.form_area.range_min.value = RangeMin;
-}
-
- function select_move(event) {
- var Graf = document.getElementById("percept_graph");
- var Area = document.getElementById("percept_areaselect");
- var x = event.offsetX?(event.offsetX):event.pageX-Graf.offsetLeft;
- x = x - 60;
- if (Area.moving == true) {
-
- var width = Graf.offsetWidth;
- var height = Graf.offsetHeight;
- var margin = 20;
- var Xmin = document.form_area.data_min.value;
- var Xmax = document.form_area.data_max.value;
-
- // Trim edges
-
- if ( x < margin ) {
- x = margin;
- }
-
- if ( x > width - margin ) {
- x = width - margin;
- }
-
- var x0 = min(x, Area.offsetLeft);
- var x1 = max(x, Area.offsetLeft);
- var w = (x1 - x0);
- Area.style.left = x0;
- Area.style.width = w;
- var RangeMin = convert_image2graph(x0, Xmin, Xmax, margin, width - margin);
- var RangeMax = convert_image2graph(x1, Xmin, Xmax, margin, width - margin);
- Area.style.visibility = "visible";
-
- if (RangeMin == 0) document.form_area.range_min.value = 0.0;
- else document.form_area.range_min.value = RangeMin;
- if (RangeMax == 0) document.form_area.range_max.value = 0.0;
- else document.form_area.range_max.value = RangeMax;
- }
-}
-
-function select_up(event) {
- var Graf = document.getElementById("percept_graph");
- var Area = document.getElementById("percept_areaselect");
- var x = event.offsetX?(event.offsetX):event.pageX-Graf.offsetLeft;
-
- x = x - 60;
- var width = Graf.offsetWidth;
- var height = Graf.offsetHeight;
- var margin = 20;
- var Xmin = document.form_area.data_min.value;
- var Xmax = document.form_area.data_max.value;
-
- // Trim edges
-
- if ( x < margin ) {
- x = margin;
- }
-
- if ( x > width - margin ) {
- x = width - margin;
- }
-
- var w = (x - Area.offsetLeft);
-
- Area.moving = false;
- Area.style.width = w;
- var RangeMax = convert_image2graph(x, Xmin, Xmax, margin, width - margin);
- if (RangeMax == 0) document.form_area.range_max.value = 0.0;
- else document.form_area.range_max.value = RangeMax;
-}
-
-function min(A, B) {
- if (A > B) return B;
- else return A;
-}
-
-function max(A,B) {
- if (A > B) return A;
- else return B;
-}
-
-function convert_image2graph(X, Xmin, Xmax, X0, X1) {
- var ImageWidth = X1 - X0;
- var RangeWidth = Xmax - Xmin;
- var DX = RangeWidth/ImageWidth;
- var Xprime = (X - X0)*DX + Xmin*1.0;
- return Xprime;
-}
diff --git a/lib/percept/priv/server_root/scripts/percept_error_handler.js b/lib/percept/priv/server_root/scripts/percept_error_handler.js
deleted file mode 100644
index dad8f2b566..0000000000
--- a/lib/percept/priv/server_root/scripts/percept_error_handler.js
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * %CopyrightBegin%
- *
- * Copyright Ericsson AB 2007-2016. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * %CopyrightEnd%
- */
-
-var onerror=handleErr;
-
-function handleErr(msg,url,l) {
- var txt = "Error: " + msg + "\nURL: " + url + "\nCode line: " + l;
- alert(txt);
-}
diff --git a/lib/percept/priv/server_root/scripts/percept_select_all.js b/lib/percept/priv/server_root/scripts/percept_select_all.js
deleted file mode 100644
index c8eb966059..0000000000
--- a/lib/percept/priv/server_root/scripts/percept_select_all.js
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * %CopyrightBegin%
- *
- * Copyright Ericsson AB 2007-2016. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * %CopyrightEnd%
- */
-
-function selectall() {
- for (var i = 0; i < document.process_select.elements.length; i++) {
- var e = document.process_select.elements[i];
- if ((e.name != 'select_all') && (e.type == 'checkbox')) {
- e.checked = document.process_select.select_all.checked;
- }
- }
-}
diff --git a/lib/percept/src/Makefile b/lib/percept/src/Makefile
deleted file mode 100644
index b2ec87d08c..0000000000
--- a/lib/percept/src/Makefile
+++ /dev/null
@@ -1,108 +0,0 @@
-#
-# %CopyrightBegin%
-#
-# Copyright Ericsson AB 2007-2016. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# %CopyrightEnd%
-
-include $(ERL_TOP)/make/target.mk
-include $(ERL_TOP)/make/$(TARGET)/otp.mk
-
-# ----------------------------------------------------
-# Application version
-# ----------------------------------------------------
-include ../vsn.mk
-VSN=$(PERCEPT_VSN)
-
-# ----------------------------------------------------
-# Release directory specification
-# ----------------------------------------------------
-RELSYSDIR = $(RELEASE_PATH)/lib/percept-$(VSN)
-
-# ----------------------------------------------------
-# Common Macros
-# ----------------------------------------------------
-
-MODULES= \
- egd \
- egd_png \
- egd_font \
- egd_render \
- egd_primitives \
- percept \
- percept_db \
- percept_html \
- percept_image \
- percept_graph \
- percept_analyzer
-
-
-#HRL_FILES= ../include/
-
-INTERNAL_HRL_FILES= egd.hrl percept.hrl
-
-ERL_FILES= $(MODULES:%=%.erl)
-
-TARGET_FILES= $(MODULES:%=$(EBIN)/%.$(EMULATOR)) $(APP_TARGET) $(APPUP_TARGET)
-
-APP_FILE= percept.app
-
-APP_SRC= $(APP_FILE).src
-APP_TARGET= $(EBIN)/$(APP_FILE)
-
-APPUP_FILE= percept.appup
-
-APPUP_SRC= $(APPUP_FILE).src
-APPUP_TARGET= $(EBIN)/$(APPUP_FILE)
-
-# ----------------------------------------------------
-# FLAGS
-# ----------------------------------------------------
-ERL_COMPILE_FLAGS += +warn_unused_vars -I../include
-
-# ----------------------------------------------------
-# Targets
-# ----------------------------------------------------
-
-debug opt: $(TARGET_FILES)
-
-clean:
- rm -f $(TARGET_FILES)
- rm -f errs core *~
-
-$(APP_TARGET): $(APP_SRC) ../vsn.mk
- $(vsn_verbose)sed -e 's;%VSN%;$(VSN);' $< > $@
-
-$(APPUP_TARGET): $(APPUP_SRC) ../vsn.mk
- $(vsn_verbose)sed -e 's;%VSN%;$(VSN);' $< > $@
-
-docs:
-
-# ----------------------------------------------------
-# Release Target
-# ----------------------------------------------------
-include $(ERL_TOP)/make/otp_release_targets.mk
-
-release_spec: opt
- $(INSTALL_DIR) "$(RELSYSDIR)/src"
- $(INSTALL_DATA) $(ERL_FILES) "$(RELSYSDIR)/src"
- $(INSTALL_DATA) $(INTERNAL_HRL_FILES) "$(RELSYSDIR)/src"
-# $(INSTALL_DIR) "$(RELSYSDIR)/include"
-# $(INSTALL_DATA) $(HRL_FILES) "$(RELSYSDIR)/include"
- $(INSTALL_DIR) "$(RELSYSDIR)/ebin"
- $(INSTALL_DATA) $(TARGET_FILES) "$(RELSYSDIR)/ebin"
-
-release_docs_spec:
-
diff --git a/lib/percept/src/egd.erl b/lib/percept/src/egd.erl
deleted file mode 100644
index fe52da71f1..0000000000
--- a/lib/percept/src/egd.erl
+++ /dev/null
@@ -1,275 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2008-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-
-%%
-%% @doc egd - erlang graphical drawer
-%%
-%%
-
--module(egd).
-
--export([create/2, destroy/1, information/1]).
--export([text/5, line/4, color/1, color/2]).
--export([rectangle/4, filledRectangle/4, filledEllipse/4]).
--export([arc/4, arc/5]).
--export([render/1, render/2, render/3]).
-
--export([filledTriangle/5, polygon/3]).
-
--export([save/2]).
-
--include("egd.hrl").
-
-%%==========================================================================
-%% Type definitions
-%%==========================================================================
-
-%% @type egd_image()
-%% @type font()
-%% @type point() = {integer(), integer()}
-%% @type color()
-%% @type render_option() = {render_engine, opaque} | {render_engine, alpha}
-
--type egd_image() :: pid().
--type point() :: {non_neg_integer(), non_neg_integer()}.
--type render_option() :: {'render_engine', 'opaque'} | {'render_engine', 'alpha'}.
--type color() :: {float(), float(), float(), float()}.
-
-%%==========================================================================
-%% Interface functions
-%%==========================================================================
-
-%% @spec create(integer(), integer()) -> egd_image()
-%% @doc Creates an image area and returns its reference.
-
--spec create(Width :: integer(), Height :: integer()) -> egd_image().
-
-create(Width,Height) ->
- spawn_link(fun() -> init(trunc(Width),trunc(Height)) end).
-
-
-%% @spec destroy(egd_image()) -> ok
-%% @doc Destroys the image.
-
--spec destroy(Image :: egd_image()) -> ok.
-
-destroy(Image) ->
- cast(Image, destroy).
-
-
-%% @spec render(egd_image()) -> binary()
-%% @equiv render(Image, png, [{render_engine, opaque}])
-
--spec render(Image :: egd_image()) -> binary().
-
-render(Image) ->
- render(Image, png, [{render_engine, opaque}]).
-
-%% @spec render(egd_image(), png | raw_bitmap) -> binary()
-%% @equiv render(Image, Type, [{render_engine, opaque}])
-
-render(Image, Type) ->
- render(Image, Type, [{render_engine, opaque}]).
-
-%% @spec render(egd_image(), png | raw_bitmap, [render_option()]) -> binary()
-%% @doc Renders a binary from the primitives specified by egd_image(). The
-%% binary can either be a raw bitmap with rgb triplets or a binary in png
-%% format.
-
--spec render(
- Image :: egd_image(),
- Type :: 'png' | 'raw_bitmap' | 'eps',
- Options :: [render_option()]) -> binary().
-
-render(Image, Type, Options) ->
- {render_engine, RenderType} = proplists:lookup(render_engine, Options),
- call(Image, {render, Type, RenderType}).
-
-
-%% @spec information(egd_image()) -> ok
-%% @hidden
-%% @doc Writes out information about the image. This is mainly a debug
-%% feature.
-
-information(Pid) ->
- cast(Pid, information).
-
-%% @spec line(egd_image(), point(), point(), color()) -> ok
-%% @doc Creates a line object from P1 to P2 in the image.
-
--spec line(
- Image :: egd_image(),
- P1 :: point(),
- P2 :: point(),
- Color :: color()) -> 'ok'.
-
-line(Image, P1, P2, Color) ->
- cast(Image, {line, P1, P2, Color}).
-
-%% @spec color( Value | Name ) -> color()
-%% where
-%% Value = {byte(), byte(), byte()} | {byte(), byte(), byte(), byte()}
-%% Name = black | silver | gray | white | maroon | red | purple | fuchia | green | lime | olive | yellow | navy | blue | teal | aqua
-%% @doc Creates a color reference.
-
--spec color(Value :: {byte(), byte(), byte()} | {byte(), byte(), byte(), byte()} | atom()) ->
- color().
-
-color(Color) ->
- egd_primitives:color(Color).
-
-%% @spec color(egd_image(), {byte(), byte(), byte()}) -> color()
-%% @doc Creates a color reference.
-%% @hidden
-
-color(_Image, Color) ->
- egd_primitives:color(Color).
-
-%% @spec text(egd_image(), point(), font(), string(), color()) -> ok
-%% @doc Creates a text object.
-
-text(Image, P, Font, Text, Color) ->
- cast(Image, {text, P, Font, Text, Color}).
-
-%% @spec rectangle(egd_image(), point(), point(), color()) -> ok
-%% @doc Creates a rectangle object.
-
-rectangle(Image, P1, P2, Color) ->
- cast(Image, {rectangle, P1, P2, Color}).
-
-%% @spec filledRectangle(egd_image(), point(), point(), color()) -> ok
-%% @doc Creates a filled rectangle object.
-
-filledRectangle(Image, P1, P2, Color) ->
- cast(Image, {filled_rectangle, P1, P2, Color}).
-
-%% @spec filledEllipse(egd_image(), point(), point(), color()) -> ok
-%% @doc Creates a filled ellipse object.
-
-filledEllipse(Image, P1, P2, Color) ->
- cast(Image, {filled_ellipse, P1, P2, Color}).
-
-%% @spec filledTriangle(egd_image(), point(), point(), point(), color()) -> ok
-%% @hidden
-%% @doc Creates a filled triangle object.
-
-filledTriangle(Image, P1, P2, P3, Color) ->
- cast(Image, {filled_triangle, P1, P2, P3, Color}).
-
-%% @spec polygon(egd_image(), [point()], color()) -> ok
-%% @hidden
-%% @doc Creates a filled polygon object.
-
-polygon(Image, Pts, Color) ->
- cast(Image, {polygon, Pts, Color}).
-
-%% @spec arc(egd_image(), point(), point(), color()) -> ok
-%% @hidden
-%% @doc Creates an arc with radius of bbx corner.
-
-arc(Image, P1, P2, Color) ->
- cast(Image, {arc, P1, P2, Color}).
-
-%% @spec arc(egd_image(), point(), point(), integer(), color()) -> ok
-%% @hidden
-%% @doc Creates an arc.
-
-arc(Image, P1, P2, D, Color) ->
- cast(Image, {arc, P1, P2, D, Color}).
-
-%% @spec save(binary(), string()) -> ok
-%% @doc Saves the binary to file.
-
-save(Binary, Filename) when is_binary(Binary) ->
- ok = file:write_file(Filename, Binary),
- ok.
-% ---------------------------------
-% Aux functions
-% ---------------------------------
-
-cast(Pid, Command) ->
- Pid ! {egd, self(), Command},
- ok.
-
-call(Pid, Command) ->
- Pid ! {egd, self(), Command},
- receive {egd, Pid, Result} -> Result end.
-
-% ---------------------------------
-% Server loop
-% ---------------------------------
-
-init(W,H) ->
- Image = egd_primitives:create(W,H),
- loop(Image).
-
-loop(Image) ->
- receive
- % Quitting
- {egd, _Pid, destroy} -> ok;
-
- % Rendering
- {egd, Pid, {render, BinaryType, RenderType}} ->
- case BinaryType of
- raw_bitmap ->
- Bitmap = egd_render:binary(Image, RenderType),
- Pid ! {egd, self(), Bitmap},
- loop(Image);
- eps ->
- Eps = egd_render:eps(Image),
- Pid ! {egd, self(), Eps},
- loop(Image);
- png ->
- Bitmap = egd_render:binary(Image, RenderType),
- Png = egd_png:binary(
- Image#image.width,
- Image#image.height,
- Bitmap),
- Pid ! {egd, self(), Png},
- loop(Image);
- Unhandled ->
- Pid ! {egd, self(), {error, {format, Unhandled}}},
- loop(Image)
- end;
-
- % Drawing primitives
- {egd, _Pid, {line, P1, P2, C}} ->
- loop(egd_primitives:line(Image, P1, P2, C));
- {egd, _Pid, {text, P, Font, Text, C}} ->
- loop(egd_primitives:text(Image, P, Font, Text, C));
- {egd, _Pid, {filled_ellipse, P1, P2, C}} ->
- loop(egd_primitives:filledEllipse(Image, P1, P2, C));
- {egd, _Pid, {filled_rectangle, P1, P2, C}} ->
- loop(egd_primitives:filledRectangle(Image, P1, P2, C));
- {egd, _Pid, {filled_triangle, P1, P2, P3, C}} ->
- loop(egd_primitives:filledTriangle(Image, P1, P2, P3, C));
- {egd, _Pid, {polygon, Pts, C}} ->
- loop(egd_primitives:polygon(Image, Pts, C));
- {egd, _Pid, {arc, P1, P2, C}} ->
- loop(egd_primitives:arc(Image, P1, P2, C));
- {egd, _Pid, {arc, P1, P2, D, C}} ->
- loop(egd_primitives:arc(Image, P1, P2, D, C));
- {egd, _Pid, {rectangle, P1, P2, C}} ->
- loop(egd_primitives:rectangle(Image, P1, P2, C));
- {egd, _Pid, information} ->
- egd_primitives:info(Image),
- loop(Image);
- _ ->
- loop(Image)
- end.
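For reference, a minimal usage sketch of the egd API deleted above, based only on the exports and edoc comments in this hunk; the output file name is illustrative:

    %% Create an image, draw two primitives, render to PNG, and write it out.
    egd_example() ->
        Im  = egd:create(300, 200),
        Red = egd:color({255, 0, 0}),
        ok  = egd:line(Im, {10, 10}, {290, 190}, Red),
        ok  = egd:filledRectangle(Im, {30, 30}, {120, 90}, egd:color(green)),
        Png = egd:render(Im, png, [{render_engine, opaque}]),
        ok  = egd:save(Png, "egd_example.png"),
        egd:destroy(Im).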
diff --git a/lib/percept/src/egd_font.erl b/lib/percept/src/egd_font.erl
deleted file mode 100644
index ef1cc434df..0000000000
--- a/lib/percept/src/egd_font.erl
+++ /dev/null
@@ -1,173 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2008-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-
-%%
-%% @doc egd_font
-%%
-
--module(egd_font).
-
--export([load/1, size/1, glyph/2]).
--include("egd.hrl").
-
-%% Font representation in ets table
-%% egd_font_table
-%%
-%% Information:
-%% {Key, Description, Size}
-%% Key :: {Font :: atom(), information}
-%% Description :: any(), Description header from font file
-%% Size :: {W :: integer(), H :: integer()}
-%%
-%% Glyphs:
-%% {Key, Translation, LSs} where
-%% Key :: {Font :: atom(), Code :: integer()}, Code = glyph char code
-%% Translation :: {
-%% W :: integer(), % BBx width
-%% H :: integer(), % BBx height
-%% X0 :: integer(), % X start
-%% Y0 :: integer(), % Y start
-%% Xm :: integer(), % Glyph X move when drawing
-%% }
-%% LSs :: [[{Xl :: integer(), Xr :: integer()}]]
-%% The first list is height (top to bottom), the inner list is the list
-%% of line spans for the glyph's horizontal pixels.
-%%
-
-%%==========================================================================
-%% Interface functions
-%%==========================================================================
-
-size(Font) ->
- [{_Key, _Description, Size}] = ets:lookup(egd_font_table,{Font,information}),
- Size.
-
-glyph(Font, Code) ->
- [{_Key, Translation, LSs}] = ets:lookup(egd_font_table,{Font,Code}),
- {Translation, LSs}.
-
-load(Filename) ->
- {ok, Bin} = file:read_file(Filename),
- Font = erlang:binary_to_term(Bin),
- load_font_header(Font).
-
-%%==========================================================================
-%% Internal functions
-%%==========================================================================
-
-%% ETS handler functions
-
-initialize_table() ->
- egd_font_table = ets:new(egd_font_table, [named_table, ordered_set, public]),
- ok.
-
-glyph_insert(Font, Code, Translation, LSs) ->
- Element = {{Font, Code}, Translation, LSs},
- ets:insert(egd_font_table, Element).
-
-font_insert(Font, Description, Dimensions) ->
- Element = {{Font, information}, Description, Dimensions},
- ets:insert(egd_font_table, Element).
-
-%% Font loader functions
-
-is_font_loaded(Font) ->
- try
- case ets:lookup(egd_font_table, {Font, information}) of
- [] -> false;
- _ -> true
- end
- catch
- error:_ ->
- initialize_table(),
- false
- end.
-
-
-load_font_header({_Type, _Version, Font}) ->
- load_font_body(Font).
-
-load_font_body({Key,Desc,W,H,Glyphs,Bitmaps}) ->
- case is_font_loaded(Key) of
- true -> Key;
- false ->
- % insert dimensions
- font_insert(Key, Desc, {W,H}),
- parse_glyphs(Glyphs, Bitmaps, Key),
- Key
- end.
-
-parse_glyphs([], _ , _Key) -> ok;
-parse_glyphs([Glyph|Glyphs], Bs, Key) ->
- {Code, Translation, LSs} = parse_glyph(Glyph, Bs),
- glyph_insert(Key, Code, Translation, LSs),
- parse_glyphs(Glyphs, Bs, Key).
-
-parse_glyph({Code,W,H,X0,Y0,Xm,Offset}, Bitmasks) ->
- BytesPerLine = ((W+7) div 8),
- NumBytes = BytesPerLine*H,
- <<_:Offset/binary,Bitmask:NumBytes/binary,_/binary>> = Bitmasks,
- LSs = render_glyph(W,H,X0,Y0,Xm,Bitmask),
- {Code, {W,H,X0,Y0,Xm}, LSs}.
-
-render_glyph(W, H, X0, Y0, Xm, Bitmask) ->
- render_glyph(W,{0,H},X0,Y0,Xm,Bitmask, []).
-render_glyph(_W, {H,H}, _X0, _Y0, _Xm, _Bitmask, Out) -> Out;
-render_glyph(W, {Hi,H}, X0, Y0,Xm, Bitmask , LSs) ->
- N = ((W+7) div 8),
- O = N*Hi,
- <<_:O/binary, Submask/binary>> = Bitmask,
- LS = render_glyph_horizontal(
- Submask, % line glyph bitmask
- {down, W - 1}, % loop state
- W - 1, % Width
- []), % Linespans
- render_glyph(W,{Hi+1,H},X0,Y0,Xm, Bitmask, [LS|LSs]).
-
-render_glyph_horizontal(Value, {Pr, Px}, 0, Spans) ->
- Cr = bit_spin(Value, 0),
- case {Pr,Cr} of
- {up , up } -> % closure of interval since its last
- [{0, Px}|Spans];
- {up , down} -> % closure of interval
- [{1, Px}|Spans];
- {down, up } -> % beginning of interval
- [{0, 0}|Spans];
- {down, down} -> % no change in interval
- Spans
- end;
-render_glyph_horizontal(Value, {Pr, Px}, Cx, Spans) ->
- Cr = bit_spin(Value, Cx),
- case {Pr,Cr} of
- {up , up } -> % no change in interval
- render_glyph_horizontal(Value, {Cr, Px}, Cx - 1, Spans);
- {up , down} -> % closure of interval
- render_glyph_horizontal(Value, {Cr, Cx}, Cx - 1, [{Cx+1,Px}|Spans]);
- {down, up } -> % beginning of interval
- render_glyph_horizontal(Value, {Cr, Cx}, Cx - 1, Spans);
- {down, down} -> % no change in interval
- render_glyph_horizontal(Value, {Cr, Px}, Cx - 1, Spans)
- end.
-
-bit_spin(Value, Cx) ->
- <<_:Cx, Bit:1, _/bits>> = Value,
- case Bit of
- 1 -> up;
- 0 -> down
- end.
diff --git a/lib/percept/src/egd_png.erl b/lib/percept/src/egd_png.erl
deleted file mode 100644
index fe660513b4..0000000000
--- a/lib/percept/src/egd_png.erl
+++ /dev/null
@@ -1,105 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2008-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-
-
-%% This code was originally written by Dan Gudmundsson for png-handling in
-%% wings3d (e3d__png).
-%%
-%% @doc egd
-%%
-
--module(egd_png).
-
--export([binary/3]).
-
--include("egd.hrl").
-
--define(MAGIC, 137,$P,$N,$G,$\r,$\n,26,$\n).
-
--define(GREYSCALE, 0).
--define(TRUECOLOUR, 2).
--define(INDEXED, 3).
--define(GREYSCALE_A, 4).
--define(TRUECOLOUR_A,6).
-
--define(MAX_WBITS,15).
-
--define(CHUNK, 240).
-
--define(get4p1(Idx),((Idx) bsr 4)).
--define(get4p2(Idx),((Idx) band 16#0F)).
--define(get2p1(Idx),((Idx) bsr 6)).
--define(get2p2(Idx),(((Idx) bsr 4) band 3)).
--define(get2p3(Idx),(((Idx) bsr 2) band 3)).
--define(get2p4(Idx),((Idx) band 3)).
--define(get1p1(Idx),((Idx) bsr 7)).
--define(get1p2(Idx),(((Idx) bsr 6) band 1)).
--define(get1p3(Idx),(((Idx) bsr 5) band 1)).
--define(get1p4(Idx),(((Idx) bsr 4) band 1)).
--define(get1p5(Idx),(((Idx) bsr 3) band 1)).
--define(get1p6(Idx),(((Idx) bsr 2) band 1)).
--define(get1p7(Idx),(((Idx) bsr 1) band 1)).
--define(get1p8(Idx),((Idx) band 1)).
-
-binary(W, H, Bitmap) when is_binary(Bitmap) ->
- Z = zlib:open(),
- Binary = bitmap2png(W, H, Bitmap, Z),
- zlib:close(Z),
- Binary.
-
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-% Begin Tainted
-
-bitmap2png(W, H, Bitmap,Z) ->
- HDR = create_chunk(<<"IHDR",W:32,H:32,8:8,(png_type(r8g8b8)):8,0:8,0:8,0:8>>,Z),
- DATA = create_chunk(["IDAT",compress_image(0,3*W,Bitmap,[])],Z),
- END = create_chunk(<<"IEND">>,Z),
- list_to_binary([?MAGIC,HDR,DATA,END]).
-
-compress_image(I,RowLen, Bin, Acc) ->
- Pos = I*RowLen,
- case Bin of
- <<_:Pos/binary,Row:RowLen/binary,_/binary>> ->
- Filtered = filter_row(Row,RowLen),
- compress_image(I+1,RowLen,Bin,[Filtered|Acc]);
- _ when Pos == size(Bin) ->
- Filtered = list_to_binary(lists:reverse(Acc)),
- Compressed = zlib:compress(Filtered),
- Compressed
- end.
-
-filter_row(Row,_RowLen) ->
- [0,Row].
-
-% dialyzer warnings
-%png_type(g8) -> ?GREYSCALE;
-%png_type(a8) -> ?GREYSCALE;
-%png_type(r8g8b8a8) -> ?TRUECOLOUR_A;
-png_type(r8g8b8) -> ?TRUECOLOUR.
-
-create_chunk(Bin,Z) when is_list(Bin) ->
- create_chunk(list_to_binary(Bin),Z);
-create_chunk(Bin,Z) when is_binary(Bin) ->
- Sz = size(Bin)-4,
- Crc = zlib:crc32(Z,Bin),
- <<Sz:32,Bin/binary,Crc:32>>.
-
-% End tainted
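The deleted encoder above takes a raw, row-major R8G8B8 bitmap of exactly W*H*3 bytes (egd:render/3 normally produces that bitmap via egd_render before calling it). A minimal sketch of calling it directly:

    png_example() ->
        W = 2, H = 2,
        %% Four pixels, 3 bytes each, row major: red, green, blue, white.
        Bitmap = <<255,0,0, 0,255,0, 0,0,255, 255,255,255>>,
        Png = egd_png:binary(W, H, Bitmap),
        ok = file:write_file("tiny.png", Png).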
diff --git a/lib/percept/src/egd_primitives.erl b/lib/percept/src/egd_primitives.erl
deleted file mode 100644
index b64189c552..0000000000
--- a/lib/percept/src/egd_primitives.erl
+++ /dev/null
@@ -1,412 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2008-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-
-%%
-%% @doc egd_primitives
-%%
-
-
--module(egd_primitives).
--export([create/2,
- color/1,
- pixel/3,
- polygon/3,
- line/4,
- line/5,
- arc/4,
- arc/5,
- rectangle/4,
- filledRectangle/4,
- filledEllipse/4,
- filledTriangle/5,
- text/5]).
-
--export([info/1,
- object_info/1,
- rgb_float2byte/1]).
-
--export([arc_to_edges/3,
- convex_hull/1,
- edges/1]).
-
--include("egd.hrl").
-
-%% API info
-info(I) ->
- W = I#image.width, H = I#image.height,
- io:format("Dimensions: ~p x ~p~n", [W,H]),
- io:format("Number of image objects: ~p~n", [length(I#image.objects)]),
- TotalPoints = info_objects(I#image.objects,0),
- io:format("Total points: ~p [~p %]~n", [TotalPoints, 100*TotalPoints/(W*H)]),
- ok.
-
-info_objects([],N) -> N;
-info_objects([O | Os],N) ->
- Points = length(O#image_object.points),
- info_objects(Os,N+Points).
-
-object_info(O) ->
- io:format("Object information: ~p~n", [O#image_object.type]),
- io:format("- Number of points: ~p~n", [length(O#image_object.points)]),
- io:format("- Bounding box: ~p~n", [O#image_object.span]),
- io:format("- Color: ~p~n", [O#image_object.color]),
- ok.
-
-%% interface functions
-
-line(I, Sp, Ep, Color) ->
- line(I, Sp, Ep, 1, Color).
-
-line(#image{objects=Os}=I, Sp, Ep, Wd, Color) ->
- I#image{objects=[#image_object{
- internals = Wd,
- type = line,
- points = [Sp, Ep],
- span = span([Sp, Ep]),
- color = Color}|Os]}.
-
-arc(I, {Sx,Sy} = Sp, {Ex,Ey} = Ep, Color) ->
- X = Ex - Sx,
- Y = Ey - Sy,
- R = math:sqrt(X*X + Y*Y)/2,
- arc(I, Sp, Ep, R, Color).
-
-arc(#image{objects=Os}=I, Sp, Ep, D, Color) ->
- SpanPts = lists:flatten([
- [{X + D, Y + D},
- {X + D, Y - D},
- {X - D, Y + D},
- {X - D, Y - D}] || {X,Y} <- [Sp,Ep]]),
-
- I#image{objects=[#image_object{
- internals = D,
- type = arc,
- points = [Sp, Ep],
- span = span(SpanPts),
- color = Color}|Os]}.
-
-pixel(#image{objects=Os}=I, Point, Color) ->
- I#image{objects=[#image_object{
- type = pixel,
- points = [Point],
- span = span([Point]),
- color = Color}|Os]}.
-
-rectangle(#image{objects=Os}=I, Sp, Ep, Color) ->
- I#image{objects=[#image_object{
- type = rectangle,
- points = [Sp, Ep],
- span = span([Sp, Ep]),
- color = Color}|Os]}.
-
-filledRectangle(#image{objects=Os}=I, Sp, Ep, Color) ->
- I#image{objects=[#image_object{
- type = filled_rectangle,
- points = [Sp, Ep],
- span = span([Sp, Ep]),
- color = Color}|Os]}.
-
-filledEllipse(#image{objects=Os}=I, Sp, Ep, Color) ->
- {X0,Y0,X1,Y1} = Span = span([Sp, Ep]),
- Xr = (X1 - X0)/2,
- Yr = (Y1 - Y0)/2,
- Xp = - X0 - Xr,
- Yp = - Y0 - Yr,
- I#image{objects=[#image_object{
- internals = {Xp,Yp, Xr*Xr,Yr*Yr},
- type = filled_ellipse,
- points = [Sp, Ep],
- span = Span,
- color = Color}|Os]}.
-
-filledTriangle(#image{objects=Os}=I, P1, P2, P3, Color) ->
- I#image{objects=[#image_object{
- type = filled_triangle,
- points = [P1,P2,P3],
- span = span([P1,P2,P3]),
- color = Color}|Os]}.
-
-polygon(#image{objects=Os}=I, Points, Color) ->
- I#image{objects=[#image_object{
- type = polygon,
- points = Points,
- span = span(Points),
- color = Color}|Os]}.
-
-text(#image{objects=Os}=I, {Xs,Ys}=Sp, Font, Text, Color) ->
- {FW,FH} = egd_font:size(Font),
- Length = length(Text),
- Ep = {Xs + Length*FW, Ys + FH + 5},
- I#image{objects=[#image_object{
- internals = {Font, Text},
- type = text_horizontal,
- points = [Sp],
- span = span([Sp,Ep]),
- color = Color}|Os]}.
-
-create(W, H) ->
- #image{width = W, height = H}.
-
-color(Color) when is_atom(Color) -> rgba_byte2float(name_to_color(Color, 255));
-color({Color, A}) when is_atom(Color) -> rgba_byte2float(name_to_color(Color, A));
-color({R,G,B}) -> rgba_byte2float({R,G,B, 255});
-color(C) -> rgba_byte2float(C).
-
-name_to_color(Color, A) ->
- case Color of
- %% HTML default colors
- black -> { 0, 0, 0, A};
- silver -> {192, 192, 192, A};
- gray -> {128, 128, 128, A};
- white -> {255, 255, 255, A};
- maroon -> {128, 0, 0, A};
- red -> {255, 0, 0, A};
- purple -> {128, 0, 128, A};
- fuchia -> {255, 0, 255, A};
- green -> { 0, 128, 0, A};
- lime -> { 0, 255, 0, A};
- olive -> {128, 128, 0, A};
- yellow -> {255, 255, 0, A};
- navy -> { 0, 0, 128, A};
- blue -> { 0, 0, 255, A};
- teal -> { 0, 128, 128, A};
- aqua -> { 0, 255, 255, A};
-
- %% HTML color extensions
- steelblue -> { 70, 130, 180, A};
- royalblue -> { 4, 22, 144, A};
- cornflowerblue -> {100, 149, 237, A};
- lightsteelblue -> {176, 196, 222, A};
- mediumslateblue -> {123, 104, 238, A};
- slateblue -> {106, 90, 205, A};
- darkslateblue -> { 72, 61, 139, A};
- midnightblue -> { 25, 25, 112, A};
- darkblue -> { 0, 0, 139, A};
- mediumblue -> { 0, 0, 205, A};
- dodgerblue -> { 30, 144, 255, A};
- deepskyblue -> { 0, 191, 255, A};
- lightskyblue -> {135, 206, 250, A};
- skyblue -> {135, 206, 235, A};
- lightblue -> {173, 216, 230, A};
- powderblue -> {176, 224, 230, A};
- azure -> {240, 255, 255, A};
- lightcyan -> {224, 255, 255, A};
- paleturquoise -> {175, 238, 238, A};
- mediumturquoise -> { 72, 209, 204, A};
- lightseagreen -> { 32, 178, 170, A};
- darkcyan -> { 0, 139, 139, A};
- cadetblue -> { 95, 158, 160, A};
- darkturquoise -> { 0, 206, 209, A};
- cyan -> { 0, 255, 255, A};
- turquoise -> { 64, 224, 208, A};
- aquamarine -> {127, 255, 212, A};
- mediumaquamarine -> {102, 205, 170, A};
- darkseagreen -> {143, 188, 143, A};
- mediumseagreen -> { 60, 179, 113, A};
- seagreen -> { 46, 139, 87, A};
- darkgreen -> { 0, 100, 0, A};
- forestgreen -> { 34, 139, 34, A};
- limegreen -> { 50, 205, 50, A};
- chartreuse -> {127, 255, 0, A};
- lawngreen -> {124, 252, 0, A};
- greenyellow -> {173, 255, 47, A};
- yellowgreen -> {154, 205, 50, A};
- palegreen -> {152, 251, 152, A};
- lightgreen -> {144, 238, 144, A};
- springgreen -> { 0, 255, 127, A};
- darkolivegreen -> { 85, 107, 47, A};
- olivedrab -> {107, 142, 35, A};
- darkkhaki -> {189, 183, 107, A};
- darkgoldenrod -> {184, 134, 11, A};
- goldenrod -> {218, 165, 32, A};
- gold -> {255, 215, 0, A};
- khaki -> {240, 230, 140, A};
- palegoldenrod -> {238, 232, 170, A};
- blanchedalmond -> {255, 235, 205, A};
- moccasin -> {255, 228, 181, A};
- wheat -> {245, 222, 179, A};
- navajowhite -> {255, 222, 173, A};
- burlywood -> {222, 184, 135, A};
- tan -> {210, 180, 140, A};
- rosybrown -> {188, 143, 143, A};
- sienna -> {160, 82, 45, A};
- saddlebrown -> {139, 69, 19, A};
- chocolate -> {210, 105, 30, A};
- peru -> {205, 133, 63, A};
- sandybrown -> {244, 164, 96, A};
- darkred -> {139, 0, 0, A};
- brown -> {165, 42, 42, A};
- firebrick -> {178, 34, 34, A};
- indianred -> {205, 92, 92, A};
- lightcoral -> {240, 128, 128, A};
- salmon -> {250, 128, 114, A};
- darksalmon -> {233, 150, 122, A};
- lightsalmon -> {255, 160, 122, A};
- coral -> {255, 127, 80, A};
- tomato -> {255, 99, 71, A};
- darkorange -> {255, 140, 0, A};
- orange -> {255, 165, 0, A};
- orangered -> {255, 69, 0, A};
- crimson -> {220, 20, 60, A};
- deeppink -> {255, 20, 147, A};
- fuchsia -> {255, 0, 255, A};
- magenta -> {255, 0, 255, A};
- hotpink -> {255, 105, 180, A};
- lightpink -> {255, 182, 193, A};
- pink -> {255, 192, 203, A};
- palevioletred -> {219, 112, 147, A};
- mediumvioletred -> {199, 21, 133, A};
- darkmagenta -> {139, 0, 139, A};
- mediumpurple -> {147, 112, 219, A};
- blueviolet -> {138, 43, 226, A};
- indigo -> { 75, 0, 130, A};
- darkviolet -> {148, 0, 211, A};
- darkorchid -> {153, 50, 204, A};
- mediumorchid -> {186, 85, 211, A};
- orchid -> {218, 112, 214, A};
- violet -> {238, 130, 238, A};
- plum -> {221, 160, 221, A};
- thistle -> {216, 191, 216, A};
- lavender -> {230, 230, 250, A};
- ghostwhite -> {248, 248, 255, A};
- aliceblue -> {240, 248, 255, A};
- mintcream -> {245, 255, 250, A};
- honeydew -> {240, 255, 240, A};
- lemonchiffon -> {255, 250, 205, A};
- cornsilk -> {255, 248, 220, A};
- lightyellow -> {255, 255, 224, A};
- ivory -> {255, 255, 240, A};
- floralwhite -> {255, 250, 240, A};
- linen -> {250, 240, 230, A};
- oldlace -> {253, 245, 230, A};
- antiquewhite -> {250, 235, 215, A};
- bisque -> {255, 228, 196, A};
- peachpuff -> {255, 218, 185, A};
- papayawhip -> {255, 239, 213, A};
- beige -> {245, 245, 220, A};
- seashell -> {255, 245, 238, A};
- lavenderblush -> {255, 240, 245, A};
- mistyrose -> {255, 228, 225, A};
- snow -> {255, 250, 250, A};
- whitesmoke -> {245, 245, 245, A};
- gainsboro -> {220, 220, 220, A};
- lightgrey -> {211, 211, 211, A};
- darkgray -> {169, 169, 169, A};
- lightslategray -> {119, 136, 153, A};
- slategray -> {112, 128, 144, A};
- dimgray -> {105, 105, 105, A};
- darkslategray -> { 47, 79, 79, A};
- mediumspringgreen -> { 0, 250, 154, A};
- lightgoldenrodyellow -> {250, 250, 210, A}
- end.
-
-
-%%% Generic transformations
-
-%% arc_to_edges
-%% In:
-%% P1 :: point(),
-%% P2 :: point(),
-%% D :: float(),
-%% Out:
-%% Res :: [edges()]
-
-arc_to_edges(P0, P1, D) when abs(D) < 0.5 -> [{P0,P1}];
-arc_to_edges({X0,Y0}, {X1,Y1}, D) ->
- Vx = X1 - X0,
- Vy = Y1 - Y0,
-
- Mx = X0 + 0.5 * Vx,
- My = Y0 + 0.5 * Vy,
-
- % Scale V by Rs
- L = math:sqrt(Vx*Vx + Vy*Vy),
- Sx = D*Vx/L,
- Sy = D*Vy/L,
-
- Bx = trunc(Mx - Sy),
- By = trunc(My + Sx),
-
- arc_to_edges({X0,Y0}, {Bx,By}, D/4) ++ arc_to_edges({Bx,By}, {X1,Y1}, D/4).
-
-%% edges
-%% In:
-%% Pts :: [point()]
-%% Out:
-%% Edges :: [{point(),point()}]
-
-edges([]) -> [];
-edges([P0|_] = Pts) -> edges(Pts, P0,[]).
-edges([P1], P0, Out) -> [{P1,P0}|Out];
-edges([P1,P2|Pts],P0,Out) -> edges([P2|Pts],P0,[{P1,P2}|Out]).
-
-%% convex_hull
-%% In:
-%% Ps :: [point()]
-%% Out:
-%% Res :: [point()]
-
-convex_hull(Ps) ->
- P0 = lower_right(Ps),
- [P1|Ps1] = lists:sort(fun
- (P2,P1) ->
- case point_side({P1,P0},P2) of
- left -> true;
- _ -> false
- end
- end, Ps -- [P0]),
- convex_hull(Ps1, [P1,P0]).
-
-convex_hull([], W) -> W;
-convex_hull([P|Pts], [P1,P2|W]) ->
- case point_side({P2,P1},P) of
- left -> convex_hull(Pts, [P,P1,P2|W]);
- _ -> convex_hull([P|Pts], [P2|W])
- end.
-
-lower_right([P|Pts]) -> lower_right(P, Pts).
-lower_right(P, []) -> P;
-lower_right({X0,Y0}, [{_,Y}|Pts]) when Y < Y0 -> lower_right({X0,Y0}, Pts);
-lower_right({X0,Y0}, [{X,Y}|Pts]) when X < X0, Y < Y0 -> lower_right({X0,Y0}, Pts);
-lower_right(_,[P|Pts]) -> lower_right(P, Pts).
-
-point_side({{X0,Y0}, {X1, Y1}}, {X2, Y2}) -> point_side((X1 - X0)*(Y2 - Y0) - (X2 - X0)*(Y1 - Y0)).
-point_side(D) when D > 0 -> left;
-point_side(D) when D < 0 -> right;
-point_side(_) -> on_line.
-
-%% AUX
-
-span([{X0,Y0}|Points]) ->
- span(Points,X0,Y0,X0,Y0).
-span([{X0,Y0}|Points],Xmin,Ymin,Xmax,Ymax) ->
- span(Points,erlang:min(Xmin,X0),
- erlang:min(Ymin,Y0),
- erlang:max(Xmax,X0),
- erlang:max(Ymax,Y0));
-span([],Xmin,Ymin,Xmax,Ymax) ->
- {Xmin,Ymin,Xmax,Ymax}.
-
-
-rgb_float2byte({R,G,B}) -> rgb_float2byte({R,G,B,1.0});
-rgb_float2byte({R,G,B,A}) ->
- {trunc(R*255), trunc(G*255), trunc(B*255), trunc(A*255)}.
-
-rgba_byte2float({R,G,B,A}) ->
- {R/255,G/255,B/255,A/255}.
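The primitives layer deleted above is purely functional: create/2 returns an #image{} record, each drawing call returns a new record with one more object prepended, and color/1 maps byte tuples or color names to the internal float RGBA form. A minimal sketch:

    primitives_example() ->
        I0  = egd_primitives:create(100, 100),
        Red = egd_primitives:color(red),   %% {1.0, 0.0, 0.0, 1.0}
        I1  = egd_primitives:line(I0, {0, 0}, {99, 99}, Red),
        I2  = egd_primitives:filledEllipse(I1, {20, 20}, {80, 80},
                                           egd_primitives:color({0, 0, 255})),
        egd_primitives:info(I2).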
diff --git a/lib/percept/src/egd_render.erl b/lib/percept/src/egd_render.erl
deleted file mode 100644
index 6c708e3e86..0000000000
--- a/lib/percept/src/egd_render.erl
+++ /dev/null
@@ -1,664 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2008-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-
-%%
-%% @doc egd_render
-%%
-
--module(egd_render).
-
--export([binary/1, binary/2]).
--export([eps/1]).
--compile(inline).
-
--export([line_to_linespans/3]).
-
--include("egd.hrl").
--define('DummyC',0).
-
-binary(Image) ->
- binary(Image, opaque).
-
-binary(Image, Type) ->
- parallel_binary(precompile(Image),Type).
-
-parallel_binary(Image = #image{ height = Height },Type) ->
- case erlang:min(erlang:system_info(schedulers), Height) of
- 1 ->
- % if the height or the number of schedulers is 1
- % do the scanlines in this process.
- W = Image#image.width,
- Bg = Image#image.background,
- Os = Image#image.objects,
- erlang:list_to_binary([scanline(Y, Os, {0,0,W - 1, Bg}, Type)
- || Y <- lists:seq(1, Height)]);
- Np ->
- Pids = start_workers(Np, Type),
- Handler = handle_workers(Height, Pids),
- init_workers(Image, Handler, Pids),
- Res = receive_binaries(Height),
- finish_workers(Pids),
- Res
- end.
-
-start_workers(Np, Type) ->
- start_workers(Np, Type, []).
-
-start_workers( 0, _, Pids) -> Pids;
-start_workers(Np, Type, Pids) when Np > 0 ->
- start_workers(Np - 1, Type, [spawn_link(fun() -> worker(Type) end)|Pids]).
-
-worker(Type) ->
- receive
- {Pid, data, #image{ objects = Os, width = W, background = Bg }} ->
- worker(Os, W, Bg, Type, Pid)
- end.
-
-worker(Objects, Width, Bg, Type, Collector) ->
- receive
- {Pid, scan, {Ys, Ye}} ->
- lists:foreach(fun
- (Y) ->
- Bin = erlang:list_to_binary(scanline(Y, Objects, {0,0,Width - 1, Bg}, Type)),
- Collector ! {scan, Y, Bin}
- end, lists:seq(Ys,Ye)),
- Pid ! {self(), scan_complete},
- worker(Objects, Width, Bg, Type, Collector);
- {Pid, scan, Y} ->
- Bin = erlang:list_to_binary(scanline(Y, Objects, {0,0,Width - 1, Bg}, Type)),
- Collector ! {scan, Y, Bin},
- Pid ! {self(), scan_complete},
- worker(Objects, Width, Bg, Type, Collector);
- {_, done} ->
- ok
- end.
-
-init_workers(_Image, _Handler, []) -> ok;
-init_workers(Image, Handler, [Pid|Pids]) ->
- Pid ! {self(), data, Image},
- Handler ! {Pid, scan_complete},
- init_workers(Image, Handler, Pids).
-
-handle_workers(H, Pids) ->
- spawn_link(fun() -> handle_workers(H, H, length(Pids)) end).
-
-handle_workers(_, 0, _) -> ok;
-handle_workers(H, Hi, Np) when H > 0 ->
- N = trunc(Hi/(2*Np)),
- receive
- {Pid, scan_complete} ->
- if N < 2 ->
- Pid ! {self(), scan, Hi},
- handle_workers(H, Hi - 1, Np);
- true ->
- Pid ! {self(), scan, {Hi - N, Hi}},
- handle_workers(H, Hi - 1 - N, Np)
- end
- end.
-
-finish_workers([]) -> ok;
-finish_workers([Pid|Pids]) ->
- Pid ! {self(), done},
- finish_workers(Pids).
-
-receive_binaries(H) ->
- receive_binaries(H, []).
-
-receive_binaries(0, Bins) -> erlang:list_to_binary(Bins);
-receive_binaries(H, Bins) when H > 0 ->
- receive
- {scan, H, Bin} ->
- receive_binaries(H - 1, [Bin|Bins])
- end.
-
-scanline(Y, Os, {_,_,Width,_}=LSB, Type) ->
- OLSs = parse_objects_on_line(Y-1, Width, Os),
- RLSs = resulting_line_spans([LSB|OLSs],Type),
- [ lists:duplicate(Xr - Xl + 1, <<(trunc(R*255)):8,(trunc(G*255)):8,(trunc(B*255)):8>>) || {_,Xl, Xr, {R,G,B,_}} <- RLSs ].
-
-resulting_line_spans(LSs,Type) ->
- %% Build a list of "transitions" from left to right.
- Trans = line_spans_to_trans(LSs),
- %% Convert list of "transitions" to linespans.
- trans_to_line_spans(Trans,Type).
-
-line_spans_to_trans(LSs) ->
- line_spans_to_trans(LSs,[],0).
-
-line_spans_to_trans([],Db,_) ->
- lists:sort(Db);
-line_spans_to_trans([{_,L,R,C}|LSs],Db,Z) ->
- line_spans_to_trans(LSs,[{{L,Z,start},C},{{R+1,Z,stop},C}|Db],Z+1).
-
-trans_to_line_spans(Trans,Type) ->
- trans_to_line_spans(simplify_trans(Trans,Type,[],{0.0,0.0,0.0,0.0},[])).
-
-trans_to_line_spans(SimpleTrans) ->
- trans_to_line_spans1(SimpleTrans,[]).
-
-trans_to_line_spans1([],Spans) ->
- Spans;
-trans_to_line_spans1([_],Spans) ->
- Spans;
-trans_to_line_spans1([{L1,_},{L2,C2}|SimpleTrans],Spans) ->
- %% We are going backwards now...
- trans_to_line_spans1([{L2,C2}|SimpleTrans],[{?DummyC,L2,L1-1,C2}|Spans]).
-
-simplify_trans([],_,_,_,Acc) ->
- Acc;
-simplify_trans([{{L,_,_},_}|_] = Trans,Type,Layers,OldC,Acc) ->
- {NextTrans,RestTrans} =
- lists:splitwith(fun({{L1,_,_},_}) when L1 == L ->
- true;
- (_) ->
- false
- end, Trans),
- {C,NewLayers} = color(NextTrans,Layers,Type,OldC),
- case OldC of
- C -> %% No change in color, so transition unnecessary.
- simplify_trans(RestTrans,Type,NewLayers,OldC,Acc);
- _ ->
- simplify_trans(RestTrans,Type,NewLayers,C,[{L,C}|Acc])
- end.
-
-color(Trans,Layers,Type,OldC) ->
- case modify_layers(Layers,Trans) of
- Layers ->
- {OldC,Layers};
- NewLayers ->
- {color(NewLayers,Type),NewLayers}
- end.
-
-color([],_) -> {0.0,0.0,0.0,0.0};
-color([{_,C}|_],opaque) -> C;
-color(Layers,alpha) -> color1({0.0,0.0,0.0,0.0},Layers).
-
-color1(Color,[]) -> Color;
-color1(Color,[{_,C}|Layers]) -> color1(alpha_blend(Color,C),Layers).
-
-modify_layers(Layers,[]) -> Layers;
-modify_layers(Layers,[{{_,Z,start},C}|Trans]) ->
- modify_layers(add_layer(Layers, Z, C), Trans);
-modify_layers(Layers,[{{_,Z,stop },C}|Trans]) ->
- modify_layers(remove_layer(Layers, Z, C), Trans).
-
-add_layer([{Z1,_}=H|Layers],Z,C) when Z1 > Z ->
- [H|add_layer(Layers,Z,C)];
-add_layer(Layers,Z,C) ->
- [{Z,C}|Layers].
-
-remove_layer(Layers,Z,C) ->
- Layers -- [{Z,C}].
-
-alpha_blend({R1,G1,B1,A1}, {R2,G2,B2,A2}) when is_float(A1), is_float(A2)->
- Beta = A2*(1.0 - A1),
- A = A1 + Beta,
- R = R1*A1 + R2*Beta,
- G = G1*A1 + G2*Beta,
- B = B1*A1 + B2*Beta,
- {R,G,B,A}.
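alpha_blend/2 above is the usual "over" operator on colours with float components in 0.0..1.0; a quick worked check with a half-transparent red layer over an opaque blue one:

%% Beta = 1.0*(1.0 - 0.5) = 0.5, A = 0.5 + 0.5 = 1.0
%% alpha_blend({1.0,0.0,0.0,0.5}, {0.0,0.0,1.0,1.0}) =:= {0.5, 0.0, 0.5, 1.0}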
-
-parse_objects_on_line(Y, Width, Objects) ->
- parse_objects_on_line(Y, 1, Width, Objects, []).
-parse_objects_on_line(_Y, _Z, _, [], Out) -> lists:flatten(Out);
-parse_objects_on_line(Y, Z, Width, [O|Os], Out) ->
- case is_object_on_line(O, Y) of
- false ->
- parse_objects_on_line(Y, Z + 1, Width, Os, Out);
- true ->
- OLs = object_line_data(O,Y,Z),
- TOLs = trim_object_line_data(OLs, Width),
- parse_objects_on_line(Y, Z + 1, Width, Os, [TOLs|Out])
- end.
-
-trim_object_line_data(OLs, Width) ->
- trim_object_line_data(OLs, Width, []).
-trim_object_line_data([], _, Out) -> Out;
-
-trim_object_line_data([{_, Xl, _, _}|OLs], Width, Out) when Xl > Width ->
- trim_object_line_data(OLs, Width, Out);
-trim_object_line_data([{_, _, Xr, _}|OLs], Width, Out) when Xr < 0 ->
- trim_object_line_data(OLs, Width, Out);
-trim_object_line_data([{Z, Xl, Xr, C}|OLs], Width, Out) ->
- trim_object_line_data(OLs, Width, [{Z, erlang:max(0,Xl), erlang:min(Xr,Width), C}|Out]).
-
-% object_line_data
-% In:
-% Object :: image_object()
-% Y :: index of height
-% Z :: index of depth
-% Out:
-% OLs = [{Z, Xl, Xr, Color}]
-% Z = index of depth
-% Xl = left X index
-% Xr = right X index
-% Purpose:
-% Calculate the length (start and finish index) of an object's horizontal
-% line, given the height index.
-
-object_line_data(#image_object{type=rectangle,
- span={X0,Y0,X1,Y1}, color=C}, Y, Z) ->
- if
- Y0 =:= Y ; Y1 =:= Y ->
- [{Z, X0, X1, C}];
- true ->
- [{Z, X0, X0, C},
- {Z, X1, X1, C}]
- end;
-
-object_line_data(#image_object{type=filled_rectangle,
- span={X0, _, X1, _}, color=C}, _Y, Z) ->
- [{Z, X0, X1, C}];
-
-object_line_data(#image_object{type=filled_ellipse,
- internals={Xr,Yr,Yr2}, span={X0,Y0,X1,Y1}, color=C}, Y, Z) ->
- if
- X1 - X0 =:= 0; Y1 - Y0 =:= 0 ->
- [{Z, X0, X1, C}];
- true ->
- Yo = trunc(Y - Y0 - Yr),
- Yo2 = Yo*Yo,
- Xo = math:sqrt((1 - Yo2/Yr2))*Xr,
- [{Z, round(X0 - Xo + Xr), round(X0 + Xo + Xr), C}]
- end;
-
-object_line_data(#image_object{type=filled_triangle,
- intervals=Is, color=C}, Y, Z) ->
- case lists:keyfind(Y, 1, Is) of
- {Y, Xl, Xr} -> [{Z, Xl, Xr, C}];
- false -> []
- end;
-
-object_line_data(#image_object{type=line,
- intervals=M, color={R,G,B,_}}, Y, Z) ->
- case M of
- #{Y := Ls} -> [{Z, Xl, Xr, {R,G,B,1.0-C/255}}||{Xl,Xr,C} <- Ls];
- _ -> []
- end;
-
-object_line_data(#image_object{type=polygon,
- color=C, intervals=Is}, Y, Z) ->
- [{Z, Xl, Xr, C} || {Yp, Xl, Xr} <- Is, Yp =:= Y];
-
-object_line_data(#image_object{type=text_horizontal,
- color=C, intervals=Is}, Y, Z) ->
- [{Z, Xl, Xr, C} || {Yg, Xl, Xr} <- Is, Yg =:= Y];
-
-object_line_data(#image_object{type=pixel,
- span={X0,_,X1,_}, color=C}, _, Z) ->
- [{Z, X0, X1, C}].
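For the filled_ellipse clause the span width at a given row follows directly from the ellipse equation; a worked example for an object with span {0,0,10,10}, for which precompile_objects/1 has stored internals {Xr,Yr,Yr2} = {5.0,5.0,25.0}:

%% Y = 5 (middle row): Yo = trunc(5 - 0 - 5.0) = 0, Yo2 = 0,
%%   Xo = math:sqrt(1 - 0/25.0)*5.0 = 5.0   ->  span [{Z, 0, 10, C}]
%% Y = 1:              Yo = -4, Yo2 = 16,
%%   Xo = math:sqrt(1 - 16/25.0)*5.0 = 3.0  ->  span [{Z, 2, 8, C}]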
-
-is_object_on_line(#image_object{span={_,Y0,_,Y1}}, Y) ->
- if Y < Y0; Y > Y1 -> false;
- true -> true
- end.
-
-%%% primitives to line_spans
-
-%% compile objects to linespans
-
-precompile(#image{objects = Os}=I) ->
- I#image{objects = precompile_objects(Os)}.
-
-precompile_objects([]) -> [];
-precompile_objects([#image_object{type=line, internals=W, points=[P0,P1]}=O|Os]) ->
- [O#image_object{intervals = linespans_to_map(line_to_linespans(P0,P1,W))}|precompile_objects(Os)];
-precompile_objects([#image_object{type=filled_triangle, points=[P0,P1,P2]}=O|Os]) ->
- [O#image_object{intervals = triangle_ls(P0,P1,P2)}|precompile_objects(Os)];
-precompile_objects([#image_object{type=polygon, points=Pts}=O|Os]) ->
- [O#image_object{intervals = polygon_ls(Pts)}|precompile_objects(Os)];
-precompile_objects([#image_object{type=filled_ellipse, span={X0,Y0,X1,Y1}}=O|Os]) ->
- Xr = (X1 - X0)/2,
- Yr = (Y1 - Y0)/2,
- Yr2 = Yr*Yr,
- [O#image_object{internals={Xr,Yr,Yr2}}|precompile_objects(Os)];
-precompile_objects([#image_object{type=arc, points=[P0,P1], internals=D}=O|Os]) ->
- Es = egd_primitives:arc_to_edges(P0, P1, D),
- Ls = lists:foldl(fun ({Ep0,Ep1},M) ->
- linespans_to_map(line_to_linespans(Ep0,Ep1,1),M)
- end, #{}, Es),
- [O#image_object{type=line, intervals=Ls}|precompile_objects(Os)];
-precompile_objects([#image_object{type=text_horizontal,
- points=[P0], internals={Font,Text}}=O|Os]) ->
- [O#image_object{intervals=text_horizontal_ls(P0,Font,Text)}|precompile_objects(Os)];
-precompile_objects([O|Os]) ->
- [O|precompile_objects(Os)].
-
-% triangle
-
-triangle_ls(P1,P2,P3) ->
- % Find top point (or left most top point),
- % From that point, two lines will be drawn to the
- % other points.
- % For each Y step,
- % bresenham_line_interval for each of the two lines
- % Find the left most and the right most for those lines
- % At an end point, a new line to the point already being drawn
- % repeat same procedure as above
- [Sp1, Sp2, Sp3] = tri_pt_ysort([P1,P2,P3]),
- triangle_ls_lp(tri_ls_ysort(line_to_linespans(Sp1,Sp2,1)), Sp2,
- tri_ls_ysort(line_to_linespans(Sp1,Sp3,1)), Sp3, []).
-
-% There will be Y mismatches between the two lists since Bresenham is not perfect.
-% It can be remedied by checking intervals; this could however be costly and
-% may not be necessary, depending on how exact we need the points to be.
-% The lists should differ by at most one entry and the endpoints should be fine.
-
-triangle_ls_lp([],_,[],_,Out) -> Out;
-triangle_ls_lp(LSs1, P1, [], P2, Out) ->
- SLSs = tri_ls_ysort(line_to_linespans(P2,P1,1)),
- N2 = length(SLSs),
- N1 = length(LSs1),
- if
- N1 > N2 ->
- [_|ILSs] = LSs1,
- triangle_ls_lp(ILSs, SLSs, Out);
- N2 > N1 ->
- [_|ILSs] = SLSs,
- triangle_ls_lp(LSs1, ILSs, Out);
- true ->
- triangle_ls_lp(LSs1, SLSs, Out)
- end;
-triangle_ls_lp([], P1, LSs2, P2, Out) ->
- SLSs = tri_ls_ysort(line_to_linespans(P1,P2,1)),
- N1 = length(SLSs),
- N2 = length(LSs2),
- if
- N1 > N2 ->
- [_|ILSs] = SLSs,
- triangle_ls_lp(ILSs, LSs2, Out);
- N2 > N1 ->
- [_|ILSs] = LSs2,
- triangle_ls_lp(SLSs, ILSs, Out);
- true ->
- triangle_ls_lp(SLSs, LSs2, Out)
- end;
-triangle_ls_lp([LS1|LSs1],P1,[LS2|LSs2],P2, Out) ->
- {Y, Xl1, Xr1,_Ca1} = LS1,
- {_, Xl2, Xr2,_Ca2} = LS2,
- Xr = lists:max([Xl1,Xr1,Xl2,Xr2]),
- Xl = lists:min([Xl1,Xr1,Xl2,Xr2]),
- triangle_ls_lp(LSs1,P1,LSs2,P2,[{Y,Xl,Xr}|Out]).
-
-triangle_ls_lp([],[],Out) -> Out;
-triangle_ls_lp([],_,Out) -> Out;
-triangle_ls_lp(_,[],Out) -> Out;
-triangle_ls_lp([LS1|LSs1], [LS2|LSs2], Out) ->
- {Y, Xl1, Xr1, _Ca1} = LS1,
- {_, Xl2, Xr2, _Ca2} = LS2,
- Xr = lists:max([Xl1,Xr1,Xl2,Xr2]),
- Xl = lists:min([Xl1,Xr1,Xl2,Xr2]),
- triangle_ls_lp(LSs1,LSs2,[{Y,Xl,Xr}|Out]).
-
-tri_pt_ysort(Pts) ->
- % {X,Y}
- lists:sort(
- fun ({_,Y1},{_,Y2}) ->
- if Y1 > Y2 -> false; true -> true end
- end, Pts).
-
-tri_ls_ysort(LSs) ->
- % {Y, Xl, Xr, Ca}
- lists:sort(
- fun ({Y1,_,_,_},{Y2,_,_,_}) ->
- if Y1 > Y2 -> false; true -> true end
- end, LSs).
-
-% polygon_ls
-% In:
-% Pts :: [{X,Y}]
-% Out:
-% LSs :: [{Y,Xl,Xr}]
-% Purpose:
-% Make polygon line spans
-% Algorithm:
-% 1. Find the left most (lm) point
-% 2. Find the two points adjacent to that point
-% The triplet will make a triangle
-% 3. Ensure no points lie within the triangle
-% 4a. No points within the triangle:
-% make the triangle,
-% remove the lm point,
-% go to 1.
-% 4b. Point(s) within the triangle:
-% retry using the offending point as the triangle's third corner (see polygon_tri/2).
-%
-
-
-polygon_ls(Pts) ->
- % Make triangles
- Tris = polygon_tri(Pts),
- % interval triangles
- lists:flatten(polygon_tri_ls(Tris, [])).
-
-polygon_tri_ls([], Out) -> Out;
-polygon_tri_ls([{P1,P2,P3}|Tris], Out) ->
- polygon_tri_ls(Tris, [triangle_ls(P1,P2,P3)|Out]).
-
-polygon_tri(Pts) ->
- polygon_tri(polygon_lm_pt(Pts), []).
-
-
-polygon_tri([P1,P2,P3],Tris) -> [{P1,P2,P3}|Tris];
-polygon_tri([P2,P1,P3|Pts], Tris) ->
- case polygon_tri_test(P1,P2,P3,Pts) of
- false -> polygon_tri(polygon_lm_pt([P2,P3|Pts]), [{P1,P2,P3}|Tris]);
- [LmPt|Ptsn] -> polygon_tri([P2,P1,LmPt,P3|Ptsn], Tris)
- end.
-
-polygon_tri_test(P1,P2,P3, Pts) ->
- polygon_tri_test(P1,P2,P3, Pts, []).
-
-polygon_tri_test(_,_,_, [], _) -> false;
-polygon_tri_test(P1,P2,P3,[Pt|Pts], Ptsr) ->
- case point_inside_triangle(Pt, P1,P2,P3) of
- false -> polygon_tri_test(P1,P2,P3, Pts, [Pt|Ptsr]);
- true -> [Pt|Pts] ++ lists:reverse(Ptsr)
- end.
-
-% polygon_lm_pt
-% In:
-% Pts :: [{X,Y}]
-% Out
-% LmPts = [{X0,Y0},{Xmin,Y0},{X1,Y1},...]
-% Purpose:
-% The order of the list is important;
-% rotate the elements so that the Xmin point ends up second in the list
-% (matching the [P2,P1|_] pattern used by polygon_tri/2).
-% This is not extremely fast.
-
-polygon_lm_pt(Pts) ->
- Xs = [X||{X,_}<-Pts],
- polygon_lm_pt(Pts, lists:min(Xs), []).
-
-polygon_lm_pt([Pt0,{X,_}=Ptm | Pts], Xmin, Ptsr) when X > Xmin ->
- polygon_lm_pt([Ptm|Pts], Xmin, [Pt0|Ptsr]);
-polygon_lm_pt(Pts, _, Ptsr) ->
- Pts ++ lists:reverse(Ptsr).
-
-
-% return true if P is inside triangle (p1,p2,p3),
-% otherwise false.
-
-points_same_side({P1x,P1y}, {P2x,P2y}, {L1x,L1y}, {L2x,L2y}) ->
- ((P1x - L1x)*(L2y - L1y) - (L2x - L1x)*(P1y - L1y) *
- (P2x - L1x)*(L2y - L1y) - (L2x - L1x)*(P2y - L1y)) >= 0.
-
-point_inside_triangle(P, P1, P2, P3) ->
- points_same_side(P, P1, P2, P3) and
- points_same_side(P, P2, P1, P3) and
- points_same_side(P, P3, P1, P2).
-
-%% [{Y, Xl, Xr, C}] -> #{Y := [{Xl,Xr,C}]}
-%% Reorganize linespans into a map with Y as key.
-
-linespans_to_map(Ls) ->
- linespans_to_map(Ls,#{}).
-linespans_to_map([{Y,Xl,Xr,C}|Ls], M) ->
- case M of
- #{Y := Spans} -> linespans_to_map(Ls, M#{Y := [{Xl,Xr,C}|Spans]});
- _ -> linespans_to_map(Ls, M#{Y => [{Xl,Xr,C}]})
- end;
-linespans_to_map([], M) ->
- M.
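A small worked example of the accumulation above; spans that share a Y end up newest-first under that key (the coverage values 200 and 0 are arbitrary):

%% linespans_to_map([{3,0,5,200}, {3,7,9,0}, {4,1,2,0}]) =:=
%%     #{3 => [{7,9,0}, {0,5,200}], 4 => [{1,2,0}]}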
-
-
-%% line_to_linespans
-%% Anti-aliased thick line
-%% Do it CPS style
-%% In:
-%% P1 :: point()
-%% P2 :: point()
-%% Wd :: line width
-%% Out:
-%% [{Y,Xl,Xr,C}]
-%%
-line_to_linespans({X0,Y0},{X1,Y1},Wd) ->
- Dx = abs(X1-X0),
- Dy = abs(Y1-Y0),
- Sx = if X0 < X1 -> 1; true -> -1 end,
- Sy = if Y0 < Y1 -> 1; true -> -1 end,
- E0 = Dx - Dy,
- Ed = if Dx + Dy =:= 0 -> 1; true -> math:sqrt(Dx*Dx + Dy*Dy) end,
- line_to_ls(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E0,Ed,(Wd+1)/2,[]).
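The fourth element C built up below is an error-based coverage value in 0..255, where 0 means the pixel lies exactly on the ideal line; the line clause of object_line_data/3 above maps it back to an alpha with 1.0 - C/255. A quick check of the formula, assuming Wd = 1 (a one pixel wide line):

%% Pixel on the ideal line: E - Dx + Dy = 0, so
%%   C = max(0, 255*(0/Ed - 1 + 1)) = 0            -> alpha 1.0 (opaque)
%% Neighbouring pixel with abs(E2)/Ed close to 1:
%%   C = max(0, 255*(abs(E2)/Ed - 1 + 1)) ~ 255    -> alpha near 0.0
%% which is what produces the anti-aliased edge.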
-
-line_to_ls(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls0) ->
- C = max(0, 255*(abs(E - Dx+Dy)/Ed - Wd + 1)),
- Ls1 = [{Y0,X0,X0,C}|Ls0],
- line_to_ls_sx(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls1,E).
-
-line_to_ls_sx(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls,E2) when 2*E2 > -Dx ->
- line_to_ls_sx_do(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls,E2+Dy,Y0);
-line_to_ls_sx(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls,E2) ->
- line_to_ls_sy(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls,E2,X0).
-
-line_to_ls_sx_do(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls0,E2,Y) when E2 < Ed*Wd andalso
- (Y1 =/= Y orelse Dx > Dy) ->
- Y2 = Y + Sy,
- C = max(0,255*(abs(E2)/Ed-Wd+1)),
- Ls = [{Y2,X0,X0,C}|Ls0],
- line_to_ls_sx_do(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls,E2+Dx,Y2);
-line_to_ls_sx_do(X0,_Y0,X1,_Y1,_Dx,_Dy,_Sx,_Sy,_E,_Ed,_Wd,Ls,_E2,_Y) when X0 =:= X1 ->
- Ls;
-line_to_ls_sx_do(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls,_E2,_Y) ->
- line_to_ls_sy(X0+Sx,Y0,X1,Y1,Dx,Dy,Sx,Sy,E-Dy,Ed,Wd,Ls,E,X0).
-
-line_to_ls_sy(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls0,E2,X) when 2*E2 =< Dy ->
- line_to_ls_sy_do(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls0,Dx-E2,X);
-line_to_ls_sy(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls0,_E2,_X) ->
- line_to_ls(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls0).
-
-line_to_ls_sy_do(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls0,E2,X) when E2 < Ed*Wd andalso
- (X1 =/= X orelse Dx < Dy) ->
- X2 = X + Sx,
- C = max(0,255*(abs(E2)/Ed-Wd+1)),
- Ls = [{Y0,X2,X2,C}|Ls0],
- line_to_ls_sy_do(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls,E2+Dy,X2);
-line_to_ls_sy_do(_X0,Y0,_X1,Y1,_Dx,_Dy,_Sx,_Sy,_E,_Ed,_Wd,Ls,_E2,_X) when Y0 =:= Y1 ->
- Ls;
-line_to_ls_sy_do(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls0,_E2,_X) ->
- line_to_ls(X0,Y0+Sy,X1,Y1,Dx,Dy,Sx,Sy,E+Dx,Ed,Wd,Ls0).
-
-% Text
-
-text_horizontal_ls(Point, Font, Chars) ->
- {_Fw,Fh} = egd_font:size(Font),
- text_intervals(Point, Fh, Font, Chars, []).
-
-% This is stupid. The starting point is the top left (Ptl) but the font
-% offsets are relative to the bottom right origin,
-% {Xtl,Ytl} -------------------------
-% | |
-% | Glyph BoundingBox |
-% | -------- |
-% | |Bitmap| Gh |
-% FH |-Gx0-|Data | |
-% | -------- |
-% | | |
-% | Gy0 |
-% | | |
-% Glyph (0,0)------------------------- Gxm (Glyph X move)
-% FW
-% Therefore, we need Yo, which is Yo = FH - Gy0 - Gh,
-% Font height minus Glyph Y offset minus Glyph bitmap data boundingbox
-% height.
-
-text_intervals( _, _, _, [], Out) -> lists:flatten(Out);
-text_intervals({Xtl,Ytl}, Fh, Font, [Code|Chars], Out) ->
- {{_Gw, Gh, Gx0, Gy0, Gxm}, LSs} = egd_font:glyph(Font, Code),
- % Set offset points from translation matrix to point in TeInVe.
- Yo = Fh - Gh + Gy0,
- GLSs = text_intervals_vertical({Xtl+Gx0,Ytl+Yo},LSs, []),
- text_intervals({Xtl+Gxm,Ytl}, Fh, Font, Chars, [GLSs|Out]).
-
-text_intervals_vertical( _, [], Out) -> Out;
-text_intervals_vertical({Xtl, Ytl}, [LS|LSs], Out) ->
- H = lists:foldl(
- fun ({Xl,Xr}, RLSs) ->
- [{Ytl, Xl + Xtl, Xr + Xtl}|RLSs]
- end, [], LS),
- text_intervals_vertical({Xtl, Ytl+1}, LSs, [H|Out]).
-
-
-%%% E. PostScript implementation
-
-eps(#image{ objects = Os, width = W, height = H}) ->
- list_to_binary([eps_header(W,H),eps_objects(H,Os),eps_footer()]).
-
-eps_objects(H,Os) -> eps_objects(H,Os, []).
-eps_objects(_,[], Out) -> lists:flatten(Out);
-eps_objects(H,[O|Os], Out) -> eps_objects(H,Os, [eps_object(H,O)|Out]).
-
-eps_object(H,#image_object{ type = text_horizontal, internals = {_Font,Text}, points = [{X,Y}], color={R,G,B,_}}) ->
- s("/Times-Roman findfont\n14 scalefont\nsetfont\n~.4f ~.4f ~.4f setrgbcolor\nnewpath\n~p ~p moveto\n(~s) show~n",
- [R,G,B,X,H-(Y + 10), Text]);
-eps_object(H,#image_object{ type = filled_ellipse, points = [{X1,Y1p},{X2,Y2p}], color={R,G,B,_}}) ->
- Y1 = H - Y1p,
- Y2 = H - Y2p,
- Xr = trunc((X2-X1)/2),
- Yr = trunc((Y2-Y1)/2),
- Cx = X1 + Xr,
- Cy = Y1 + Yr,
- s("~.4f ~.4f ~.4f setrgbcolor\nnewpath\n~p ~p ~p ~p 0 360 ellipse fill\n",
- [R,G,B,Cx,Cy,Xr,Yr]);
-eps_object(H,#image_object{ type = arc, points = [P0, P1], internals = D, color={R,G,B,_}}) ->
- Es = egd_primitives:arc_to_edges(P0, P1, D),
- [s("~.4f ~.4f ~.4f setrgbcolor\n", [R,G,B])|lists:foldl(fun
- ({{X1,Y1},{X2,Y2}}, Eps) ->
- [s("newpath\n~p ~p moveto\n~p ~p lineto\n1 setlinewidth\nstroke\n", [X1,H-Y1,X2,H-Y2])|Eps]
- end, [], Es)];
-
-eps_object(H,#image_object{ type = line, points = [{X1,Y1}, {X2,Y2}], color={R,G,B,_}}) ->
- s("~.4f ~.4f ~.4f setrgbcolor\nnewpath\n~p ~p moveto\n~p ~p lineto\n1 setlinewidth\nstroke\n",
- [R,G,B,X1,H-Y1,X2,H-Y2]);
-eps_object(H,#image_object{ type = rectangle, points = [{X1,Y1}, {X2,Y2}], color={R,G,B,_}}) ->
- s("~.4f ~.4f ~.4f setrgbcolor\nnewpath\n~p ~p moveto\n~p ~p lineto\n~p ~p lineto\n~p ~p lineto\n~p ~p lineto\n1 setlinewidth\nstroke\n",
- [R,G,B,X1,H-Y1,X2,H-Y1,X2,H-Y2,X1,H-Y2,X1,H-Y1]);
-eps_object(H,#image_object{ type = filled_rectangle, points = [{X1,Y1}, {X2,Y2}], color={R,G,B,_}}) ->
- s("~.4f ~.4f ~.4f setrgbcolor\nnewpath\n~p ~p moveto\n~p ~p lineto\n~p ~p lineto\n~p ~p lineto\n~p ~p lineto\nclosepath\nfill\n",
- [R,G,B,X1,H-Y1,X2,H-Y1,X2,H-Y2,X1,H-Y2,X1,H-Y1]);
-eps_object(_,_) -> "".
-
-s(Format, Terms) -> lists:flatten(io_lib:format(Format, Terms)).
-
-eps_header(W,H) ->
- s("%!PS-Adobe-3.0 EPSF-3.0\n%%Creator: Created by egd\n%%BoundingBox: 0 0 ~p ~p\n%%LanguageLevel: 2\n%%Pages: 1\n%%DocumentData: Clean7Bit\n",[W,H]) ++
- "%%BeginProlog\n/ellipse {7 dict begin\n/endangle exch def\n/startangle exch def\n/yradius exch def\n/xradius exch def\n/yC exch def\n/xC exch def\n"
- "/savematrix matrix currentmatrix def\nxC yC translate\nxradius yradius scale\n0 0 1 startangle endangle arc\nsavematrix setmatrix\nend\n} def\n"
- "%%EndProlog\n".
-
-eps_footer() ->
- "%%EOF\n".
diff --git a/lib/percept/src/percept.erl b/lib/percept/src/percept.erl
deleted file mode 100644
index 25c6ae19b1..0000000000
--- a/lib/percept/src/percept.erl
+++ /dev/null
@@ -1,337 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2007-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
-
-%%
-%% @doc Percept - Erlang Concurrency Profiling Tool
-%%
-%% This module provides the user interface for the application.
-%%
-
--module(percept).
--behaviour(application).
--export([profile/1,
- profile/2,
- profile/3,
- stop_profile/0,
- start_webserver/0,
- start_webserver/1,
- stop_webserver/0,
- stop_webserver/1,
- analyze/1,
- % Application behaviour
- start/2,
- stop/1]).
-
-
--include("percept.hrl").
-
-%%==========================================================================
-%% Type definitions
-%%==========================================================================
-
-%% @type percept_option() = procs | ports | exclusive | scheduler
-
--type percept_option() :: 'procs' | 'ports' | 'exclusive' | 'scheduler'.
-
-%%==========================================================================
-%% Application callback functions
-%%==========================================================================
-
-%% @spec start(Type, Args) -> {started, Hostname, Port} | {error, Reason}
-%% @doc none
-%% @hidden
-
-start(_Type, _Args) ->
- %% start web browser service
- start_webserver(0).
-
-%% @spec stop(State) -> ok
-%% @doc none
-%% @hidden
-
-stop(_State) ->
- %% stop web browser service
- stop_webserver(0).
-
-%%==========================================================================
-%% Interface functions
-%%==========================================================================
-
-%% @spec profile(Filename::string()) -> {ok, Port} | {already_started, Port}
-%% @see percept_profile
-
-%% profiling
-
--spec profile(Filename :: file:filename()) ->
- {'ok', port()} | {'already_started', port()}.
-
-profile(Filename) ->
- percept_profile:start(Filename, [procs]).
-
-%% @spec profile(Filename::string(), [percept_option()]) -> {ok, Port} | {already_started, Port}
-%% @see percept_profile
-
--spec profile(Filename :: file:filename(),
- Options :: [percept_option()]) ->
- {'ok', port()} | {'already_started', port()}.
-
-profile(Filename, Options) ->
- percept_profile:start(Filename, Options).
-
-%% @spec profile(Filename::string(), MFA::mfa(), [percept_option()]) -> ok | {already_started, Port} | {error, not_started}
-%% @see percept_profile
-
--spec profile(Filename :: file:filename(),
- Entry :: {atom(), atom(), list()},
- Options :: [percept_option()]) ->
- 'ok' | {'already_started', port()} | {'error', 'not_started'}.
-
-profile(Filename, MFA, Options) ->
- percept_profile:start(Filename, MFA, Options).
-
--spec stop_profile() -> 'ok' | {'error', 'not_started'}.
-
-%% @spec stop_profile() -> ok | {'error', 'not_started'}
-%% @see percept_profile
-
-stop_profile() ->
- percept_profile:stop().
-
-%% @spec analyze(string()) -> ok | {error, Reason}
-%% @doc Analyze file.
-
--spec analyze(Filename :: file:filename()) ->
- 'ok' | {'error', any()}.
-
-analyze(Filename) ->
- case percept_db:start() of
- {started, DB} ->
- parse_and_insert(Filename,DB);
- {restarted, DB} ->
- parse_and_insert(Filename,DB)
- end.
-
-%% @spec start_webserver() -> {started, Hostname, Port} | {error, Reason}
-%% Hostname = string()
-%% Port = integer()
-%% Reason = term()
-%% @doc Starts webserver.
-
--spec start_webserver() ->
- {'started', string(), pos_integer()} | {'error', any()}.
-
-start_webserver() ->
- start_webserver(0).
-
-%% @spec start_webserver(integer()) -> {started, Hostname, AssignedPort} | {error, Reason}
-%% Hostname = string()
-%% AssignedPort = integer()
-%% Reason = term()
-%% @doc Starts webserver. If port number is 0, an available port number will
-%% be assigned by inets.
-
--spec start_webserver(Port :: non_neg_integer()) ->
- {'started', string(), pos_integer()} | {'error', any()}.
-
-start_webserver(Port) when is_integer(Port) ->
- ok = ensure_loaded(percept),
- case whereis(percept_httpd) of
- undefined ->
- {ok, Config} = get_webserver_config("percept", Port),
- ok = application:ensure_started(inets),
- case inets:start(httpd, Config) of
- {ok, Pid} ->
- AssignedPort = find_service_port_from_pid(inets:services_info(), Pid),
- {ok, Host} = inet:gethostname(),
- %% workaround until inets can get me a service from a name.
- Mem = spawn(fun() -> service_memory({Pid,AssignedPort,Host}) end),
- register(percept_httpd, Mem),
- {started, Host, AssignedPort};
- {error, Reason} ->
- {error, {inets, Reason}}
- end;
- _ ->
- {error, already_started}
- end.
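The functions above compose into the usual workflow: record a profile to a file, load it into the percept database, and inspect it through the web UI. A minimal success-path sketch; the file name "test.dat" and the port number 8888 are arbitrary example values:

{ok, _TracePort} = percept:profile("test.dat"),
%% ... run the code to be profiled ...
ok = percept:stop_profile(),
ok = percept:analyze("test.dat"),
{started, Host, Port} = percept:start_webserver(8888),
%% browse http://Host:Port/ and, when done:
ok = percept:stop_webserver().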
-
-%% @spec stop_webserver() -> ok | {error, not_started}
-%% @doc Stops webserver.
-
-stop_webserver() ->
- case whereis(percept_httpd) of
- undefined ->
- {error, not_started};
- Pid ->
- do_stop([], Pid)
- end.
-
-do_stop([], Pid)->
- Pid ! {self(), get_port},
- Port = receive P -> P end,
- do_stop(Port, Pid);
-do_stop(Port, [])->
- case whereis(percept_httpd) of
- undefined ->
- {error, not_started};
- Pid ->
- do_stop(Port, Pid)
- end;
-do_stop(Port, Pid)->
- case find_service_pid_from_port(inets:services_info(), Port) of
- undefined ->
- {error, not_started};
- Pid2 ->
- Pid ! quit,
- inets:stop(httpd, Pid2)
- end.
-
-%% @spec stop_webserver(integer()) -> ok | {error, not_started}
-%% @doc Stops webserver of the given port.
-%% @hidden
-
-stop_webserver(Port) ->
- do_stop(Port,[]).
-
-%%==========================================================================
-%% Auxiliary functions
-%%==========================================================================
-
-%% parse_and_insert
-
-parse_and_insert(Filename, DB) ->
- io:format("Parsing: ~p ~n", [Filename]),
- T0 = erlang:monotonic_time(millisecond),
- Pid = dbg:trace_client(file, Filename, mk_trace_parser(self())),
- Ref = erlang:monitor(process, Pid),
- parse_and_insert_loop(Filename, Pid, Ref, DB, T0).
-
-parse_and_insert_loop(Filename, Pid, Ref, DB, T0) ->
- receive
- {'DOWN',Ref,process, Pid, noproc} ->
- io:format("Incorrect file or malformed trace file: ~p~n", [Filename]),
- {error, file};
- {parse_complete, {Pid, Count}} ->
- receive {'DOWN', Ref, process, Pid, normal} -> ok after 0 -> ok end,
- DB ! {action, consolidate},
- T1 = erlang:monotonic_time(millisecond),
- io:format("Parsed ~w entries in ~w ms.~n", [Count, T1 - T0]),
- io:format(" ~p created processes.~n", [length(percept_db:select({information, procs}))]),
- io:format(" ~p opened ports.~n", [length(percept_db:select({information, ports}))]),
- ok;
- {'DOWN',Ref, process, Pid, normal} -> parse_and_insert_loop(Filename, Pid, Ref, DB, T0);
- {'DOWN',Ref, process, Pid, Reason} -> {error, Reason}
- end.
-
-mk_trace_parser(Pid) ->
- {fun trace_parser/2, {0, Pid}}.
-
-trace_parser(end_of_trace, {Count, Pid}) ->
- Pid ! {parse_complete, {self(),Count}},
- receive
- {ack, Pid} ->
- ok
- end;
-trace_parser(Trace, {Count, Pid}) ->
- percept_db:insert(Trace),
- {Count + 1, Pid}.
-
-find_service_pid_from_port([], _) ->
- undefined;
-find_service_pid_from_port([{_, Pid, Options} | Services], Port) ->
- case lists:keyfind(port, 1, Options) of
- false ->
- find_service_pid_from_port(Services, Port);
- {port, Port} ->
- Pid
- end.
-
-find_service_port_from_pid([], _) ->
- undefined;
-find_service_port_from_pid([{_, Pid, Options} | _], Pid) ->
- case lists:keyfind(port, 1, Options) of
- false ->
- undefined;
- {port, Port} ->
- Port
- end;
-find_service_port_from_pid([{_, _, _} | Services], Pid) ->
- find_service_port_from_pid(Services, Pid).
-
-%% service memory
-
-service_memory({Pid, Port, Host}) ->
- receive
- quit ->
- ok;
- {Reply, get_port} ->
- Reply ! Port,
- service_memory({Pid, Port, Host});
- {Reply, get_host} ->
- Reply ! Host,
- service_memory({Pid, Port, Host});
- {Reply, get_pid} ->
- Reply ! Pid,
- service_memory({Pid, Port, Host})
- end.
-
-% Create config data for the webserver
-
-get_webserver_config(Servername, Port) when is_list(Servername), is_integer(Port) ->
- Path = code:priv_dir(percept),
- Root = filename:join([Path, "server_root"]),
- MimeTypesFile = filename:join([Root,"conf","mime.types"]),
- {ok, MimeTypes} = httpd_conf:load_mime_types(MimeTypesFile),
- Config = [
- % Roots
- {server_root, Root},
- {document_root,filename:join([Root, "htdocs"])},
-
- % Aliases
- {eval_script_alias,{"/eval",[io]}},
- {erl_script_alias,{"/cgi-bin",[percept_graph,percept_html,io]}},
- {script_alias,{"/cgi-bin/", filename:join([Root, "cgi-bin"])}},
- {alias,{"/javascript/",filename:join([Root, "scripts"]) ++ "/"}},
- {alias,{"/images/", filename:join([Root, "images"]) ++ "/"}},
- {alias,{"/css/", filename:join([Root, "css"]) ++ "/"}},
-
- % Configs
- {default_type,"text/plain"},
- {directory_index,["index.html"]},
- {mime_types, MimeTypes},
- {modules,[mod_alias,
- mod_esi,
- mod_actions,
- mod_cgi,
- mod_dir,
- mod_get,
- mod_head
- ]},
- {com_type,ip_comm},
- {server_name, Servername},
- {bind_address, any},
- {port, Port}],
- {ok, Config}.
-
-ensure_loaded(App) ->
- case application:load(App) of
- ok -> ok;
- {error,{already_loaded,App}} -> ok;
- Error -> Error
- end.
diff --git a/lib/percept/src/percept.hrl b/lib/percept/src/percept.hrl
deleted file mode 100644
index 58926cd1b4..0000000000
--- a/lib/percept/src/percept.hrl
+++ /dev/null
@@ -1,53 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2007-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
-
--define(seconds(EndTs,StartTs), timer:now_diff(EndTs, StartTs)/1000000).
-
-%%% ------------------- %%%
-%%% Type definitions %%%
-%%% ------------------- %%%
-
--type timestamp() :: {non_neg_integer(), non_neg_integer(), non_neg_integer()}.
--type true_mfa() :: {atom(), atom(), byte() | list()}.
--type state() :: 'active' | 'inactive'.
--type scheduler_id() :: {'scheduler_id', non_neg_integer()}.
-
-%%% ------------------- %%%
-%%% Records %%%
-%%% ------------------- %%%
-
--record(activity, {
- timestamp ,%:: timestamp() ,
- id ,%:: pid() | port() | scheduler_id(),
- state = undefined ,%:: state() | 'undefined',
- where = undefined ,%:: true_mfa() | 'undefined',
- runnable_count = 0 %:: non_neg_integer()
- }).
-
--record(information, {
- id ,%:: pid() | port(),
- name = undefined ,%:: atom() | string() | 'undefined',
- entry = undefined ,%:: true_mfa() | 'undefined',
- start = undefined ,%:: timestamp() | 'undefined',
- stop = undefined ,%:: timestamp() | 'undefined',
- parent = undefined ,%:: pid() | 'undefined',
- children = [] %:: [pid()]
- }).
-
diff --git a/lib/percept/src/percept_analyzer.erl b/lib/percept/src/percept_analyzer.erl
deleted file mode 100644
index f38d026905..0000000000
--- a/lib/percept/src/percept_analyzer.erl
+++ /dev/null
@@ -1,368 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2007-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-
-%% @doc Utility functions to operate on percept data. These functions should
-%% be considered experimental. Behaviour may change in future releases.
-
--module(percept_analyzer).
--export([
- minmax/1,
- waiting_activities/1,
- activities2count/2,
- activities2count/3,
- activities2count2/2,
- analyze_activities/2,
- runnable_count/1,
- runnable_count/2,
- seconds2ts/2,
- minmax_activities/2,
- mean/1
- ]).
-
--include("percept.hrl").
-
-%%==========================================================================
-%%
-%% Interface functions
-%%
-%%==========================================================================
-
-
-%% @spec minmax([{X, Y}]) -> {MinX, MinY, MaxX, MaxY}
-%% X = number()
-%% Y = number()
-%% MinX = number()
-%% MinY = number()
-%% MaxX = number()
-%% MaxY = number()
-%% @doc Returns the min and max of a set of 2-dimensional numbers.
-
-minmax(Data) ->
- Xs = [ X || {X,_Y} <- Data],
- Ys = [ Y || {_X, Y} <- Data],
- {lists:min(Xs), lists:min(Ys), lists:max(Xs), lists:max(Ys)}.
-
-%% @spec mean([number()]) -> {Mean, StdDev, N}
-%% Mean = float()
-%% StdDev = float()
-%% N = integer()
-%% @doc Calculates the mean and the standard deviation of a set of
-%% numbers.
-
-mean([]) -> {0, 0, 0};
-mean([Value]) -> {Value, 0, 1};
-mean(List) -> mean(List, {0, 0, 0}).
-
-mean([], {Sum, SumSquare, N}) ->
- Mean = Sum / N,
- StdDev = math:sqrt((SumSquare - Sum*Sum/N)/(N - 1)),
- {Mean, StdDev, N};
-mean([Value | List], {Sum, SumSquare, N}) ->
- mean(List, {Sum + Value, SumSquare + Value*Value, N + 1}).
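A worked check of the fold above for the input [1,2,3]:

%% Sum = 6, SumSquare = 14, N = 3
%% Mean   = 6 / 3                               = 2.0
%% StdDev = math:sqrt((14 - 6*6/3) / (3 - 1))   = 1.0
%% mean([1,2,3]) =:= {2.0, 1.0, 3}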
-
-
-
-activities2count2(Acts, StartTs) ->
- Start = inactive_start_states(Acts),
- activities2count2(Acts, StartTs, Start, []).
-
-activities2count2([], _, _, Out) -> lists:reverse(Out);
-activities2count2([#activity{ id = Id, timestamp = Ts, state = active} | Acts], StartTs, {Proc,Port}, Out) when is_pid(Id) ->
- activities2count2(Acts, StartTs, {Proc + 1, Port}, [{?seconds(Ts, StartTs), Proc + 1, Port}|Out]);
-activities2count2([#activity{ id = Id, timestamp = Ts, state = inactive} | Acts], StartTs, {Proc,Port}, Out) when is_pid(Id) ->
- activities2count2(Acts, StartTs, {Proc - 1, Port}, [{?seconds(Ts, StartTs), Proc - 1, Port}|Out]);
-activities2count2([#activity{ id = Id, timestamp = Ts, state = active} | Acts], StartTs, {Proc,Port}, Out) when is_port(Id) ->
- activities2count2(Acts, StartTs, {Proc, Port + 1}, [{?seconds(Ts, StartTs), Proc, Port + 1}|Out]);
-activities2count2([#activity{ id = Id, timestamp = Ts, state = inactive} | Acts], StartTs, {Proc,Port}, Out) when is_port(Id) ->
- activities2count2(Acts, StartTs, {Proc, Port - 1}, [{?seconds(Ts, StartTs), Proc, Port - 1}|Out]).
-
-
-inactive_start_states(Acts) ->
- D = activity_start_states(Acts, dict:new()),
- dict:fold(fun
- (K, inactive, {Procs, Ports}) when is_pid(K) -> {Procs + 1, Ports};
- (K, inactive, {Procs, Ports}) when is_port(K) -> {Procs, Ports + 1};
- (_, _, {Procs, Ports}) -> {Procs, Ports}
- end, {0,0}, D).
-activity_start_states([], D) -> D;
-activity_start_states([#activity{id = Id, state = State}|Acts], D) ->
- case dict:is_key(Id, D) of
- true -> activity_start_states(Acts, D);
- false -> activity_start_states(Acts, dict:store(Id, State, D))
- end.
-
-
-
-
-%% @spec activities2count(#activity{}, timestamp()) -> Result
-%% Result = [{Time, ProcessCount, PortCount}]
-%% Time = float()
-%% ProcessCount = integer()
-%% PortCount = integer()
-%% @doc Calculate the resulting active processes and ports during
-%% the activity interval.
-%% Also checks active/inactive consistency.
-%% A task will always begin with an active state and end with an inactive state.
-
-activities2count(Acts, StartTs) when is_list(Acts) -> activities2count(Acts, StartTs, separated).
-
-activities2count(Acts, StartTs, Type) when is_list(Acts) -> activities2count_loop(Acts, {StartTs, {0,0}}, Type, []).
-
-activities2count_loop([], _, _, Out) -> lists:reverse(Out);
-activities2count_loop(
- [#activity{ timestamp = Ts, id = Id, runnable_count = Rc} | Acts],
- {StartTs, {Procs, Ports}}, separated, Out) ->
-
- Time = ?seconds(Ts, StartTs),
- case Id of
- Id when is_port(Id) ->
- Entry = {Time, Procs, Rc},
- activities2count_loop(Acts, {StartTs, {Procs, Rc}}, separated, [Entry | Out]);
- Id when is_pid(Id) ->
- Entry = {Time, Rc, Ports},
- activities2count_loop(Acts, {StartTs, {Rc, Ports}}, separated, [Entry | Out]);
- _ ->
- activities2count_loop(Acts, {StartTs,{Procs, Ports}}, separated, Out)
- end;
-activities2count_loop(
- [#activity{ timestamp = Ts, id = Id, runnable_count = Rc} | Acts],
- {StartTs, {Procs, Ports}}, summated, Out) ->
-
- Time = ?seconds(Ts, StartTs),
- case Id of
- Id when is_port(Id) ->
- Entry = {Time, Procs + Rc},
- activities2count_loop(Acts, {StartTs, {Procs, Rc}}, summated, [Entry | Out]);
- Id when is_pid(Id) ->
- Entry = {Time, Rc + Ports},
- activities2count_loop(Acts, {StartTs, {Rc, Ports}}, summated, [Entry | Out])
- end.
-
-%% @spec waiting_activities([#activity{}]) -> FunctionList
-%% FunctionList = [{Seconds, Mfa, {Mean, StdDev, N}}]
-%% Seconds = float()
-%% Mfa = mfa()
-%% Mean = float()
-%% StdDev = float()
-%% N = integer()
-%% @doc Calculates the time, both average and total, that a process has spent
-%% in a receive state at a specific function. However, if there are multiple receives
-%% in a function it cannot differentiate between them.
-
-waiting_activities(Activities) ->
- ListedMfas = waiting_activities_mfa_list(Activities, []),
- Unsorted = lists:foldl(
- fun (Mfa, MfaList) ->
- {Total, WaitingTimes} = get({waiting_mfa, Mfa}),
-
- % cleanup
- erlang:erase({waiting_mfa, Mfa}),
-
- % statistics of receive waiting places
- Stats = mean(WaitingTimes),
-
- [{Total, Mfa, Stats} | MfaList]
- end, [], ListedMfas),
- lists:sort(fun ({A,_,_},{B,_,_}) ->
- if
- A > B -> true;
- true -> false
- end
- end, Unsorted).
-
-
-%% Generate lists of receive waiting times per mfa
-%% Out:
-%% ListedMfas = [mfa()]
-%% Intrinsic:
-%% get({waiting_mfa, mfa()}) ->
-%% {Total, [WaitingTime]}
-%% WaitingTime = float()
-
-waiting_activities_mfa_list([], ListedMfas) -> ListedMfas;
-waiting_activities_mfa_list([Activity|Activities], ListedMfas) ->
- #activity{id = Pid, state = Act, timestamp = Time, where = MFA} = Activity,
- case Act of
- active ->
- waiting_activities_mfa_list(Activities, ListedMfas);
- inactive ->
- % Want to know how long the wait is in a receive,
- % it is given via the next activity
- case Activities of
- [] ->
- [Info] = percept_db:select(information, Pid),
- case Info#information.stop of
- undefined ->
- % get profile end time
- Waited = ?seconds(
- percept_db:select({system,stop_ts}),
- Time);
- Time2 ->
- Waited = ?seconds(Time2, Time)
- end,
- case get({waiting_mfa, MFA}) of
- undefined ->
- put({waiting_mfa, MFA}, {Waited, [Waited]}),
- [MFA | ListedMfas];
- {Total, TimedMfa} ->
- put({waiting_mfa, MFA}, {Total + Waited, [Waited | TimedMfa]}),
- ListedMfas
- end;
- [#activity{timestamp=Time2, id = Pid, state = active} | _ ] ->
- % Calculate waiting time
- Waited = ?seconds(Time2, Time),
- % Get previous entry
-
- case get({waiting_mfa, MFA}) of
- undefined ->
- % add entry to list
- put({waiting_mfa, MFA}, {Waited, [Waited]}),
- waiting_activities_mfa_list(Activities, [MFA|ListedMfas]);
- {Total, TimedMfa} ->
- put({waiting_mfa, MFA}, {Total + Waited, [Waited | TimedMfa]}),
- waiting_activities_mfa_list(Activities, ListedMfas)
- end;
- _ -> error
- end
- end.
-
-%% seconds2ts(Seconds, StartTs) -> TS
-%% In:
-%% Seconds = float()
-%% StartTs = timestamp()
-%% Out:
-%% TS = timestamp()
-
-%% @spec seconds2ts(float(), StartTs::{integer(),integer(),integer()}) -> timestamp()
-%% @doc Calculates a timestamp given a duration in seconds and a starting timestamp.
-
-seconds2ts(Seconds, {Ms, S, Us}) ->
- % Calculate mega seconds integer
- MsInteger = trunc(Seconds) div 1000000 ,
-
-    % Calculate the remainder for seconds
- SInteger = trunc(Seconds),
-
-    % Calculate the remainder for microseconds
- UsInteger = trunc((Seconds - SInteger) * 1000000),
-
- % Wrap overflows
-
- UsOut = (UsInteger + Us) rem 1000000,
- SOut = ((SInteger + S) + (UsInteger + Us) div 1000000) rem 1000000,
- MsOut = (MsInteger+ Ms) + ((SInteger + S) + (UsInteger + Us) div 1000000) div 1000000,
-
- {MsOut, SOut, UsOut}.
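seconds2ts/2 adds a float number of seconds onto a now/0-style {MegaSecs, Secs, MicroSecs} tuple, carrying overflow upwards. A worked example with made-up input values:

%% seconds2ts(2.5, {0, 999999, 600000}):
%%   UsInteger = 500000, UsOut = (500000 + 600000) rem 1000000 = 100000
%%   SOut      = ((2 + 999999) + 1) rem 1000000 = 2
%%   MsOut     = 0 + 1000002 div 1000000 = 1
%% giving {1, 2, 100000}: both the microsecond and second fields carry over.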
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%
-% Analyze interval for concurrency
-%
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-%% @spec analyze_activities(integer(), [#activity{}]) -> [{integer(),#activity{}}]
-%% @hidden
-
-analyze_activities(Threshold, Activities) ->
- RunnableCount = runnable_count(Activities, 0),
- analyze_runnable_activities(Threshold, RunnableCount).
-
-
-%% runnable_count(Activities, StartValue) -> RunnableCount
-%% In:
-%% Activities = [activity()]
-%% StartValue = integer()
-%% Out:
-%% RunnableCount = [{integer(), activity()}]
-%% Purpose:
-%% Calculate the runnable count of a given interval of generic
-%% activities.
-
-%% @spec runnable_count([#activity{}]) -> [{integer(),#activity{}}]
-%% @hidden
-
-runnable_count(Activities) ->
- Threshold = runnable_count_threshold(Activities),
- runnable_count(Activities, Threshold, []).
-
-runnable_count_threshold(Activities) ->
- CountedActs = runnable_count(Activities, 0),
- Counts = [C || {C, _} <- CountedActs],
- Min = lists:min(Counts),
- 0 - Min.
-%% @spec runnable_count([#activity{}],integer()) -> [{integer(),#activity{}}]
-%% @hidden
-
-runnable_count(Activities, StartCount) when is_integer(StartCount) ->
- runnable_count(Activities, StartCount, []).
-runnable_count([], _ , Out) ->
- lists:reverse(Out);
-runnable_count([A | As], PrevCount, Out) ->
- case A#activity.state of
- active ->
- runnable_count(As, PrevCount + 1, [{PrevCount + 1, A} | Out]);
- inactive ->
- runnable_count(As, PrevCount - 1, [{PrevCount - 1, A} | Out])
- end.
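runnable_count/3 keeps a running balance of active/inactive transitions and tags every activity with it; runnable_count_threshold/1 then picks the start value that lifts the lowest point of that curve to zero. A small worked example with made-up activities:

%% States in order: active, active, inactive, inactive  (StartCount = 0)
%% Produced counts: 1, 2, 1, 0
%% If the first event had been inactive, the minimum would have been -1 and
%% runnable_count_threshold/1 would return 1 to compensate.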
-
-%% In:
-%% Threshold = integer(),
-%% RunnableActivities = [{Rc, activity()}]
-%% Rc = integer()
-
-analyze_runnable_activities(Threshold, RunnableActivities) ->
- analyze_runnable_activities(Threshold, RunnableActivities, []).
-
-analyze_runnable_activities( _z, [], Out) ->
- lists:reverse(Out);
-analyze_runnable_activities(Threshold, [{Rc, Act} | RunnableActs], Out) ->
- if
- Rc =< Threshold ->
- analyze_runnable_activities(Threshold, RunnableActs, [{Rc,Act} | Out]);
- true ->
- analyze_runnable_activities(Threshold, RunnableActs, Out)
- end.
-
-%% minmax_activity(Activities, Count) -> {Min, Max}
-%% In:
-%% Activities = [activity()]
-%% InitialCount = non_neg_integer()
-%% Out:
-%% {Min, Max}
-%% Min = non_neg_integer()
-%% Max = non_neg_integer()
-%% Purpose:
-%% Minimal and maximal activity during an activity interval.
-%% Initial activity count needs to be supplied.
-
-%% @spec minmax_activities([#activity{}], integer()) -> {integer(), integer()}
-%% @doc Calculates the minimum and maximum of runnable activities (processes
-%% and ports) during the interval referred to by the activity list.
-
-minmax_activities(Activities, Count) ->
- minmax_activities(Activities, Count, {Count, Count}).
-minmax_activities([], _, Out) ->
- Out;
-minmax_activities([A|Acts], Count, {Min, Max}) ->
- case A#activity.state of
- active ->
- minmax_activities(Acts, Count + 1, {Min, lists:max([Count + 1, Max])});
- inactive ->
- minmax_activities(Acts, Count - 1, {lists:min([Count - 1, Min]), Max})
- end.
diff --git a/lib/percept/src/percept_db.erl b/lib/percept/src/percept_db.erl
deleted file mode 100644
index 6cbe3ce022..0000000000
--- a/lib/percept/src/percept_db.erl
+++ /dev/null
@@ -1,780 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2007-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-
-%%
-%% @doc Percept database.
-%%
-%%
-
--module(percept_db).
-
--export([start/0,
- stop/0,
- insert/1,
- select/2,
- select/1,
- consolidate/0]).
-
--include("percept.hrl").
--define(STOP_TIMEOUT, 1000).
-%%==========================================================================
-%% Type definitions
-%%==========================================================================
-
-%% @type activity_option() =
-%% {ts_min, timestamp()} |
-%% {ts_max, timestamp()} |
-%% {ts_exact, bool()} |
-%% {mfa, {atom(), atom(), byte()}} |
-%% {state, active | inactive} |
-%% {id, all | procs | ports | pid() | port()}
-
-%% @type scheduler_option() =
-%% {ts_min, timestamp()} |
-%% {ts_max, timestamp()} |
-%% {ts_exact, bool()} |
-%% {id, scheduler_id()}
-
-%% @type system_option() = start_ts | stop_ts
-
-%% @type information_option() =
-%% all | procs | ports | pid() | port()
-
-
-
-
-%%==========================================================================
-%% Interface functions
-%%==========================================================================
-
-%% @spec start() -> ok | {started, Pid} | {restarted, Pid}
-%% Pid = pid()
-%% @doc Starts or restarts the percept database.
-
--spec start() -> {'started', pid()} | {'restarted', pid()}.
-
-start() ->
- case erlang:whereis(percept_db) of
- undefined ->
- {started, do_start()};
- PerceptDB ->
- {restarted, restart(PerceptDB)}
- end.
-
-%% @spec restart(pid()) -> pid()
-%% @private
-%% @doc restarts the percept database.
-
--spec restart(pid())-> pid().
-
-restart(PerceptDB)->
- stop_sync(PerceptDB),
- do_start().
-
-%% @spec do_start() -> pid()
-%% @private
-%% @doc starts the percept database.
-
--spec do_start()-> pid().
-
-do_start()->
- Pid = spawn(fun() -> init_percept_db() end),
- erlang:register(percept_db, Pid),
- Pid.
-
-%% @spec stop() -> not_started | {stopped, Pid}
-%% Pid = pid()
-%% @doc Stops the percept database.
-
--spec stop() -> 'not_started' | {'stopped', pid()}.
-
-stop() ->
- case erlang:whereis(percept_db) of
- undefined ->
- not_started;
- Pid ->
- Pid ! {action, stop},
- {stopped, Pid}
- end.
-
-%% @spec stop_sync(pid()) -> true
-%% @private
-%% @doc Stops the percept database, with a synchronous call.
-
--spec stop_sync(pid()) -> true.
-
-stop_sync(Pid) ->
- MonitorRef = erlang:monitor(process, Pid),
- _ = stop(),
- receive
- {'DOWN', MonitorRef, _Type, Pid, _Info}->
- true
- after ?STOP_TIMEOUT->
- erlang:demonitor(MonitorRef, [flush]),
- exit(Pid, kill)
- end.
-
-%% @spec insert(tuple()) -> ok
-%% @doc Inserts a trace or profile message to the database.
-
-insert(Trace) ->
- percept_db ! {insert, Trace},
- ok.
-
-
-%% @spec select({atom(), Options}) -> Result
-%% @doc Synchronous call. Selects information based on a query.
-%%
-%% <p>Queries:</p>
-%% <pre>
-%% {system, Option}
-%% Option = system_option()
-%% Result = timestamp()
-%% {information, Options}
-%% Options = [information_option()]
-%% Result = [#information{}]
-%% {scheduler, Options}
-%% Options = [scheduler_option()]
-%% Result = [#activity{}]
-%% {activity, Options}
-%% Options = [activity_option()]
-%% Result = [#activity{}]
-%% </pre>
-%% <p>
-%% Note: selections of Id are always OR'ed together; all other options are AND'ed.
-%% </p>
-
-select(Query) ->
- percept_db ! {select, self(), Query},
- receive {result, Match} -> Match end.
-
-%% @spec select(atom(), list()) -> Result
-%% @equiv select({Table,Options})
-
-select(Table, Options) ->
- percept_db ! {select, self(), {Table, Options}},
- receive {result, Match} -> Match end.
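A few hedged query examples matching the formats documented above; T0 and T1 are placeholder timestamps:

%% All process activity between two timestamps:
percept_db:select({activity, [{ts_min, T0}, {ts_max, T1}, {id, procs}]}),
%% The profiling start timestamp:
percept_db:select({system, start_ts}),
%% Information records for every known port:
percept_db:select(information, ports).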
-
-%% @spec consolidate() -> Result
-%% @doc Checks timestamp and state-flow inconsistencies in
-%% the database.
-
-consolidate() ->
- percept_db ! {action, consolidate},
- ok.
-
-%%==========================================================================
-%% Database loop
-%%==========================================================================
-
-init_percept_db() ->
- % Proc and Port information
- pdb_info = ets:new(pdb_info, [named_table, private, {keypos, #information.id}, set]),
-
- % Scheduler runnability
- pdb_scheduler = ets:new(pdb_scheduler, [named_table, private, {keypos, #activity.timestamp}, ordered_set]),
-
- % Process and Port runnability
- pdb_activity = ets:new(pdb_activity, [named_table, private, {keypos, #activity.timestamp}, ordered_set]),
-
- % System status
- pdb_system = ets:new(pdb_system, [named_table, private, {keypos, 1}, set]),
-
- % System warnings
- pdb_warnings = ets:new(pdb_warnings, [named_table, private, {keypos, 1}, ordered_set]),
- put(debug, 0),
- loop_percept_db().
-
-loop_percept_db() ->
- receive
- {insert, Trace} ->
- insert_trace(clean_trace(Trace)),
- loop_percept_db();
- {select, Pid, Query} ->
- Pid ! {result, select_query(Query)},
- loop_percept_db();
- {action, stop} ->
- stopped;
- {action, consolidate} ->
- consolidate_db(),
- loop_percept_db();
- {operate, Pid, {Table, {Fun, Start}}} ->
- Result = ets:foldl(Fun, Start, Table),
- Pid ! {result, Result},
- loop_percept_db();
- Unhandled ->
- io:format("loop_percept_db, unhandled query: ~p~n", [Unhandled]),
- loop_percept_db()
- end.
-
-%%==========================================================================
-%% Auxiliary functions
-%%==========================================================================
-
-%% cleans trace messages from external pids
-
-clean_trace(Trace) when is_tuple(Trace) -> list_to_tuple(clean_trace(tuple_to_list(Trace)));
-clean_trace(Trace) when is_list(Trace) -> clean_list(Trace, []);
-clean_trace(Trace) when is_pid(Trace) ->
- PidStr = pid_to_list(Trace),
- [_,P2,P3p] = string:tokens(PidStr,"."),
- P3 = lists:sublist(P3p, 1, length(P3p) - 1),
- erlang:list_to_pid("<0." ++ P2 ++ "." ++ P3 ++ ">");
-clean_trace(Trace) -> Trace.
-
-clean_list([], Out) -> lists:reverse(Out);
-clean_list([Element|Trace], Out) ->
- clean_list(Trace, [clean_trace(Element)|Out]).
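The pid clause above rewrites the node part of a remote pid so that every pid looks local; the same string manipulation in isolation (normalise_pid_string/1 is a made-up name, not part of the module):

normalise_pid_string(PidStr) ->
    [_Node, P2, P3p] = string:tokens(PidStr, "."),
    P3 = lists:sublist(P3p, 1, length(P3p) - 1),    % drop the trailing ">"
    "<0." ++ P2 ++ "." ++ P3 ++ ">".
%% normalise_pid_string("<7031.42.0>") -> "<0.42.0>"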
-
-
-insert_trace(Trace) ->
- case Trace of
- {profile_start, Ts} ->
- update_system_start_ts(Ts),
- ok;
- {profile_stop, Ts} ->
- update_system_stop_ts(Ts),
- ok;
- %%% erlang:system_profile, option: runnable_procs
- %%% ---------------------------------------------
- {profile, Id, State, Mfa, TS} when is_pid(Id) ->
- % Update runnable count in activity and db
-
- case check_activity_consistency(Id, State) of
- invalid_state ->
- ignored;
- ok ->
- Rc = get_runnable_count(procs, State),
- % Update registered procs
- % insert proc activity
- update_activity(#activity{
- id = Id,
- state = State,
- timestamp = TS,
- runnable_count = Rc,
- where = Mfa}),
- ok
- end;
- %%% erlang:system_profile, option: runnable_ports
- %%% ---------------------------------------------
- {profile, Id, State, Mfa, TS} when is_port(Id) ->
- case check_activity_consistency(Id, State) of
- invalid_state ->
- ignored;
- ok ->
- % Update runnable count in activity and db
- Rc = get_runnable_count(ports, State),
-
- % Update registered ports
- % insert port activity
- update_activity(#activity{
- id = Id,
- state = State,
- timestamp = TS,
- runnable_count = Rc,
- where = Mfa}),
-
- ok
- end;
- %%% erlang:system_profile, option: scheduler
- {profile, scheduler, Id, State, Scheds, Ts} ->
- % insert scheduler activity
- update_scheduler(#activity{
- id = {scheduler, Id},
- state = State,
- timestamp = Ts,
- where = Scheds}),
- ok;
-
- %%% erlang:trace, option: procs
- %%% ---------------------------
- {trace_ts, Parent, spawn, Pid, Mfa, TS} ->
- InformativeMfa = mfa2informative(Mfa),
- % Update id_information
- update_information(#information{id = Pid, start = TS, parent = Parent, entry = InformativeMfa}),
- update_information_child(Parent, Pid),
- ok;
- {trace_ts, Pid, exit, _Reason, TS} ->
- % Update registered procs
-
- % Update id_information
- update_information(#information{id = Pid, stop = TS}),
-
- ok;
- {trace_ts, Pid, register, Name, _Ts} when is_pid(Pid) ->
- % Update id_information
- update_information(#information{id = Pid, name = Name}),
- ok;
- {trace_ts, Pid, register, Name, _Ts} when is_pid(Pid) ->
- % Update id_information
- update_information(#information{id = Pid, name = Name}),
- ok;
- {trace_ts, _Pid, unregister, _Name, _Ts} ->
- % Not implemented
- ok;
- {trace_ts, Pid, getting_unlinked, _Id, _Ts} when is_pid(Pid) ->
- % Update id_information
- ok;
- {trace_ts, Pid, getting_linked, _Id, _Ts} when is_pid(Pid)->
- % Update id_information
- ok;
- {trace_ts, Pid, link, _Id, _Ts} when is_pid(Pid)->
- % Update id_information
- ok;
- {trace_ts, Pid, unlink, _Id, _Ts} when is_pid(Pid) ->
- % Update id_information
- ok;
-
- %%% erlang:trace, option: ports
- %%% ----------------------------
- {trace_ts, Caller, open, Port, Driver, TS} ->
- % Update id_information
- update_information(#information{
- id = Port, entry = Driver, start = TS, parent = Caller}),
- ok;
- {trace_ts, Port, closed, _Reason, Ts} ->
- % Update id_information
- update_information(#information{id = Port, stop = Ts}),
- ok;
-
- Unhandled ->
- io:format("insert_trace, unhandled: ~p~n", [Unhandled])
- end.
-
-mfa2informative({erlang, apply, [M, F, Args]}) -> mfa2informative({M, F,Args});
-mfa2informative({erlang, apply, [Fun, Args]}) ->
- FunInfo = erlang:fun_info(Fun),
- M = case proplists:get_value(module, FunInfo, undefined) of
- [] -> undefined_fun_module;
- undefined -> undefined_fun_module;
- Module -> Module
- end,
- F = case proplists:get_value(name, FunInfo, undefined) of
- [] -> undefined_fun_function;
- undefined -> undefined_fun_function;
- Function -> Function
- end,
- mfa2informative({M, F, Args});
-mfa2informative(Mfa) -> Mfa.
-
-%% consolidate_db() -> bool()
-%% Purpose:
-%% Check start/stop time
-%% Activity consistency
-
-consolidate_db() ->
- io:format("Consolidating...~n"),
- % Check start/stop timestamps
- case select_query({system, start_ts}) of
- undefined ->
- Min = lists:min(list_all_ts()),
- update_system_start_ts(Min);
- _ -> ok
- end,
- case select_query({system, stop_ts}) of
- undefined ->
- Max = lists:max(list_all_ts()),
- update_system_stop_ts(Max);
- _ -> ok
- end,
- consolidate_runnability(),
- ok.
-
-consolidate_runnability() ->
- put({runnable, procs}, undefined),
- put({runnable, ports}, undefined),
- consolidate_runnability_loop(ets:first(pdb_activity)).
-
-consolidate_runnability_loop('$end_of_table') -> ok;
-consolidate_runnability_loop(Key) ->
- case ets:lookup(pdb_activity, Key) of
- [#activity{id = Id, state = State } = A] when is_pid(Id) ->
- Rc = get_runnable_count(procs, State),
- ets:insert(pdb_activity, A#activity{ runnable_count = Rc});
- [#activity{id = Id, state = State } = A] when is_port(Id) ->
- Rc = get_runnable_count(ports, State),
- ets:insert(pdb_activity, A#activity{ runnable_count = Rc});
- _ -> throw(consolidate)
- end,
- consolidate_runnability_loop(ets:next(pdb_activity, Key)).
-
-list_all_ts() ->
- ATs = [Act#activity.timestamp || Act <- select_query({activity, []})],
- STs = [Act#activity.timestamp || Act <- select_query({scheduler, []})],
- ITs = lists:flatten([
- [I#information.start,
- I#information.stop] ||
- I <- select_query({information, all})]),
- %% Filter out all undefined (non ts)
- [Elem || Elem = {_,_,_} <- ATs ++ STs ++ ITs].
-
-%% get_runnable_count(Type, State) -> RunnableCount
-%% In:
-%% Type = procs | ports
-%% State = active | inactive
-%% Out:
-%% RunnableCount = integer()
-%% Purpose:
-%% Keep track of the number of runnable ports and processes
-%% during the profile duration.
-
-get_runnable_count(Type, State) ->
- case {get({runnable, Type}), State} of
- {undefined, active} ->
- put({runnable, Type}, 1),
- 1;
- {N, active} ->
- put({runnable, Type}, N + 1),
- N + 1;
- {N, inactive} ->
- put({runnable, Type}, N - 1),
- N - 1;
- Unhandled ->
- io:format("get_runnable_count, unhandled ~p~n", [Unhandled]),
- Unhandled
- end.
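get_runnable_count/2 keeps one counter per type in the process dictionary and moves it one step per event; a short worked trace, assuming a fresh process dictionary:

%% get_runnable_count(procs, active)   -> 1   (counter created as 1)
%% get_runnable_count(procs, active)   -> 2
%% get_runnable_count(procs, inactive) -> 1
%% get_runnable_count(ports, active)   -> 1   (ports use their own counter)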
-
-check_activity_consistency(Id, State) ->
- case get({previous_state, Id}) of
- State ->
- io:format("check_activity_consistency, state flow invalid.~n"),
- invalid_state;
- undefined when State == inactive ->
- invalid_state;
- _ ->
- put({previous_state, Id}, State),
- ok
- end.
-%%%
-%%% select_query
-%%% In:
-%%% Query = {Table, Option}
-%%% Table = system | activity | scheduler | information
-
-
-select_query(Query) ->
- case Query of
- {system, _ } ->
- select_query_system(Query);
- {activity, _ } ->
- select_query_activity(Query);
- {scheduler, _} ->
- select_query_scheduler(Query);
- {information, _ } ->
- select_query_information(Query);
- Unhandled ->
- io:format("select_query, unhandled: ~p~n", [Unhandled]),
- []
- end.
-
-%%% select_query_information
-
-select_query_information(Query) ->
- case Query of
- {information, all} ->
- ets:select(pdb_info, [{
- #information{ _ = '_'},
- [],
- ['$_']
- }]);
- {information, procs} ->
- ets:select(pdb_info, [{
- #information{ id = '$1', _ = '_'},
- [{is_pid, '$1'}],
- ['$_']
- }]);
- {information, ports} ->
- ets:select(pdb_info, [{
- #information{ id = '$1', _ = '_'},
- [{is_port, '$1'}],
- ['$_']
- }]);
- {information, Id} when is_port(Id) ; is_pid(Id) ->
- ets:select(pdb_info, [{
- #information{ id = Id, _ = '_'},
- [],
- ['$_']
- }]);
- Unhandled ->
- io:format("select_query_information, unhandled: ~p~n", [Unhandled]),
- []
- end.
-
-%%% select_query_scheduler
-
-select_query_scheduler(Query) ->
- case Query of
- {scheduler, Options} when is_list(Options) ->
- Head = #activity{
- timestamp = '$1',
- id = '$2',
- state = '$3',
- where = '$4',
- _ = '_'},
- Body = ['$_'],
- % We don't need id's
- {Constraints, _ } = activity_ms_and(Head, Options, [], []),
- ets:select(pdb_scheduler, [{Head, Constraints, Body}]);
- Unhandled ->
- io:format("select_query_scheduler, unhandled: ~p~n", [Unhandled]),
- []
- end.
-
-%%% select_query_system
-
-select_query_system(Query) ->
- case Query of
- {system, start_ts} ->
- case ets:lookup(pdb_system, {system, start_ts}) of
- [] -> undefined;
- [{{system, start_ts}, StartTS}] -> StartTS
- end;
- {system, stop_ts} ->
- case ets:lookup(pdb_system, {system, stop_ts}) of
- [] -> undefined;
- [{{system, stop_ts}, StopTS}] -> StopTS
- end;
- Unhandled ->
- io:format("select_query_system, unhandled: ~p~n", [Unhandled]),
- []
- end.
-
-%%% select_query_activity
-
-select_query_activity(Query) ->
- case Query of
- {activity, Options} when is_list(Options) ->
- case lists:member({ts_exact, true},Options) of
- true ->
- case catch select_query_activity_exact_ts(Options) of
- {'EXIT', Reason} ->
- io:format(" - select_query_activity [ catch! ]: ~p~n", [Reason]),
- [];
- Match ->
- Match
- end;
- false ->
- MS = activity_ms(Options),
- case catch ets:select(pdb_activity, MS) of
- {'EXIT', Reason} ->
- io:format(" - select_query_activity [ catch! ]: ~p~n", [Reason]),
- [];
- Match ->
- Match
- end
- end;
- Unhandled ->
- io:format("select_query_activity, unhandled: ~p~n", [Unhandled]),
- []
- end.
-
-select_query_activity_exact_ts(Options) ->
- case { proplists:get_value(ts_min, Options, undefined), proplists:get_value(ts_max, Options, undefined) } of
- {undefined, undefined} -> [];
- {undefined, _ } -> [];
- {_ , undefined} -> [];
- {TsMin , TsMax } ->
- % Remove unwanted options
- Opts = lists_filter([ts_exact], Options),
- Ms = activity_ms(Opts),
- case ets:select(pdb_activity, Ms) of
- % no entries within interval
- [] ->
- Opts2 = lists_filter([ts_max, ts_min], Opts) ++ [{ts_min, TsMax}],
- Ms2 = activity_ms(Opts2),
- case ets:select(pdb_activity, Ms2, 1) of
- '$end_of_table' -> [];
- {[E], _} ->
- [PrevAct] = ets:lookup(pdb_activity, ets:prev(pdb_activity, E#activity.timestamp)),
- [PrevAct#activity{ timestamp = TsMin} , E]
- end;
- Acts ->
- [Head| _] = Acts,
- if
- Head#activity.timestamp == TsMin -> Acts;
- true ->
- PrevTs = ets:prev(pdb_activity, Head#activity.timestamp),
- case ets:lookup(pdb_activity, PrevTs) of
- [] -> Acts;
- [PrevAct] -> [PrevAct#activity{timestamp = TsMin}|Acts]
- end
- end
- end
- end.
-
-lists_filter([], Options) -> Options;
-lists_filter([D|Ds], Options) ->
- lists_filter(Ds, lists:filter(
- fun ({Pred, _}) ->
- if
- Pred == D -> false;
- true -> true
- end
- end, Options)).
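-%% Editor's example (illustration only, not part of the original module):
-%% lists_filter/2 drops every option whose key is listed, e.g.
-%%   lists_filter([ts_exact], [{ts_exact, true}, {ts_min, T1}]) -> [{ts_min, T1}]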
-
-% Options:
-% {ts_min, timestamp()}
-% {ts_max, timestamp()}
-% {mfa, mfa()}
-% {state, active | inactive}
-% {id, all | procs | ports | pid() | port()}
-%
-% All options are regarded as AND, except id options, which are regarded as OR.
-% For example: [{ts_min, TS1}, {ts_max, TS2}, {id, PID1}, {id, PORT1}] would be
-% ({ts_min, TS1} and {ts_max, TS2} and {id, PID1}) or
-% ({ts_min, TS1} and {ts_max, TS2} and {id, PORT1}).
-
-activity_ms(Opts) ->
- % {activity, Timestamp, State, Mfa}
- Head = #activity{
- timestamp = '$1',
- id = '$2',
- state = '$3',
- where = '$4',
- _ = '_'},
-
- {Conditions, IDs} = activity_ms_and(Head, Opts, [], []),
- Body = ['$_'],
-
- lists:foldl(
- fun (Option, MS) ->
- case Option of
- {id, ports} ->
- [{Head, [{is_port, Head#activity.id} | Conditions], Body} | MS];
- {id, procs} ->
- [{Head,[{is_pid, Head#activity.id} | Conditions], Body} | MS];
- {id, ID} when is_pid(ID) ; is_port(ID) ->
- [{Head,[{'==', Head#activity.id, ID} | Conditions], Body} | MS];
- {id, all} ->
- [{Head, Conditions,Body} | MS];
- _ ->
- io:format("activity_ms id dropped ~p~n", [Option]),
- MS
- end
- end, [], IDs).
-
-activity_ms_and(_, [], Constraints, []) ->
- {Constraints, [{id, all}]};
-activity_ms_and(_, [], Constraints, IDs) ->
- {Constraints, IDs};
-activity_ms_and(Head, [Opt|Opts], Constraints, IDs) ->
- case Opt of
- {ts_min, Min} ->
- activity_ms_and(Head, Opts,
- [{'>=', Head#activity.timestamp, {Min}} | Constraints], IDs);
- {ts_max, Max} ->
- activity_ms_and(Head, Opts,
- [{'=<', Head#activity.timestamp, {Max}} | Constraints], IDs);
- {id, ID} ->
- activity_ms_and(Head, Opts,
- Constraints, [{id, ID} | IDs]);
- {state, State} ->
- activity_ms_and(Head, Opts,
- [{'==', Head#activity.state, State} | Constraints], IDs);
- {mfa, Mfa} ->
- activity_ms_and(Head, Opts,
- [{'==', Head#activity.where, {Mfa}}| Constraints], IDs);
- _ ->
- io:format("activity_ms_and option dropped ~p~n", [Opt]),
- activity_ms_and(Head, Opts, Constraints, IDs)
- end.
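-%% Editor's illustration (not part of the original module): with
-%% Opts = [{ts_min, T1}, {ts_max, T2}, {id, Pid}], activity_ms/1 builds one
-%% match specification per id option, roughly:
-%%
-%%   [{#activity{timestamp = '$1', id = '$2', state = '$3', where = '$4', _ = '_'},
-%%     [{'==', '$2', Pid}, {'=<', '$1', {T2}}, {'>=', '$1', {T1}}],
-%%     ['$_']}]
-%%
-%% so timestamp/state/mfa constraints are ANDed into every spec, while each
-%% id option contributes its own spec (the OR described above).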
-
-% Information = information()
-
-%%%
-%%% update_information
-%%%
-
-
-update_information(#information{id = Id} = NewInfo) ->
- case ets:lookup(pdb_info, Id) of
- [] ->
- ets:insert(pdb_info, NewInfo),
- ok;
- [Info] ->
-	    % Convert NewInfo and Info to lists, then replace old values
-	    % with new values that are neither undefined nor empty lists.
-
- {_, Result} = lists:foldl(
- fun (InfoElem, {[NewInfoElem | Tail], Out}) ->
- case NewInfoElem of
- undefined ->
- {Tail, [InfoElem | Out]};
- [] ->
- {Tail, [InfoElem | Out]};
- NewInfoElem ->
- {Tail, [NewInfoElem | Out]}
- end
- end, {tuple_to_list(NewInfo), []}, tuple_to_list(Info)),
- ets:insert(pdb_info, list_to_tuple(lists:reverse(Result))),
- ok
- end.
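-%% Editor's example (illustration only): if pdb_info already holds
-%% #information{id = Pid, entry = {M, F, 1}, name = undefined} and a new
-%% #information{id = Pid, entry = undefined, name = Name} arrives, the merge
-%% above keeps the stored entry and overwrites name, because only new field
-%% values that are neither undefined nor [] replace the stored ones.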
-
-update_information_child(Id, Child) ->
- case ets:lookup(pdb_info, Id) of
- [] ->
- ets:insert(pdb_info,#information{
- id = Id,
- children = [Child]}),
- ok;
- [I] ->
- ets:insert(pdb_info,I#information{children = [Child | I#information.children]}),
- ok
- end.
-
-%%%
-%%% update_activity
-%%%
-update_scheduler(Activity) ->
- ets:insert(pdb_scheduler, Activity).
-
-update_activity(Activity) ->
- ets:insert(pdb_activity, Activity).
-
-%%%
-%%% update_system_ts
-%%%
-
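-%% Editor's note (assuming ?seconds(A, B) measures A minus B, as its other
-%% call sites suggest): update_system_start_ts/1 keeps the earliest start
-%% timestamp seen and update_system_stop_ts/1 the latest stop timestamp, so
-%% repeated calls only ever widen the recorded profiling interval.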
-update_system_start_ts(TS) ->
- case ets:lookup(pdb_system, {system, start_ts}) of
- [] ->
- ets:insert(pdb_system, {{system, start_ts}, TS});
- [{{system, start_ts}, StartTS}] ->
- DT = ?seconds(StartTS, TS),
- if
- DT > 0.0 -> ets:insert(pdb_system, {{system, start_ts}, TS});
- true -> ok
- end;
- Unhandled ->
- io:format("update_system_start_ts, unhandled ~p ~n", [Unhandled])
- end.
-
-update_system_stop_ts(TS) ->
- case ets:lookup(pdb_system, {system, stop_ts}) of
- [] ->
- ets:insert(pdb_system, {{system, stop_ts}, TS});
- [{{system, stop_ts}, StopTS}] ->
- DT = ?seconds(StopTS, TS),
- if
- DT < 0.0 -> ets:insert(pdb_system, {{system, stop_ts}, TS});
- true -> ok
- end;
- Unhandled ->
- io:format("update_system_stop_ts, unhandled ~p ~n", [Unhandled])
- end.
diff --git a/lib/percept/src/percept_graph.erl b/lib/percept/src/percept_graph.erl
deleted file mode 100644
index e5bbaca2b4..0000000000
--- a/lib/percept/src/percept_graph.erl
+++ /dev/null
@@ -1,134 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2007-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-
-%% @doc Interface for CGI requests on graphs used by percept. The module exports functions that are implementations of ESI callbacks used by the httpd server. See http://www.erlang.org//doc/apps/inets/index.html.
-
--module(percept_graph).
--export([proc_lifetime/3, graph/3, scheduler_graph/3, activity/3, percentage/3]).
-
--include("percept.hrl").
--include_lib("kernel/include/file.hrl").
-
-%% API
-
-%% graph
-%% @spec graph(SessionID, Env, Input) -> term()
-%% @doc An ESI callback implementation used by the httpd server.
-%%
-
-graph(SessionID, Env, Input) ->
- ok = mod_esi:deliver(SessionID, header()),
- ok = mod_esi:deliver(SessionID, binary_to_list(graph(Env, Input))).
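-%% Editor's sketch (illustration only, not percept's actual startup code):
-%% ESI callbacks with this (SessionID, Env, Input) shape are served through
-%% inets httpd via mod_esi; a minimal server could be started as
-%%
-%%   inets:start(),
-%%   {ok, _Httpd} = inets:start(httpd,
-%%       [{port, 8888},
-%%        {server_name, "percept"},
-%%        {server_root, "/tmp"},
-%%        {document_root, "/tmp"},
-%%        {modules, [mod_alias, mod_esi, mod_get]},
-%%        {erl_script_alias, {"/cgi-bin", [percept_graph, percept_html]}}]),
-%%
-%% after which GET /cgi-bin/percept_graph/graph?... is dispatched to graph/3.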
-
-%% activity
-%% @spec activity(SessionID, Env, Input) -> term()
-%% @doc An ESI callback implementation used by the httpd server.
-
-activity(SessionID, Env, Input) ->
- ok = mod_esi:deliver(SessionID, header()),
- ok = mod_esi:deliver(SessionID, binary_to_list(activity_bar(Env, Input))).
-
-proc_lifetime(SessionID, Env, Input) ->
- ok = mod_esi:deliver(SessionID, header()),
- ok = mod_esi:deliver(SessionID, binary_to_list(proc_lifetime(Env, Input))).
-
-percentage(SessionID, Env, Input) ->
- ok = mod_esi:deliver(SessionID, header()),
- ok = mod_esi:deliver(SessionID, binary_to_list(percentage(Env,Input))).
-
-scheduler_graph(SessionID, Env, Input) ->
- ok = mod_esi:deliver(SessionID, header()),
- ok = mod_esi:deliver(SessionID, binary_to_list(scheduler_graph(Env, Input))).
-
-graph(_Env, Input) ->
- Query = httpd:parse_query(Input),
- RangeMin = percept_html:get_option_value("range_min", Query),
- RangeMax = percept_html:get_option_value("range_max", Query),
- Pids = percept_html:get_option_value("pids", Query),
- Width = percept_html:get_option_value("width", Query),
- Height = percept_html:get_option_value("height", Query),
-
- % Convert Pids to id option list
- IDs = [ {id, ID} || ID <- Pids],
-
- % seconds2ts
- StartTs = percept_db:select({system, start_ts}),
- TsMin = percept_analyzer:seconds2ts(RangeMin, StartTs),
- TsMax = percept_analyzer:seconds2ts(RangeMax, StartTs),
-
- Options = [{ts_min, TsMin},{ts_max, TsMax} | IDs],
-
- Acts = percept_db:select({activity, Options}),
- Counts = case IDs of
- [] -> percept_analyzer:activities2count(Acts, StartTs);
- _ -> percept_analyzer:activities2count2(Acts, StartTs)
- end,
-
- percept_image:graph(Width, Height,Counts).
-
-scheduler_graph(_Env, Input) ->
- Query = httpd:parse_query(Input),
- RangeMin = percept_html:get_option_value("range_min", Query),
- RangeMax = percept_html:get_option_value("range_max", Query),
- Width = percept_html:get_option_value("width", Query),
- Height = percept_html:get_option_value("height", Query),
-
- StartTs = percept_db:select({system, start_ts}),
- TsMin = percept_analyzer:seconds2ts(RangeMin, StartTs),
- TsMax = percept_analyzer:seconds2ts(RangeMax, StartTs),
-
-
- Acts = percept_db:select({scheduler, [{ts_min, TsMin}, {ts_max,TsMax}]}),
-
- Counts = [{?seconds(Ts, StartTs), Scheds, 0} || #activity{where = Scheds, timestamp = Ts} <- Acts],
-
- percept_image:graph(Width, Height, Counts).
-
-activity_bar(_Env, Input) ->
- Query = httpd:parse_query(Input),
- Pid = percept_html:get_option_value("pid", Query),
- Min = percept_html:get_option_value("range_min", Query),
- Max = percept_html:get_option_value("range_max", Query),
- Width = percept_html:get_option_value("width", Query),
- Height = percept_html:get_option_value("height", Query),
-
- Data = percept_db:select({activity, [{id, Pid}]}),
- StartTs = percept_db:select({system, start_ts}),
- Activities = [{?seconds(Ts, StartTs), State} || #activity{timestamp = Ts, state = State} <- Data],
-
- percept_image:activities(Width, Height, {Min,Max},Activities).
-
-proc_lifetime(_Env, Input) ->
- Query = httpd:parse_query(Input),
- ProfileTime = percept_html:get_option_value("profiletime", Query),
- Start = percept_html:get_option_value("start", Query),
- End = percept_html:get_option_value("end", Query),
- Width = percept_html:get_option_value("width", Query),
- Height = percept_html:get_option_value("height", Query),
- percept_image:proc_lifetime(round(Width), round(Height), float(Start), float(End), float(ProfileTime)).
-
-percentage(_Env, Input) ->
- Query = httpd:parse_query(Input),
- Width = percept_html:get_option_value("width", Query),
- Height = percept_html:get_option_value("height", Query),
- Percentage = percept_html:get_option_value("percentage", Query),
- percept_image:percentage(round(Width), round(Height), float(Percentage)).
-
-header() ->
- "Content-Type: image/png\r\n\r\n".
diff --git a/lib/percept/src/percept_html.erl b/lib/percept/src/percept_html.erl
deleted file mode 100644
index a675227584..0000000000
--- a/lib/percept/src/percept_html.erl
+++ /dev/null
@@ -1,707 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2007-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-
--module(percept_html).
--export([page/3,
- codelocation_page/3,
- databases_page/3,
- load_database_page/3,
- processes_page/3,
- concurrency_page/3,
- process_info_page/3]).
-
--export([value2pid/1,
- pid2value/1,
- get_option_value/2,
- join_strings_with/2]).
-
--include("percept.hrl").
--include_lib("kernel/include/file.hrl").
-
-
-%% API
-
-page(SessionID, Env, Input) ->
- ok = mod_esi:deliver(SessionID, header()),
- ok = mod_esi:deliver(SessionID, menu()),
- ok = mod_esi:deliver(SessionID, overview_content(Env, Input)),
- ok = mod_esi:deliver(SessionID, footer()).
-
-processes_page(SessionID, _, _) ->
- ok = mod_esi:deliver(SessionID, header()),
- ok = mod_esi:deliver(SessionID, menu()),
- ok = mod_esi:deliver(SessionID, processes_content()),
- ok = mod_esi:deliver(SessionID, footer()).
-
-concurrency_page(SessionID, Env, Input) ->
- ok = mod_esi:deliver(SessionID, header()),
- ok = mod_esi:deliver(SessionID, menu()),
- ok = mod_esi:deliver(SessionID, concurrency_content(Env, Input)),
- ok = mod_esi:deliver(SessionID, footer()).
-
-databases_page(SessionID, _, _) ->
- ok = mod_esi:deliver(SessionID, header()),
- ok = mod_esi:deliver(SessionID, menu()),
- ok = mod_esi:deliver(SessionID, databases_content()),
- ok = mod_esi:deliver(SessionID, footer()).
-
-codelocation_page(SessionID, Env, Input) ->
- ok = mod_esi:deliver(SessionID, header()),
- ok = mod_esi:deliver(SessionID, menu()),
- ok = mod_esi:deliver(SessionID, codelocation_content(Env, Input)),
- ok = mod_esi:deliver(SessionID, footer()).
-
-process_info_page(SessionID, Env, Input) ->
- ok = mod_esi:deliver(SessionID, header()),
- ok = mod_esi:deliver(SessionID, menu()),
- ok = mod_esi:deliver(SessionID, process_info_content(Env, Input)),
- ok = mod_esi:deliver(SessionID, footer()).
-
-load_database_page(SessionID, Env, Input) ->
- ok = mod_esi:deliver(SessionID, header()),
-
- % Very dynamic page, handled differently
- load_database_content(SessionID, Env, Input),
- ok = mod_esi:deliver(SessionID, footer()).
-
-
-%%% --------------------------- %%%
-%%% Content pages %%%
-%%% --------------------------- %%%
-
-overview_content(_Env, Input) ->
- Query = httpd:parse_query(Input),
- Min = get_option_value("range_min", Query),
- Max = get_option_value("range_max", Query),
- Width = 1200,
- Height = 600,
- TotalProfileTime = ?seconds( percept_db:select({system, stop_ts}),
- percept_db:select({system, start_ts})),
- RegisteredProcs = length(percept_db:select({information, procs})),
- RegisteredPorts = length(percept_db:select({information, ports})),
-
- InformationTable =
- "<table>" ++
- table_line(["Profile time:", TotalProfileTime]) ++
- table_line(["Processes:", RegisteredProcs]) ++
- table_line(["Ports:", RegisteredPorts]) ++
- table_line(["Min. range:", Min]) ++
- table_line(["Max. range:", Max]) ++
- "</table>",
-
- Header = "
- <div id=\"content\">
- <div>" ++ InformationTable ++ "</div>\n
- <form name=form_area method=POST action=/cgi-bin/percept_html/page>
- <input name=data_min type=hidden value=" ++ term2html(float(Min)) ++ ">
- <input name=data_max type=hidden value=" ++ term2html(float(Max)) ++ ">\n",
-
-
- RangeTable =
- "<table>"++
- table_line([
- "Min:",
- "<input name=range_min value=" ++ term2html(float(Min)) ++">",
- "<select name=\"graph_select\" onChange=\"select_image()\">
- <option disabled=true value=\""++ url_graph(Width, Height, Min, Max, []) ++"\" />Ports
- <option disabled=true value=\""++ url_graph(Width, Height, Min, Max, []) ++"\" />Processes
- <option value=\""++ url_graph(Width, Height, Min, Max, []) ++"\" />Ports & Processes
- </select>",
- "<input type=submit value=Update>"
- ]) ++
- table_line([
- "Max:",
- "<input name=range_max value=" ++ term2html(float(Max)) ++">",
- "",
- "<a href=/cgi-bin/percept_html/codelocation_page?range_min=" ++
- term2html(Min) ++ "&range_max=" ++ term2html(Max) ++ ">Code location</a>"
- ]) ++
- "</table>",
-
-
- MainTable =
- "<table>" ++
- table_line([div_tag_graph()]) ++
- table_line([RangeTable]) ++
- "</table>",
-
- Footer = "</div></form>",
-
- Header ++ MainTable ++ Footer.
-
-div_tag_graph() ->
- %background:url('/images/loader.gif') no-repeat center;
- "<div id=\"percept_graph\"
- onMouseDown=\"select_down(event)\"
- onMouseMove=\"select_move(event)\"
- onMouseUp=\"select_up(event)\"
-
- style=\"
- background-size: 100%;
- background-origin: content;
- width: 100%;
- position:relative;
- \">
-
- <div id=\"percept_areaselect\"
- style=\"background-color:#ef0909;
- position:relative;
- visibility:hidden;
- border-left: 1px solid #101010;
- border-right: 1px solid #101010;
- z-index:2;
- width:40px;
- height:40px;\"></div></div>".
-
--spec url_graph(
-	Width  :: non_neg_integer(),
- Height :: non_neg_integer(),
- Min :: float(),
- Max :: float(),
- Pids :: [pid()]) -> string().
-
-url_graph(W, H, Min, Max, []) ->
- "/cgi-bin/percept_graph/graph?range_min=" ++ term2html(float(Min))
- ++ "&range_max=" ++ term2html(float(Max))
- ++ "&width=" ++ term2html(float(W))
- ++ "&height=" ++ term2html(float(H)).
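-%% Editor's example (derived from the code above): since term2html/1 formats
-%% floats with ~.4f,
-%%   url_graph(1200, 600, 0.0, 1.5, [])
-%% returns
-%%   "/cgi-bin/percept_graph/graph?range_min=0.0000&range_max=1.5000"
-%%   "&width=1200.0000&height=600.0000"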
-
-%%% process_info_content
-
-process_info_content(_Env, Input) ->
- Query = httpd:parse_query(Input),
- Pid = get_option_value("pid", Query),
-
-
- [I] = percept_db:select({information, Pid}),
- ArgumentString = case I#information.entry of
- {_, _, Arguments} -> lists:flatten( [term2html(Arg) ++ "<br>" || Arg <- Arguments]);
- _ -> ""
- end,
-
- TimeTable = html_table([
- [{th, ""},
- {th, "Timestamp"},
- {th, "Profile Time"}],
- [{td, "Start"},
- term2html(I#information.start),
- term2html(procstarttime(I#information.start))],
- [{td, "Stop"},
- term2html(I#information.stop),
- term2html(procstoptime(I#information.stop))]
- ]),
-
- InfoTable = html_table([
- [{th, "Pid"}, term2html(I#information.id)],
- [{th, "Name"}, term2html(I#information.name)],
- [{th, "Entrypoint"}, mfa2html(I#information.entry)],
- [{th, "Arguments"}, ArgumentString],
- [{th, "Timetable"}, TimeTable],
- [{th, "Parent"}, pid2html(I#information.parent)],
- [{th, "Children"}, lists:flatten(lists:map(fun(Child) -> pid2html(Child) ++ " " end, I#information.children))]
- ]),
-
- PidActivities = percept_db:select({activity, [{id, Pid}]}),
- WaitingMfas = percept_analyzer:waiting_activities(PidActivities),
-
- TotalWaitTime = lists:sum( [T || {T, _, _} <- WaitingMfas] ),
-
- MfaTable = html_table([
- [{th, "percentage"},
- {th, "total"},
- {th, "mean"},
- {th, "stddev"},
- {th, "#recv"},
- {th, "module:function/arity"}]] ++ [
- [{td, image_string(percentage, [{width, 100}, {height, 10}, {percentage, Time/TotalWaitTime}])},
- {td, term2html(Time)},
- {td, term2html(Mean)},
- {td, term2html(StdDev)},
- {td, term2html(N)},
- {td, mfa2html(MFA)} ] || {Time, MFA, {Mean, StdDev, N}} <- WaitingMfas]),
-
- "<div id=\"content\">" ++
- InfoTable ++ "<br>" ++
- MfaTable ++
- "</div>".
-
-%%% concurrency content
-concurrency_content(_Env, Input) ->
- %% Get query
- Query = httpd:parse_query(Input),
-
- %% Collect selected pids and generate id tags
- Pids = [value2pid(PidValue) || {PidValue, Case} <- Query, Case == "on", PidValue /= "select_all"],
- IDs = [{id, Pid} || Pid <- Pids],
-
- % FIXME: A lot of extra work here, redo
-
- %% Analyze activities and calculate area bounds
- Activities = percept_db:select({activity, IDs}),
- StartTs = percept_db:select({system, start_ts}),
- Counts = [{Time, Y1 + Y2} || {Time, Y1, Y2} <- percept_analyzer:activities2count2(Activities, StartTs)],
- {T0,_,T1,_} = percept_analyzer:minmax(Counts),
-
- % FIXME: End
-
- PidValues = [pid2value(Pid) || Pid <- Pids],
-
- %% Generate activity bar requests
- ActivityBarTable = lists:foldl(
- fun(Pid, Out) ->
- ValueString = pid2value(Pid),
- Out ++
- table_line([
- pid2html(Pid),
- "<img onload=\"size_image(this, '" ++
- image_string_head("activity", [{"pid", ValueString}, {range_min, T0},{range_max, T1},{height, 10}], []) ++
- "')\" src=/images/white.png border=0 />"
- ])
- end, [], Pids),
-
- %% Make pids request string
- PidsRequest = join_strings_with(PidValues, ":"),
-
- "<div id=\"content\">
- <table cellspacing=0 cellpadding=0 border=0>" ++
- table_line([
- "",
- "<img onload=\"size_image(this, '" ++
- image_string_head("graph", [{"pids", PidsRequest},{range_min, T0}, {range_max, T1}, {height, 400}], []) ++
- "')\" src=/images/white.png border=0 />"
- ]) ++
- ActivityBarTable ++
- "</table></div>\n".
-
-processes_content() ->
- Ports = percept_db:select({information, ports}),
- UnsortedProcesses = percept_db:select({information, procs}),
- SystemStartTS = percept_db:select({system, start_ts}),
- SystemStopTS = percept_db:select({system, stop_ts}),
- ProfileTime = ?seconds( SystemStopTS,
- SystemStartTS),
- Processes = lists:sort(
- fun (A, B) ->
- if
- A#information.id > B#information.id -> true;
- true -> false
- end
- end, UnsortedProcesses),
-
- ProcsHtml = lists:foldl(
- fun (I, Out) ->
- StartTime = procstarttime(I#information.start),
- EndTime = procstoptime(I#information.stop),
- Prepare =
- table_line([
- "<input type=checkbox name=" ++ pid2value(I#information.id) ++ ">",
- pid2html(I#information.id),
- image_string(proc_lifetime, [
- {profiletime, ProfileTime},
- {start, StartTime},
- {"end", term2html(float(EndTime))},
- {width, 100},
- {height, 10}]),
- mfa2html(I#information.entry),
- term2html(I#information.name),
- pid2html(I#information.parent)
- ]),
- [Prepare|Out]
- end, [], Processes),
-
- PortsHtml = lists:foldl(
- fun (I, Out) ->
- StartTime = procstarttime(I#information.start),
- EndTime = procstoptime(I#information.stop),
- Prepare =
- table_line([
- "",
- pid2html(I#information.id),
- image_string(proc_lifetime, [
- {profiletime, ProfileTime},
- {start, StartTime},
- {"end", term2html(float(EndTime))},
- {width, 100},
- {height, 10}]),
- mfa2html(I#information.entry),
- term2html(I#information.name),
- pid2html(I#information.parent)
- ]),
- [Prepare|Out]
- end, [], Ports),
-
- Selector = "<table>" ++
- table_line([
- "<input onClick='selectall()' type=checkbox name=select_all>Select all"]) ++
- table_line([
- "<input type=submit value=Compare>"]) ++
- "</table>",
-
- if
- length(ProcsHtml) > 0 ->
- ProcsHtmlResult =
- "<tr><td><b>Processes</b></td></tr>
- <tr><td>
- <table width=700 cellspacing=0 border=0>
- <tr>
- <td align=middle width=40><b>Select</b></td>
- <td align=middle width=40><b>Pid</b></td>
- <td><b>Lifetime</b></td>
- <td><b>Entrypoint</b></td>
- <td><b>Name</b></td>
- <td><b>Parent</b></td>
- </tr>" ++
- lists:flatten(ProcsHtml) ++
- "</table>
- </td></tr>";
- true ->
- ProcsHtmlResult = ""
- end,
- if
- length(PortsHtml) > 0 ->
- PortsHtmlResult = "
- <tr><td><b>Ports</b></td></tr>
- <tr><td>
- <table width=700 cellspacing=0 border=0>
- <tr>
- <td align=middle width=40><b>Select</b></td>
- <td align=left width=40><b>Pid</b></td>
- <td><b>Lifetime</b></td>
- <td><b>Entrypoint</b></td>
- <td><b>Name</b></td>
- <td><b>Parent</b></td>
- </tr>" ++
- lists:flatten(PortsHtml) ++
- "</table>
- </td></tr>";
- true ->
- PortsHtmlResult = ""
- end,
-
- Right = "<div>"
- ++ Selector ++
- "</div>\n",
-
- Middle = "<div id=\"content\">
- <table>" ++
- ProcsHtmlResult ++
- PortsHtmlResult ++
- "</table>" ++
- Right ++
- "</div>\n",
-
- "<form name=process_select method=POST action=/cgi-bin/percept_html/concurrency_page>" ++
- Middle ++
- "</form>".
-
-procstarttime(TS) ->
- case TS of
- undefined -> 0.0;
- TS -> ?seconds(TS,percept_db:select({system, start_ts}))
- end.
-
-procstoptime(TS) ->
- case TS of
- undefined -> ?seconds( percept_db:select({system, stop_ts}),
- percept_db:select({system, start_ts}));
- TS -> ?seconds(TS, percept_db:select({system, start_ts}))
- end.
-
-databases_content() ->
- "<div id=\"content\">
- <form name=load_percept_file method=post action=/cgi-bin/percept_html/load_database_page>
- <center>
- <table>
- <tr><td>Enter file to analyse:</td><td><input type=hidden name=path /></td></tr>
- <tr><td><input type=file name=file size=40 /></td><td><input type=submit value=Load onClick=\"path.value = file.value;\" /></td></tr>
- </table>
- </center>
- </form>
- </div>".
-
-load_database_content(SessionId, _Env, Input) ->
- Query = httpd:parse_query(Input),
- {_,{_,Path}} = lists:keysearch("file", 1, Query),
- {_,{_,File}} = lists:keysearch("path", 1, Query),
- Filename = filename:join(Path, File),
- % Check path/file/filename
-
- ok = mod_esi:deliver(SessionId, "<div id=\"content\">"),
- case file:read_file_info(Filename) of
- {ok, _} ->
- Content = "<center>
- Parsing: " ++ Filename ++ "<br>
- </center>",
- ok = mod_esi:deliver(SessionId, Content),
- case percept:analyze(Filename) of
- {error, Reason} ->
- ok = mod_esi:deliver(SessionId, error_msg("Analyze" ++ term2html(Reason)));
- _ ->
- Complete = "<center><a href=\"/cgi-bin/percept_html/page\">View</a></center>",
- ok = mod_esi:deliver(SessionId, Complete)
- end;
- {error, Reason} ->
- ok = mod_esi:deliver(SessionId, error_msg("File" ++ term2html(Reason)))
- end,
- ok = mod_esi:deliver(SessionId, "</div>").
-
-codelocation_content(_Env, Input) ->
- Query = httpd:parse_query(Input),
- Min = get_option_value("range_min", Query),
- Max = get_option_value("range_max", Query),
- StartTs = percept_db:select({system, start_ts}),
- TsMin = percept_analyzer:seconds2ts(Min, StartTs),
- TsMax = percept_analyzer:seconds2ts(Max, StartTs),
- Acts = percept_db:select({activity, [{ts_min, TsMin}, {ts_max, TsMax}]}),
-
- Secs = [timer:now_diff(A#activity.timestamp,StartTs)/1000 || A <- Acts],
- Delta = cl_deltas(Secs),
- Zip = lists:zip(Acts, Delta),
- Table = html_table([
- [{th, "delta [ms]"},
- {th, "time [ms]"},
- {th, " pid "},
- {th, "activity"},
- {th, "module:function/arity"},
- {th, "#runnables"}]] ++ [
- [{td, term2html(D)},
- {td, term2html(timer:now_diff(A#activity.timestamp,StartTs)/1000)},
- {td, pid2html(A#activity.id)},
- {td, term2html(A#activity.state)},
- {td, mfa2html(A#activity.where)},
- {td, term2html(A#activity.runnable_count)}] || {A, D} <- Zip ]),
-
- "<div id=\"content\">" ++
- Table ++
- "</div>".
-
-cl_deltas([]) -> [];
-cl_deltas(List) -> cl_deltas(List, [0.0]).
-cl_deltas([_], Out) -> lists:reverse(Out);
-cl_deltas([A,B|Ls], Out) -> cl_deltas([B|Ls], [B - A | Out]).
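-%% Editor's example (illustration only):
-%%   cl_deltas([1.0, 2.5, 4.0]) -> [0.0, 1.5, 1.5]
-%% i.e. the first delta is always 0.0 and every later entry is the difference
-%% to the preceding value.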
-
-%%% --------------------------- %%%
-%%% Utility functions %%%
-%%% --------------------------- %%%
-
-%% Should be in string stdlib?
-
-join_strings(Strings) ->
- lists:flatten(Strings).
-
--spec join_strings_with(Strings :: [string()], Separator :: string()) -> string().
-
-join_strings_with([S1, S2 | R], S) ->
- join_strings_with([join_strings_with(S1,S2,S) | R], S);
-join_strings_with([S], _) ->
- S.
-join_strings_with(S1, S2, S) ->
- join_strings([S1,S,S2]).
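-%% Editor's example (illustration only):
-%%   join_strings_with(["0.42.0", "0.55.0", "0.60.0"], ":") -> "0.42.0:0.55.0:0.60.0"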
-
-%%% Generic erlang2html
-
--spec html_table(Rows :: [[string() | {'td' | 'th', string()}]]) -> string().
-
-html_table(Rows) -> "<table>" ++ html_table_row(Rows) ++ "</table>".
-
-html_table_row(Rows) -> html_table_row(Rows, odd).
-html_table_row([], _) -> "";
-html_table_row([Row|Rows], odd ) -> "<tr class=\"odd\">" ++ html_table_data(Row) ++ "</tr>" ++ html_table_row(Rows, even);
-html_table_row([Row|Rows], even) -> "<tr class=\"even\">" ++ html_table_data(Row) ++ "</tr>" ++ html_table_row(Rows, odd ).
-
-html_table_data([]) -> "";
-html_table_data([{td, Data}|Row]) -> "<td>" ++ Data ++ "</td>" ++ html_table_data(Row);
-html_table_data([{th, Data}|Row]) -> "<th>" ++ Data ++ "</th>" ++ html_table_data(Row);
-html_table_data([Data|Row]) -> "<td>" ++ Data ++ "</td>" ++ html_table_data(Row).
-
-
-
-
--spec table_line(Table :: [any()]) -> string().
-
-table_line(List) -> table_line(List, ["<tr>"]).
-table_line([], Out) -> lists:flatten(lists:reverse(["</tr>\n"|Out]));
-table_line([Element | Elements], Out) when is_list(Element) ->
- table_line(Elements, ["<td>" ++ Element ++ "</td>" |Out]);
-table_line([Element | Elements], Out) ->
- table_line(Elements, ["<td>" ++ term2html(Element) ++ "</td>"|Out]).
-
--spec term2html(any()) -> string().
-
-term2html(Term) when is_float(Term) -> lists:flatten(io_lib:format("~.4f", [Term]));
-term2html(Term) -> lists:flatten(io_lib:format("~p", [Term])).
-
--spec mfa2html(MFA :: {atom(), atom(), list() | integer()}) -> string().
-
-mfa2html({Module, Function, Arguments}) when is_list(Arguments) ->
- lists:flatten(io_lib:format("~p:~p/~p", [Module, Function, length(Arguments)]));
-mfa2html({Module, Function, Arity}) when is_integer(Arity) ->
- lists:flatten(io_lib:format("~p:~p/~p", [Module, Function, Arity]));
-mfa2html(_) ->
- "undefined".
-
--spec pid2html(Pid :: pid() | port()) -> string().
-
-pid2html(Pid) when is_pid(Pid) ->
- PidString = term2html(Pid),
- PidValue = pid2value(Pid),
- "<a href=\"/cgi-bin/percept_html/process_info_page?pid="++PidValue++"\">"++PidString++"</a>";
-pid2html(Pid) when is_port(Pid) ->
- term2html(Pid);
-pid2html(_) ->
- "undefined".
-
--spec image_string(Request :: string()) -> string().
-
-image_string(Request) ->
- "<img border=0 src=\"/cgi-bin/percept_graph/" ++
- Request ++
- " \">".
-
--spec image_string(atom() | string(), list()) -> string().
-
-image_string(Request, Options) when is_atom(Request), is_list(Options) ->
- image_string(image_string_head(erlang:atom_to_list(Request), Options, []));
-image_string(Request, Options) when is_list(Options) ->
- image_string(image_string_head(Request, Options, [])).
-
-image_string_head(Request, [{Type, Value} | Opts], Out) when is_atom(Type), is_number(Value) ->
- Opt = join_strings(["?",term2html(Type),"=",term2html(Value)]),
- image_string_tail(Request, Opts, [Opt|Out]);
-image_string_head(Request, [{Type, Value} | Opts], Out) ->
- Opt = join_strings(["?",Type,"=",Value]),
- image_string_tail(Request, Opts, [Opt|Out]).
-
-image_string_tail(Request, [], Out) ->
- join_strings([Request | lists:reverse(Out)]);
-image_string_tail(Request, [{Type, Value} | Opts], Out) when is_atom(Type), is_number(Value) ->
- Opt = join_strings(["&",term2html(Type),"=",term2html(Value)]),
- image_string_tail(Request, Opts, [Opt|Out]);
-image_string_tail(Request, [{Type, Value} | Opts], Out) ->
- Opt = join_strings(["&",Type,"=",Value]),
- image_string_tail(Request, Opts, [Opt|Out]).
-
-
-%%% percept conversions
-
--spec pid2value(Pid :: pid()) -> string().
-
-pid2value(Pid) ->
- String = lists:flatten(io_lib:format("~p", [Pid])),
- lists:sublist(String, 2, erlang:length(String)-2).
-
--spec value2pid(Value :: string()) -> pid().
-
-value2pid(Value) ->
- String = lists:flatten("<" ++ Value ++ ">"),
- erlang:list_to_pid(String).
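-%% Editor's example (illustration only): for a pid that prints as <0.42.0>,
-%% pid2value/1 returns "0.42.0" (the printed form without the angle
-%% brackets), and value2pid("0.42.0") restores the pid via list_to_pid/1.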
-
-
-%%% get value
-
--spec get_option_value(Option :: string(), Options :: [{string(),any()}]) ->
- {'error', any()} | boolean() | pid() | [pid()] | number().
-
-get_option_value(Option, Options) ->
- case catch get_option_value0(Option, Options) of
- {'EXIT', Reason} -> {error, Reason};
- Value -> Value
- end.
-
-get_option_value0(Option, Options) ->
- case lists:keysearch(Option, 1, Options) of
- false -> get_default_option_value(Option);
- {value, {Option, _Value}} when Option == "fillcolor" -> true;
- {value, {Option, Value}} when Option == "pid" -> value2pid(Value);
- {value, {Option, Value}} when Option == "pids" ->
- [value2pid(PidValue) || PidValue <- string:tokens(Value,":")];
- {value, {Option, Value}} -> get_number_value(Value);
- _ -> {error, undefined}
- end.
-
-get_default_option_value(Option) ->
- case Option of
- "fillcolor" -> false;
- "range_min" -> float(0.0);
- "pids" -> [];
- "range_max" ->
- Acts = percept_db:select({activity, []}),
- #activity{ timestamp = Start } = hd(Acts),
- #activity{ timestamp = Stop } = hd(lists:reverse(Acts)),
- ?seconds(Stop,Start);
- "width" -> 700;
- "height" -> 400;
- _ -> {error, {undefined_default_option, Option}}
- end.
-
--spec get_number_value(string()) -> number() | {'error', 'illegal_number'}.
-
-get_number_value(Value) ->
- % Try float
- case string:to_float(Value) of
- {error, no_float} ->
- % Try integer
- case string:to_integer(Value) of
- {error, _} -> {error, illegal_number};
- {Integer, _} -> Integer
- end;
- {error, _} -> {error, illegal_number};
- {Float, _} -> Float
- end.
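-%% Editor's examples (illustration only):
-%%   get_number_value("1.5") -> 1.5
-%%   get_number_value("700") -> 700
-%%   get_number_value("abc") -> {error, illegal_number}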
-
-%%% --------------------------- %%%
-%%% html prime functions %%%
-%%% --------------------------- %%%
-
-header() -> header([]).
-header(HeaderData) ->
- "Content-Type: text/html\r\n\r\n" ++
- "<html>
- <head>
- <meta http-equiv=\"Content-Type\" content=\"text/html; charset=iso-8859-1\">
- <title>percept</title>
- <link href=\"/css/percept.css\" rel=\"stylesheet\" type=\"text/css\">
- <script type=\"text/javascript\" src=\"/javascript/percept_error_handler.js\"></script>
- <script type=\"text/javascript\" src=\"/javascript/percept_select_all.js\"></script>
- <script type=\"text/javascript\" src=\"/javascript/percept_area_select.js\"></script>
- " ++ HeaderData ++"
- </head>
- <body onLoad=\"load_image()\">
- <div id=\"header\"><a href=/index.html>percept</a></div>\n".
-
-footer() ->
- "</body>
- </html>\n".
-
-menu() ->
- "<div id=\"menu\" class=\"menu_tabs\">
- <ul>
- <li><a href=/cgi-bin/percept_html/databases_page>databases</a></li>
- <li><a href=/cgi-bin/percept_html/processes_page>processes</a></li>
- <li><a href=/cgi-bin/percept_html/page>overview</a></li>
- </ul></div>\n".
-
--spec error_msg(Error :: string()) -> string().
-
-error_msg(Error) ->
- "<table width=300>
- <tr height=5><td></td> <td></td></tr>
- <tr><td width=150 align=right><b>Error: </b></td> <td align=left>"++ Error ++ "</td></tr>
- <tr height=5><td></td> <td></td></tr>
- </table>\n".
diff --git a/lib/percept/src/percept_image.erl b/lib/percept/src/percept_image.erl
deleted file mode 100644
index e819938027..0000000000
--- a/lib/percept/src/percept_image.erl
+++ /dev/null
@@ -1,316 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2007-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-
--module(percept_image).
--export([ proc_lifetime/5,
- percentage/3,
- graph/3,
- graph/4,
- activities/3,
- activities/4]).
--record(graph_area, {x = 0, y = 0, width, height}).
--compile(inline).
-
-%%% -------------------------------------
-%%% GRAPH
-%%% -------------------------------------
-
-%% graph(Width, Height, Range, Data)
-
-graph(Width, Height, {RXmin, RYmin, RXmax, RYmax}, Data) ->
- Data2 = [{X, Y1 + Y2} || {X, Y1, Y2} <- Data],
- MinMax = percept_analyzer:minmax(Data2),
- {Xmin, Ymin, Xmax, Ymax} = MinMax,
- graf1(Width, Height,{ lists:min([RXmin, Xmin]),
- lists:min([RYmin, Ymin]),
- lists:max([RXmax, Xmax]),
- lists:max([RYmax, Ymax])}, Data).
-
-%% graph(Width, Height, Data) = Image
-%% In:
-%% Width = integer(),
-%% Height = integer(),
-%% Data = [{Time, Procs, Ports}]
-%% Time = float()
-%% Procs = integer()
-%% Ports = integer()
-%% Out:
-%% Image = binary()
-
-graph(Width, Height, Data) ->
- Data2 = [{X, Y1 + Y2} || {X, Y1, Y2} <- Data],
- Bounds = percept_analyzer:minmax(Data2),
- graf1(Width, Height, Bounds, Data).
-
-graf1(Width, Height, {Xmin, Ymin, Xmax, Ymax}, Data) ->
- % Calculate areas
- HO = 20,
- GrafArea = #graph_area{x = HO, y = 4, width = Width - 2*HO, height = Height - 17},
- XticksArea = #graph_area{x = HO, y = Height - 13, width = Width - 2*HO, height = 13},
- YticksArea = #graph_area{x = 1, y = 4, width = HO, height = Height - 17},
-
-    %% Initialize the image
-
- Image = egd:create(Width, Height),
-
- %% Set colors
-
- Black = egd:color(Image, {0, 0, 0}),
- ProcColor = egd:color(Image, {0, 255, 0}),
- PortColor = egd:color(Image, {255, 0, 0}),
-
-    %% Draw graph, xticks and yticks
- draw_graf(Image, Data, {Black, ProcColor, PortColor}, GrafArea, {Xmin, Ymin, Xmax, Ymax}),
- draw_xticks(Image, Black, XticksArea, {Xmin, Xmax}, Data),
- draw_yticks(Image, Black, YticksArea, {Ymin, Ymax}),
-
-    %% Destroy the image and return the rendered binary
- Binary = egd:render(Image, png),
- egd:destroy(Image),
- Binary.
-
-%% draw_graf(Image, Data, Color, GraphArea, DataBounds)
-%% Image, port to Image
-%% Data, list of three tuple data, (X, Y1, Y2)
-%% Color, {ForegroundColor, ProcFillColor, PortFillColor}
-%% DataBounds, {Xmin, Ymin, Xmax, Ymax}
-
-draw_graf(Im, Data, Colors, GA = #graph_area{x = X0, y = Y0, width = Width, height = Height}, {Xmin, _Ymin, Xmax, Ymax}) ->
- Dx = (Width)/(Xmax - Xmin),
- Dy = (Height)/(Ymax),
- Plotdata = [{trunc(X0 + X*Dx - Xmin*Dx), trunc(Y0 + Height - Y1*Dy), trunc(Y0 + Height - (Y1 + Y2)*Dy)} || {X, Y1, Y2} <- Data],
- draw_graf(Im, Plotdata, Colors, GA).
-
-draw_graf(Im, [{X1, Yproc1, Yport1}, {X2, Yproc2, Yport2}|Data], C, GA) when X2 - X1 < 1 ->
- draw_graf(Im, [{X1, [{Yproc2, Yport2},{Yproc1, Yport1}]}|Data], C, GA);
-
-draw_graf(Im, [{X1, Ys1}, {X2, Yproc2, Yport2}|Data], C, GA) when X2 - X1 < 1, is_list(Ys1) ->
- draw_graf(Im, [{X1, [{Yproc2, Yport2}|Ys1]}|Data], C, GA);
-
-draw_graf(Im, [{X1, Yproc1, Yport1}, {X2, Yproc2, Yport2}|Data], C = {B, PrC, PoC}, GA = #graph_area{y = Y0, height = H}) ->
- GyZero = trunc(Y0 + H),
- egd:filledRectangle(Im, {X1, GyZero}, {X2, Yproc1}, PrC),
- egd:filledRectangle(Im, {X1, Yproc1}, {X2, Yport1}, PoC),
- egd:line(Im, {X1, Yport1}, {X2, Yport1}, B), % top line
-    egd:line(Im, {X1, Yport2}, {X1, Yport1}, B), % left line
- egd:line(Im, {X2, Yport1}, {X2, Yport2}, B), % right line
- draw_graf(Im, [{X2, Yproc2, Yport2}|Data], C, GA);
-
-draw_graf(Im, [{X1, Ys1 = [{Yproc1,Yport1}|_]}, {X2, Yproc2, Yport2}|Data], C = {B, PrC, PoC}, GA = #graph_area{y = Y0, height = H}) ->
- GyZero = trunc(Y0 + H),
- Yprocs = [Yp || {Yp, _} <- Ys1],
- Yports = [Yp || {_, Yp} <- Ys1],
-
- YprMin = lists:min(Yprocs),
- YprMax = lists:max(Yprocs),
- YpoMax = lists:max(Yports),
- egd:filledRectangle(Im, {X1, GyZero}, {X2, Yproc1}, PrC),
- egd:filledRectangle(Im, {X1, Yproc1}, {X2, Yport1}, PoC),
- egd:filledRectangle(Im, {X1, Yport1}, {X2, Yport1}, B), % top line
- egd:filledRectangle(Im, {X2, Yport1}, {X2, Yport2}, B), % right line
-
- egd:filledRectangle(Im, {X1, GyZero}, {X1, YprMin}, PrC), % left proc green line
- egd:filledRectangle(Im, {X1, YprMax}, {X1, YpoMax}, PoC), % left port line
- egd:filledRectangle(Im, {X1, YprMax}, {X1, YprMin}, B),
-
- draw_graf(Im, [{X2, Yproc2, Yport2}|Data], C, GA);
-draw_graf(_, _, _, _) -> ok.
-
-draw_xticks(Image, Color, XticksArea, {Xmin, Xmax}, Data) ->
- #graph_area{x = X0, y = Y0, width = Width} = XticksArea,
-
- DX = Width/(Xmax - Xmin),
- Offset = X0 - Xmin*DX,
- Y = trunc(Y0),
- Font = load_font(),
- {FontW, _FontH} = egd_font:size(Font),
- egd:filledRectangle(Image, {trunc(X0), Y}, {trunc(X0 + Width), Y}, Color),
- lists:foldl(
- fun ({X,_,_}, PX) ->
- X1 = trunc(Offset + X*DX),
-
-	    % Optimization:
-	    % once the offset has passed half of the previous label's text,
-	    % start checking whether this label fits
-
- if
- X1 > PX ->
- Text = lists:flatten(io_lib:format("~.3f", [float(X)])),
- TextLength = length(Text),
- TextWidth = TextLength*FontW,
- Spacing = 2,
- if
- X1 > PX + round(TextWidth/2) + Spacing ->
- egd:line(Image, {X1, Y - 3}, {X1, Y + 3}, Color),
- text(Image, {X1 - round(TextWidth/2), Y + 2}, Font, Text, Color),
- X1 + round(TextWidth/2) + Spacing;
- true ->
- PX
- end;
- true ->
- PX
- end
- end, 0, Data).
-
-draw_yticks(Im, Color, TickArea, {_,Ymax}) ->
- #graph_area{x = X0, y = Y0, width = Width, height = Height} = TickArea,
- Font = load_font(),
- X = trunc(X0 + Width),
- Dy = (Height)/(Ymax),
- Yts = if
- Height/(Ymax*12) < 1.0 -> round(1 + Ymax*15/Height);
- true -> 1
- end,
- egd:filledRectangle(Im, {X, trunc(0 + Y0)}, {X, trunc(Y0 + Height)}, Color),
- draw_yticks0(Im, Font, Color, 0, Yts, Ymax, {X, Height, Dy}).
-
-draw_yticks0(Im, Font, Color, Yi, Yts, Ymax, Area) when Yi < Ymax ->
- {X, Height, Dy} = Area,
- Y = round(Height - (Yi*Dy) + 3),
-
- egd:filledRectangle(Im, {X - 3, Y}, {X + 3, Y}, Color),
- Text = lists:flatten(io_lib:format("~p", [Yi])),
- text(Im, {0, Y - 4}, Font, Text, Color),
- draw_yticks0(Im, Font, Color, Yi + Yts, Yts, Ymax, Area);
-draw_yticks0(_, _, _, _, _, _, _) -> ok.
-
-%%% -------------------------------------
-%%% ACTIVITIES
-%%% -------------------------------------
-
-%% activities(Width, Height, Range, Activities) -> Binary
-%% In:
-%% Width = integer()
-%% Height = integer()
-%% Range = {float(), float()}
-%% Activities = [{float(), active | inactive}]
-%% Out:
-%% Binary = binary()
-
-activities(Width, Height, {UXmin, UXmax}, Activities) ->
- Xs = [ X || {X,_} <- Activities],
- Xmin = lists:min(Xs),
- Xmax = lists:max(Xs),
- activities0(Width, Height, {lists:min([Xmin, UXmin]), lists:max([UXmax, Xmax])}, Activities).
-
-activities(Width, Height, Activities) ->
- Xs = [ X || {X,_} <- Activities],
- Xmin = lists:min(Xs),
- Xmax = lists:max(Xs),
- activities0(Width, Height, {Xmin, Xmax}, Activities).
-
-activities0(Width, Height, {Xmin, Xmax}, Activities) ->
- Image = egd:create(Width, Height),
- Grey = egd:color(Image, {200, 200, 200}),
- HO = 20,
- ActivityArea = #graph_area{x = HO, y = 0, width = Width - 2*HO, height = Height},
- egd:filledRectangle(Image, {0, 0}, {Width, Height}, Grey),
- draw_activity(Image, {Xmin, Xmax}, ActivityArea, Activities),
- Binary = egd:render(Image, png),
- egd:destroy(Image),
- Binary.
-
-draw_activity(Image, {Xmin, Xmax}, Area = #graph_area{ width = Width }, Acts) ->
- White = egd:color({255, 255, 255}),
- Green = egd:color({0,250, 0}),
- Black = egd:color({0, 0, 0}),
-
- Dx = Width/(Xmax - Xmin),
-
- draw_activity(Image, {Xmin, Xmax}, Area, {White, Green, Black}, Dx, Acts).
-
-draw_activity(_, _, _, _, _, [_]) -> ok;
-draw_activity(Image, {Xmin, Xmax}, Area = #graph_area{ height = Height, x = X0 }, {Cw, Cg, Cb}, Dx, [{Xa1, State}, {Xa2, Act2} | Acts]) ->
- X1 = erlang:trunc(X0 + Dx*Xa1 - Xmin*Dx),
- X2 = erlang:trunc(X0 + Dx*Xa2 - Xmin*Dx),
-
- case State of
- inactive ->
- egd:filledRectangle(Image, {X1, 0}, {X2, Height - 1}, Cw),
- egd:rectangle(Image, {X1, 0}, {X2, Height - 1}, Cb);
- active ->
- egd:filledRectangle(Image, {X1, 0}, {X2, Height - 1}, Cg),
- egd:rectangle(Image, {X1, 0}, {X2, Height - 1}, Cb)
- end,
- draw_activity(Image, {Xmin, Xmax}, Area, {Cw, Cg, Cb}, Dx, [{Xa2, Act2} | Acts]).
-
-
-
-%%% -------------------------------------
-%%% Process lifetime
-%%% Used by processes page
-%%% -------------------------------------
-
-proc_lifetime(Width, Height, Start, End, ProfileTime) ->
- Im = egd:create(round(Width), round(Height)),
- Black = egd:color(Im, {0, 0, 0}),
- Green = egd:color(Im, {0, 255, 0}),
-
- % Ratio and coordinates
-
- DX = (Width-1)/ProfileTime,
- X1 = round(DX*Start),
- X2 = round(DX*End),
-
- % Paint
- egd:filledRectangle(Im, {X1, 0}, {X2, Height - 1}, Green),
- egd:rectangle(Im, {X1, 0}, {X2, Height - 1}, Black),
-
- Binary = egd:render(Im, png),
- egd:destroy(Im),
- Binary.
-
-%%% -------------------------------------
-%%% Percentage
-%%% Used by process_info page
-%%% Percentage should be 0.0 -> 1.0
-%%% -------------------------------------
-percentage(Width, Height, Percentage) ->
- Im = egd:create(round(Width), round(Height)),
- Font = load_font(),
- Black = egd:color(Im, {0, 0, 0}),
- Green = egd:color(Im, {0, 255, 0}),
-
- % Ratio and coordinates
-
- X = round(Width - 1 - Percentage*(Width - 1)),
-
- % Paint
- egd:filledRectangle(Im, {X, 0}, {Width - 1, Height - 1}, Green),
- {FontW, _} = egd_font:size(Font),
- String = lists:flatten(io_lib:format("~.10B %", [round(100*Percentage)])),
-
- text( Im,
- {round(Width/2 - (FontW*length(String)/2)), 0},
- Font,
- String,
- Black),
- egd:rectangle(Im, {X, 0}, {Width - 1, Height - 1}, Black),
-
- Binary = egd:render(Im, png),
- egd:destroy(Im),
- Binary.
-
-
-load_font() ->
- Filename = filename:join([code:priv_dir(percept),"fonts", "6x11_latin1.wingsfont"]),
- egd_font:load(Filename).
-
-text(Image, {X,Y}, Font, Text, Color) ->
- egd:text(Image, {X,Y-2}, Font, Text, Color).
diff --git a/lib/percept/test/Makefile b/lib/percept/test/Makefile
deleted file mode 100644
index 87fde49410..0000000000
--- a/lib/percept/test/Makefile
+++ /dev/null
@@ -1,91 +0,0 @@
-#
-# %CopyrightBegin%
-#
-# Copyright Ericsson AB 2007-2016. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# %CopyrightEnd%
-#
-
-include $(ERL_TOP)/make/target.mk
-
-include $(ERL_TOP)/make/$(TARGET)/otp.mk
-
-# ----------------------------------------------------
-# Target Specs
-# ----------------------------------------------------
-
-MODULES= \
- ipc_tree \
- percept_SUITE \
- egd_SUITE
-
-EBIN = .
-
-HRL_FILES=
-
-ERL_FILES= $(MODULES:%=%.erl)
-
-TARGET_FILES = $(MODULES:%=$(EBIN)/%.$(EMULATOR))
-
-SOURCE = $(ERL_FILES) $(HRL_FILES)
-
-EMAKEFILE=Emakefile
-
-# ----------------------------------------------------
-# Release directory specification
-# ----------------------------------------------------
-RELSYSDIR = $(RELEASE_PATH)/percept_test
-
-# ----------------------------------------------------
-# FLAGS
-# ----------------------------------------------------
-ERL_MAKE_FLAGS +=
-ERL_COMPILE_FLAGS += -I$(ERL_TOP)/lib/percept/include
-
-# ----------------------------------------------------
-# Targets
-# ----------------------------------------------------
-
-make_emakefile:
- $(ERL_TOP)/make/make_emakefile $(ERL_COMPILE_FLAGS) -o$(EBIN) $(MODULES)\
- > $(EMAKEFILE)
-
-tests debug opt: make_emakefile
- erl $(ERL_MAKE_FLAGS) -make
-
-clean:
- rm -f $(EMAKEFILE)
- rm -f $(TARGET_FILES)
- rm -f core *~
-
-docs:
-
-
-# ----------------------------------------------------
-# Release Target
-# ----------------------------------------------------
-include $(ERL_TOP)/make/otp_release_targets.mk
-
-release_spec: opt
-
-release_tests_spec: make_emakefile
- $(INSTALL_DIR) "$(RELSYSDIR)"
- $(INSTALL_DATA) percept.spec percept.cover $(EMAKEFILE) $(SOURCE) "$(RELSYSDIR)"
- chmod -R u+w "$(RELSYSDIR)"
- @tar cf - *_SUITE_data | (cd "$(RELSYSDIR)"; tar xf -)
-
-release_docs_spec:
-
-
diff --git a/lib/percept/test/egd_SUITE.erl b/lib/percept/test/egd_SUITE.erl
deleted file mode 100644
index 401695dddd..0000000000
--- a/lib/percept/test/egd_SUITE.erl
+++ /dev/null
@@ -1,389 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2007-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
-
--module(egd_SUITE).
--include_lib("common_test/include/ct.hrl").
-
-%% Test server specific exports
--export([all/0, suite/0]).
--export([init_per_suite/1, end_per_suite/1]).
--export([init_per_testcase/2, end_per_testcase/2]).
-
-%% Test cases
--export([image_create_and_destroy/1,
- image_shape/1,
- image_primitives/1,
- image_colors/1,
- image_font/1,
- image_fans/1,
- image_png_compliant/1]).
-
-suite() ->
- [{ct_hooks,[ts_install_cth]},
- {timetrap, {minutes, 1}}].
-
-all() ->
- [image_create_and_destroy, image_shape,
- image_primitives, image_colors, image_font,
- image_fans,
- image_png_compliant].
-
-
-init_per_suite(Config) when is_list(Config) ->
- rand:seed(exsplus),
- Config.
-
-end_per_suite(Config) when is_list(Config) ->
- Config.
-
-init_per_testcase(_Case, Config) ->
- [{max_size, 800}|Config].
-
-end_per_testcase(_Case, _Config) ->
- ok.
-
-%%----------------------------------------------------------------------
-%% Tests
-%%----------------------------------------------------------------------
-
-%% Image creation and destroy test.
-image_create_and_destroy(Config) when is_list(Config) ->
- {W,H} = get_size(proplists:get_value(max_size, Config)),
- Image = egd:create(W, H),
- ok = egd:destroy(Image),
- ok.
-
-%% Image color test.
-image_colors(Config) when is_list(Config) ->
- {W,H} = get_size(proplists:get_value(max_size, Config)),
- Dir = proplists:get_value(priv_dir, Config),
- Image = egd:create(W, H),
- put(image_size, {W,H}),
-
- RGB = get_rgb(),
- Black = egd:color({0,0,0}),
- Red = egd:color({255,0,0}),
- Green = egd:color({0,255,0}),
- Blue = egd:color({0,0,255}),
- Random = egd:color(Image, RGB),
-
- ok = egd:line(Image, get_point(), get_point(), Random),
- ok = egd:line(Image, get_point(), get_point(), Red),
- ok = egd:line(Image, get_point(), get_point(), Green),
- ok = egd:line(Image, get_point(), get_point(), Black),
- ok = egd:line(Image, get_point(), get_point(), Blue),
-
- HtmlDefaultNames = [black,silver,gray,white,maroon,red,
- purple,fuchia,green,lime,olive,yellow,navy,blue,teal,
- aqua],
-
- lists:foreach(fun (ColorName) ->
- Color = egd:color(ColorName),
- ok = egd:line(Image, get_point(), get_point(), Color)
- end, HtmlDefaultNames),
-
- Png1 = <<_/binary>> = egd:render(Image,png,[{render_engine, alpha}]),
- File1 = filename:join(Dir,"image_colors_alpha.png"),
- ok = egd:save(Png1,File1),
- ct:log("<p>Image alpha:</p><img src=\"~s\" />~n", [File1]),
- Png2 = <<_/binary>> = egd:render(Image,png,[{render_engine, opaque}]),
- File2 = filename:join(Dir,"image_colors_opaque.png"),
- ok = egd:save(Png2,File2),
- ct:log("<p>Image opaque:</p><img src=\"~s\" />~n", [File2]),
-
- ok = egd:destroy(Image),
- erase(image_size),
- ok.
-
-%% Image shape API test.
-image_shape(Config) when is_list(Config) ->
- {W,H} = get_size(proplists:get_value(max_size, Config)),
- Dir = proplists:get_value(priv_dir, Config),
- put(image_size, {W,H}),
- Im = egd:create(W, H),
-
- Fgc = egd:color({255,0,0}),
-
- ok = egd:line(Im, get_point(), get_point(), Fgc),
- ok = egd:rectangle(Im, get_point(), get_point(), Fgc),
- ok = egd:filledEllipse(Im, get_point(), get_point(), Fgc),
- ok = egd:arc(Im, get_point(), get_point(), Fgc),
- ok = egd:arc(Im, get_point(), get_point(), 100, Fgc),
-
- Pt1 = get_point(),
- Pt2 = get_point(),
-
- ok = egd:filledRectangle(Im, Pt1, Pt2, Fgc),
-
- Bitmap = egd:render(Im, raw_bitmap),
-
- ok = bitmap_point_has_color(Bitmap, {W,H}, Pt2, Fgc),
- ok = bitmap_point_has_color(Bitmap, {W,H}, Pt1, Fgc),
-
- Bin = <<_/binary>> = egd:render(Im, raw_bitmap, [{render_engine, alpha}]),
- Png = egd_png:binary(W,H,Bin),
- File = filename:join(Dir,"image_shape.png"),
- ok = egd:save(Png,File),
- ct:log("<p>Image:</p><img src=\"~s\" />~n", [File]),
-
- ok = egd:destroy(Im),
-
- erase(image_size),
- ok.
-
-%% Image shape API test.
-image_primitives(Config) when is_list(Config) ->
- {W,H} = get_size(proplists:get_value(max_size, Config)),
- Dir = proplists:get_value(priv_dir, Config),
- put(image_size, {W,H}),
-
- Im0 = egd_primitives:create(W, H),
- Fgc = egd:color({25,25,255}),
- Bgc = egd:color({0,250,25}),
-
- Im1 = lists:foldl(fun ({Function, Arguments}, Im) ->
- erlang:apply(egd_primitives, Function, [Im|Arguments])
- end, Im0,
- [{Fs, [get_point(), get_point(), Bgc]} || Fs <- [line, rectangle, filledEllipse, arc]] ++
- [{pixel, [get_point(), Bgc]},
- {filledTriangle, [get_point(), get_point(), get_point(), Bgc]}]),
-
- Pt1 = get_point(),
- Pt2 = get_point(),
-
- Im2 = egd_primitives:filledRectangle(Im1, Pt1, Pt2, Fgc),
-
- Bitmap = egd_render:binary(Im2, opaque),
-
- ok = bitmap_point_has_color(Bitmap, {W,H}, Pt2, Fgc),
- ok = bitmap_point_has_color(Bitmap, {W,H}, Pt1, Fgc),
-
- Bin = <<_/binary>> = egd_render:binary(Im2, alpha),
- Png = egd_png:binary(W,H,Bin),
- File = filename:join(Dir,"image_primitives.png"),
- ok = egd:save(Png,File),
- ct:log("<p>Image:</p><img src=\"~s\" />~n", [File]),
-
- erase(image_size),
- ok.
-
-%% Image font test.
-image_font(Config) when is_list(Config) ->
- {W,H} = get_size(proplists:get_value(max_size, Config)),
- Dir = proplists:get_value(priv_dir, Config),
- put(image_size, {W,H}),
- Im = egd:create(W, H),
- Fgc = egd:color({0,130,0}),
-
- Filename = filename:join([code:priv_dir(percept),"fonts","6x11_latin1.wingsfont"]),
- Font = egd_font:load(Filename),
-
- % simple text
- ok = egd:text(Im, get_point(), Font, "Hello World", Fgc),
- <<_/binary>> = egd:render(Im, png),
-
- GlyphStr1 = " !\"#$%&'()*+,-./", % Codes 32 -> 47
- NumericStr = "0123456789", % Codes 48 -> 57
- GlyphStr2 = ":;<=>?@", % Codes 58 -> 64
- AlphaBigStr = "ABCDEFGHIJKLMNOPQRSTUVWXYZ", % Codes 65 -> 90
- GlyphStr3 = "[\\]^_`", % Codes 91 -> 96
- AlphaSmStr = "abcdefghijklmnopqrstuvwxyz", % Codes 97 -> 122
- GlyphStr4 = "{|}~", % Codes 123 -> 126
-
- ok = egd:text(Im, get_point(), Font, GlyphStr1, Fgc),
- Png1 = <<_/binary>> = egd:render(Im, png),
- File1 = filename:join(Dir,"text1.png"),
- ok = egd:save(Png1,File1),
- ct:log("<p>Image:</p><img src=\"~s\" />~n", [File1]),
-
- ok = egd:text(Im, get_point(), Font, NumericStr, Fgc),
- Png2 = <<_/binary>> = egd:render(Im, png),
- File2 = filename:join(Dir,"text2.png"),
- ok = egd:save(Png2,File2),
- ct:log("<p>Image:</p><img src=\"~s\" />~n", [File2]),
-
- ok = egd:text(Im, get_point(), Font, GlyphStr2, Fgc),
- Png3 = <<_/binary>> = egd:render(Im, png),
- File3 = filename:join(Dir,"text3.png"),
- ok = egd:save(Png3,File3),
- ct:log("<p>Image:</p><img src=\"~s\" />~n", [File3]),
-
- ok = egd:text(Im, get_point(), Font, AlphaBigStr, Fgc),
- Png4 = <<_/binary>> = egd:render(Im, png),
- File4 = filename:join(Dir,"text4.png"),
- ok = egd:save(Png4,File4),
- ct:log("<p>Image:</p><img src=\"~s\" />~n", [File4]),
-
- ok = egd:text(Im, get_point(), Font, GlyphStr3, Fgc),
- Png5 = <<_/binary>> = egd:render(Im, png),
- File5 = filename:join(Dir,"text5.png"),
- ok = egd:save(Png5,File5),
- ct:log("<p>Image:</p><img src=\"~s\" />~n", [File5]),
-
- ok = egd:text(Im, get_point(), Font, AlphaSmStr, Fgc),
- Png6 = <<_/binary>> = egd:render(Im, png),
- File6 = filename:join(Dir,"text6.png"),
- ok = egd:save(Png6,File6),
- ct:log("<p>Image:</p><img src=\"~s\" />~n", [File6]),
-
- ok = egd:text(Im, get_point(), Font, GlyphStr4, Fgc),
- Png7 = <<_/binary>> = egd:render(Im, png),
- File7 = filename:join(Dir,"text7.png"),
- ok = egd:save(Png7,File7),
- ct:log("<p>Image:</p><img src=\"~s\" />~n", [File7]),
-
- ok = egd:destroy(Im),
- erase(image_size),
- ok.
-
-%% Image png compliant test.
-image_png_compliant(Config) when is_list(Config) ->
- {W,H} = get_size(proplists:get_value(max_size, Config)),
- put(image_size, {W,H}),
- Im = egd:create(W, H),
- Fgc = egd:color({0,0,0}),
- ok = egd:filledRectangle(Im, get_point(), get_point(), Fgc),
-
- Bin = egd:render(Im, png),
- true = binary_is_png_compliant(Bin),
-
- ok = egd:destroy(Im),
- erase(image_size),
- ok.
-
-image_fans(Config) when is_list(Config) ->
- W = 1024,
- H = 800,
- Dir = proplists:get_value(priv_dir, Config),
-
- Fun = fun({F,Args},Im) ->
- erlang:apply(egd_primitives,F,[Im|Args])
- end,
-
- %% fan1
- Ops1 = gen_vertical_fan(1,{0,400},egd:color(red),1024,800,-15),
- Ops2 = gen_horizontal_fan(1,{512,800},egd:color(green),1024,0,-15),
-
- Im0 = egd_primitives:create(W,H),
- Im1 = lists:foldl(Fun, Im0, Ops1 ++ Ops2),
- Bin1 = egd_render:binary(Im1, opaque),
- Png1 = egd_png:binary(W,H,Bin1),
-
- File1 = filename:join(Dir,"fan1_opaque.png"),
- ok = egd:save(Png1,File1),
- ct:log("<p>Image opaque width 1:</p><img src=\"~s\" />~n", [File1]),
-
- Bin2 = egd_render:binary(Im1, alpha),
- Png2 = egd_png:binary(W,H,Bin2),
-
- File2 = filename:join(Dir,"fan1_alpha.png"),
- ok = egd:save(Png2,File2),
- ct:log("<p>Image alpha width 1:</p><img src=\"~s\" />~n", [File2]),
-
-
- %% fan2
- Ops3 = gen_vertical_fan(7,{0,400},egd:color(red),1024,800,-15),
- Ops4 = gen_horizontal_fan(7,{512,800},egd:color(green),1024,0,-15),
-
- Im2 = lists:foldl(Fun, Im0, Ops3 ++ Ops4),
- Bin3 = egd_render:binary(Im2, opaque),
- Png3 = egd_png:binary(W,H,Bin3),
-
- File3 = filename:join(Dir,"fan2_opaque.png"),
- ok = egd:save(Png3,File3),
- ct:log("<p>Image opaque width 7:</p><img src=\"~s\" />~n", [File3]),
-
- Bin4 = egd_render:binary(Im2, alpha),
- Png4 = egd_png:binary(W,H,Bin4),
-
- File4 = filename:join(Dir,"fan2_alpha.png"),
- ok = egd:save(Png4,File4),
- ct:log("<p>Image alpha width 7:</p><img src=\"~s\" />~n", [File4]),
- ok.
-
-gen_vertical_fan(Wd,Pt,C,X,Y,Step) when Y > 0 ->
- [{line,[Pt,{X,Y},Wd,C]}|gen_vertical_fan(Wd,Pt,C,X,Y + Step,Step)];
-gen_vertical_fan(_,_,_,_,_,_) -> [].
-
-gen_horizontal_fan(Wd,Pt,C,X,Y,Step) when X > 0 ->
- [{line,[Pt,{X,Y},Wd,C]}|gen_horizontal_fan(Wd,Pt,C,X + Step,Y,Step)];
-gen_horizontal_fan(_,_,_,_,_,_) -> [].
-
-
-%%----------------------------------------------------------------------
-%% Auxiliary tests
-%%----------------------------------------------------------------------
-
-bitmap_point_has_color(Bitmap, {W,_}, {X,Y}, C) ->
- {CR,CG,CB,_} = egd_primitives:rgb_float2byte(C),
- N = W*Y*3 + X*3,
- << _:N/binary, R,G,B, _/binary>> = Bitmap,
- case {R,G,B} of
- {CR,CG,CB} -> ok;
- Other ->
- io:format("bitmap_point_has_color: error color was ~p, should be ~p~n", [Other, {CR,CG,CB}]),
- {error, {Other,{CR,CG,CB}}}
- end.
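-%% Editor's note (illustration only): the raw bitmap is row-major with three
-%% bytes (R, G, B) per pixel, so pixel {X, Y} in an image W pixels wide starts
-%% at byte offset W*Y*3 + X*3; e.g. W = 800 and {X, Y} = {10, 2} gives 4830.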
-
-binary_is_png_compliant(PngBin) ->
- {Bin, _} = split_binary(PngBin, 10),
- List = binary_to_list(Bin),
- case lists:sublist(List, 2,3) of
- "PNG" -> true;
- Other ->
- io:format("img -> ~p~n", [Other]),
- false
- end.
-
-%%----------------------------------------------------------------------
-%% Auxiliary
-%%----------------------------------------------------------------------
-
-
-get_rgb() ->
- R = random(255),
- G = random(255),
- B = random(255),
- {R,G,B}.
-
-get_angle() ->
- random(359).
-
-get_point() ->
- get_point(get(image_size)).
-get_point({W,H}) ->
- X = random(W - 1),
- Y = random(H - 1),
- {X,Y}.
-
-get_size(Max) ->
- W = trunc(random(Max/2) + Max/2 + 1),
- H = trunc(random(Max/2) + Max/2 + 1),
- io:format("Image size will be ~p x ~p~n", [W,H]),
- {W,H}.
-
-get_points(N) ->
- get_points(N, []).
-get_points(0, Out) ->
- Out;
-get_points(N, Out) ->
- get_points(N - 1, [get_point() | Out]).
-
-random(N) -> trunc(rand:uniform(trunc(N + 1)) - 1).
diff --git a/lib/percept/test/ipc_tree.erl b/lib/percept/test/ipc_tree.erl
deleted file mode 100644
index 29da20e83f..0000000000
--- a/lib/percept/test/ipc_tree.erl
+++ /dev/null
@@ -1,49 +0,0 @@
-%% ``Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
-%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
-%% AB. All Rights Reserved.''
-%%
-%% $Id$
-%%
-
--module(ipc_tree).
--export([go/1, init/2]).
-
-go(N) ->
- start(N, self()),
- receive stop -> ok end.
-
-start(Depth, ParentPid) ->
- spawn(?MODULE, init, [Depth, ParentPid]).
-
-init(0, ParentPid) ->
- workload(5000),
- ParentPid ! stop,
- ok;
-init(Depth, ParentPid) ->
- Pid1 = spawn(?MODULE, init, [Depth - 1, self()]),
- Pid2 = spawn(?MODULE, init, [Depth - 1, self()]),
- main([Pid1,Pid2], ParentPid).
-
-main(Pids, ParentPid) ->
- workload(5000),
- gather(Pids),
- ParentPid ! stop,
- ok.
-
-gather([]) -> ok;
-gather([_|Pids]) -> receive _ -> gather(Pids) end.
-
-workload(0) -> ok;
-workload(N) -> _ = math:sin(2), workload(N - 1).
diff --git a/lib/percept/test/percept.cover b/lib/percept/test/percept.cover
deleted file mode 100644
index 8a5ad0a55e..0000000000
--- a/lib/percept/test/percept.cover
+++ /dev/null
@@ -1,2 +0,0 @@
-{incl_app,percept,details}.
-
diff --git a/lib/percept/test/percept.spec b/lib/percept/test/percept.spec
deleted file mode 100644
index f3ef76bd60..0000000000
--- a/lib/percept/test/percept.spec
+++ /dev/null
@@ -1 +0,0 @@
-{suites,"../percept_test",all}.
diff --git a/lib/percept/test/percept_SUITE.erl b/lib/percept/test/percept_SUITE.erl
deleted file mode 100644
index 2be8b70e0d..0000000000
--- a/lib/percept/test/percept_SUITE.erl
+++ /dev/null
@@ -1,126 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2007-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
-
--module(percept_SUITE).
--include_lib("common_test/include/ct.hrl").
-
-%% Test server specific exports
--export([all/0, suite/0]).
-
-%% Test cases
--export([app/1,
- appup/1,
- profile/1,
- analyze/1,
- analyze_dist/1,
- webserver/1]).
-
-suite() ->
- [{ct_hooks,[ts_install_cth]},
- {timetrap, {minutes, 2}}].
-
-all() ->
- [app, appup, webserver, profile,
- analyze, analyze_dist].
-
-
-%%----------------------------------------------------------------------
-%% Tests
-%%----------------------------------------------------------------------
-
-%% Test that the percept app file is ok
-app(Config) when is_list(Config) ->
- ok = test_server:app_test(percept).
-
-%% Test that the percept appup file is ok
-appup(Config) when is_list(Config) ->
- ok = test_server:appup_test(percept).
-
-%% Percept webserver test.
-webserver(Config) when is_list(Config) ->
- % Explicit start inets?
- {started, _, Port} = percept:start_webserver(),
- ok = percept:stop_webserver(Port),
- {started, _, _} = percept:start_webserver(),
- ok = percept:stop_webserver(),
- {started, _, NewPort} = percept:start_webserver(),
- ok = percept:stop_webserver(NewPort),
- application:stop(inets),
- ok.
-
-%% Percept profile test.
-profile(Config) when is_list(Config) ->
- Path = proplists:get_value(data_dir, Config),
- File = filename:join([Path,"profile_test.dat"]),
- {ok, _} = percept:profile(File, [procs]),
- ipc_tree:go(7),
- ok = percept:stop_profile(),
- ok.
-
-%% Percept analyze test.
-analyze(Config) when is_list(Config) ->
- Begin = processes(),
- Path = proplists:get_value(data_dir, Config),
- File = filename:join([Path,"profile_test.dat"]),
- T0 = erlang:monotonic_time(millisecond),
- ok = percept:analyze(File),
- T1 = erlang:monotonic_time(millisecond),
- io:format("percept:analyze/1 took ~w ms.~n", [T1 - T0]),
- {stopped, _} = percept_db:stop(),
- print_remainers(remainers(Begin, processes())),
- ok.
-
-%% Percept analyze distribution test.
-analyze_dist(Config) when is_list(Config) ->
- Begin = processes(),
- Path = proplists:get_value(data_dir, Config),
- File = filename:join([Path,"ipc-dist.dat"]),
- T0 = erlang:monotonic_time(millisecond),
- ok = percept:analyze(File),
- T1 = erlang:monotonic_time(millisecond),
- io:format("percept:analyze/1 took ~w ms.~n", [T1 - T0]),
- {stopped, _} = percept_db:stop(),
- print_remainers(remainers(Begin, processes())),
- ok.
-
-%%----------------------------------------------------------------------
-%% Auxiliary tests
-%%----------------------------------------------------------------------
-
-%%----------------------------------------------------------------------
-%% Auxiliary
-%%----------------------------------------------------------------------
-
-print_remainers([]) -> ok;
-print_remainers([Pid|Pids]) ->
- io:format("[Pid ~p] [Entry ~p] [Name ~p]~n", [
- Pid,
- erlang:process_info(Pid, initial_call),
- erlang:process_info(Pid, registered_name)
- ]),
- print_remainers(Pids).
-
-remainers(Begin, End) -> remainers(Begin, End, []).
-remainers(_, [], Out) -> lists:reverse(Out);
-remainers(Begin, [Pid|End], Out) ->
- case lists:member(Pid, Begin) of
- true -> remainers(Begin, End, Out);
- false -> remainers(Begin, End, [Pid|Out])
- end.
diff --git a/lib/percept/test/percept_SUITE_data/ipc-dist.dat b/lib/percept/test/percept_SUITE_data/ipc-dist.dat
deleted file mode 100644
index 14ab6c0c5d..0000000000
--- a/lib/percept/test/percept_SUITE_data/ipc-dist.dat
+++ /dev/null
Binary files differ
diff --git a/lib/percept/test/percept_db_SUITE.erl b/lib/percept/test/percept_db_SUITE.erl
deleted file mode 100644
index b2827e0e42..0000000000
--- a/lib/percept/test/percept_db_SUITE.erl
+++ /dev/null
@@ -1,55 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2007-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
-
--module(percept_db_SUITE).
--include_lib("common_test/include/ct.hrl").
-
-%% Test server specific exports
--export([all/0, suite/0]).
-
-%% Test cases
--export([start/1]).
-
-%% Default timetrap timeout (set in init_per_testcase)
--define(restarts, 10).
--define(alive_timeout, 500).
-
-suite() ->
- [{timetrap, {minutes, 2}}].
-
-all() ->
- [start].
-
-%%----------------------------------------------------------------------
-%% Tests
-%%----------------------------------------------------------------------
-
-%% Percept_db start and restart test.
-start(Config) when is_list(Config) ->
- ok = restart(?restarts),
- {stopped, _DB} = percept_db:stop(),
- ok.
-
-restart(0)-> ok;
-restart(N)->
- {_, DB} = percept_db:start(),
- timer:sleep(?alive_timeout),
- true = erlang:is_process_alive(DB),
- restart(N-1).
diff --git a/lib/percept/vsn.mk b/lib/percept/vsn.mk
deleted file mode 100644
index 614cee8645..0000000000
--- a/lib/percept/vsn.mk
+++ /dev/null
@@ -1 +0,0 @@
-PERCEPT_VSN = 0.9
diff --git a/lib/public_key/asn1/PKCS-8.asn1 b/lib/public_key/asn1/PKCS-8.asn1
index 8412345b68..292a7b2029 100644
--- a/lib/public_key/asn1/PKCS-8.asn1
+++ b/lib/public_key/asn1/PKCS-8.asn1
@@ -26,7 +26,7 @@ BEGIN
-- This import is really unnecessary since ALGORITHM-IDENTIFIER is defined as a
-- TYPE-IDENTIFIER
--- Renome this import and replace all occurences of ALGORITHM-IDENTIFIER with
+-- Rename this import and replace all occurrences of ALGORITHM-IDENTIFIER with
-- TYPE-IDENTIFIER as a workaround for weaknesses in the ASN.1 compiler
--AlgorithmIdentifier, ALGORITHM-IDENTIFIER
-- FROM PKCS5v2-0 {iso(1) member-body(2) us(840) rsadsi(113549)
diff --git a/lib/public_key/doc/src/public_key.xml b/lib/public_key/doc/src/public_key.xml
index 75ae7ecb7b..2300ce3937 100644
--- a/lib/public_key/doc/src/public_key.xml
+++ b/lib/public_key/doc/src/public_key.xml
@@ -760,6 +760,39 @@ fun(#'DistributionPoint'{}, #'CertificateList'{},
</func>
<func>
+ <name>pkix_verify_hostname(Cert, ReferenceIDs) -> boolean()</name>
+ <name>pkix_verify_hostname(Cert, ReferenceIDs, Opts) -> boolean()</name>
+      <fsummary>Verifies that a PKIX X.509 certificate <i>presented identifier</i> (e.g. a hostname) is
+      an expected one.</fsummary>
+ <type>
+ <v>Cert = der_encoded() | #'OTPCertificate'{} </v>
+ <v>ReferenceIDs = [ RefID ]</v>
+ <v>RefID = {IdType,string()}</v>
+ <v>IdType = dns_id | srv_id | uri_id</v>
+ <v>Opts = [ PvhOpt() ]</v>
+      <v>PvhOpt = MatchOpt | FailCallBackOpt | FqdnExtractOpt</v>
+      <v>MatchOpt = {match_fun, fun(RefId | FQDN::string(), PresentedID) -> boolean() | default}</v>
+ <v>PresentedID = {dNSName,string()} | {uniformResourceIdentifier,string()}</v>
+ <v>FailCallBackOpt = {fail_callback, fun(#'OTPCertificate'{}) -> boolean()}</v>
+ <v>FqdnExtractOpt = {fqdn_fun, fun(RefID) -> FQDN::string() | default | undefined}</v>
+ </type>
+ <desc>
+	<p>This function checks that the <i>Presented Identifier</i> (e.g. a hostname) in a peer certificate
+	is one of the Expected Identifiers that the client wants to connect to.
+	This function is intended to be added as an extra client check of the peer certificate when performing
+	<seealso marker="public_key:public_key#pkix_path_validation-3">public_key:pkix_path_validation/3</seealso>.
+	</p>
+	<p>See <url href="https://tools.ietf.org/html/rfc6125">RFC 6125</url>
+	for detailed information about hostname verification.
+	The <seealso marker="using_public_key#verify_hostname">User's Manual</seealso>
+	and the
+	<seealso marker="using_public_key#verify_hostname_examples">code examples</seealso>
+	describe this function in more detail.
+	</p>
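+	<p>A minimal call could look like this (a sketch; <c>Cert</c> is assumed to be the
+	peer certificate and the client expects to reach <c>www.example.org</c>):</p>
+	<code>
+1> public_key:pkix_verify_hostname(Cert, [{dns_id,"www.example.org"}]).
+true
+	</code>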
+ </desc>
+ </func>
+
+ <func>
<name>sign(Msg, DigestType, Key) -> binary()</name>
<fsummary>Creates a digital signature.</fsummary>
<type>
@@ -827,6 +860,7 @@ fun(#'DistributionPoint'{}, #'CertificateList'{},
<func>
<name>ssh_hostkey_fingerprint(HostKey) -> string()</name>
<name>ssh_hostkey_fingerprint(DigestType, HostKey) -> string()</name>
+ <name>ssh_hostkey_fingerprint([DigestType], HostKey) -> [string()]</name>
<fsummary>Calculates a ssh fingerprint for a hostkey.</fsummary>
<type>
<v>Key = public_key()</v>
@@ -850,6 +884,10 @@ fun(#'DistributionPoint'{}, #'CertificateList'{},
5> public_key:ssh_hostkey_fingerprint(sha256,Key).
"SHA256:aZGXhabfbf4oxglxltItWeHU7ub3Dc31NcNw2cMJePQ"
+
+ 6> public_key:ssh_hostkey_fingerprint([sha,sha256],Key).
+ ["SHA1:bSLY/C4QXLDL/Iwmhyg0PGW9UbY",
+ "SHA256:aZGXhabfbf4oxglxltItWeHU7ub3Dc31NcNw2cMJePQ"]
</code>
</desc>
</func>
diff --git a/lib/public_key/doc/src/using_public_key.xml b/lib/public_key/doc/src/using_public_key.xml
index e3a1eed4be..417d479da3 100644
--- a/lib/public_key/doc/src/using_public_key.xml
+++ b/lib/public_key/doc/src/using_public_key.xml
@@ -417,6 +417,259 @@ true = public_key:verify(Digest, none, Signature, PublicKey),</code>
</section>
+ <section>
+ <marker id="verify_hostname"></marker>
+ <title>Verifying a certificate hostname</title>
+ <section>
+ <title>Background</title>
+      <p>When a client checks a server certificate, a number of checks are available, for example
+      that the certificate is not revoked, not forged, and not out-of-date.
+      </p>
+      <p>There are however attacks that are not detected by those checks. Suppose an attacker has
+      succeeded with a DNS spoofing attack. The client then believes it is connecting to one host but
+      ends up at another, malicious, one. Even though it is malicious, it could have a perfectly legal
+      certificate! The certificate has a valid signature, it is not revoked, the certificate chain
+      is not forged and has a trusted root, and so on.
+      </p>
+      <p>To detect that the server is not the intended one, the client must additionally perform
+      a <i>hostname verification</i>. This procedure is described in
+      <url href="https://tools.ietf.org/html/rfc6125">RFC 6125</url>. The idea is that the certificate
+      lists the hostnames it may be fetched from. This is checked by the certificate issuer when
+      the certificate is signed. So if the certificate is issued by a trusted root, the client
+      can trust the hostnames signed in it.
+      </p>
+ <p>There is a default hostname matching procedure defined in
+ <url href="https://tools.ietf.org/html/rfc6125#section-6">RFC 6125, section 6</url>
+ as well as protocol dependent variations defined in
+ <url href="https://tools.ietf.org/html/rfc6125#appendix-B">RFC 6125 appendix B</url>.
+ The default procedure is implemented in
+ <seealso marker="public_key:public_key#pkix_verify_hostname-2">public_key:pkix_verify_hostname/2,3</seealso>.
+ It is possible for a client to hook in modified rules using the options list.
+ </p>
+      <p>Some terminology is needed: the certificate presents the hostname(s) for which it is valid.
+      Those are called <i>Presented IDs</i>. The hostname(s) the client believes it connects to
+      are called <i>Reference IDs</i>. The matching rules aim to verify that at least one of the
+      Reference IDs matches one of the Presented IDs. If not, the verification fails.
+      </p>
+      <p>The IDs contain normal fully qualified domain names such as <c>foo.example.com</c>,
+      but IP addresses are not recommended. The RFC describes why this is not recommended as well
+      as security considerations about how to acquire the Reference IDs.
+      </p>
+ <p>Internationalized domain names are not supported.
+ </p>
+ </section>
+ <section>
+ <title>The verification process</title>
+ <p>Traditionally the Presented IDs were found in the <c>Subject</c> certificate field as <c>CN</c>
+ names. This is still quite common. When printing a certificate they show up as:
+ </p>
+ <code>
+ $ openssl x509 -text &lt; cert.pem
+ ...
+ Subject: C=SE, CN=example.com, CN=*.example.com, O=erlang.org
+ ...
+ </code>
+      <p>The example <c>Subject</c> field has one C, two CN, and one O part. Only the
+      CN (Common Name) parts are used by hostname verification. The other two (C and O) are not used
+      here, even when they contain a domain name as the O part does. The C and O parts are defined
+      elsewhere and are meaningful only for other functions.
+      </p>
+      <p>In the example the Presented IDs are <c>example.com</c> as well as hostnames matching
+      <c>*.example.com</c>. For example, <c>foo.example.com</c> and <c>bar.example.com</c> both
+      match, but <c>foo.bar.example.com</c> does not. The name <c>erlang.org</c> does not match
+      since it is not a CN.
+      </p>
+      <p>In cases where the Presented IDs are fetched from the <c>Subject</c> certificate field, the
+      names may contain wildcard characters. The function handles this as defined in
+      <url href="https://tools.ietf.org/html/rfc6125#section-6.4.3">section 6.4.3 of RFC 6125</url>.
+      </p>
+      <p>There may be at most one wildcard character, and it must be in the first label, for example:
+      <c>*.example.com</c>. This matches <c>foo.example.com</c> but neither <c>example.com</c> nor
+      <c>foo.bar.example.com</c>.
+      </p>
+      <p>There may be label characters before and/or after the wildcard. For example:
+ <c>a*d.example.com</c> matches <c>abcd.example.com</c> and <c>ad.example.com</c>,
+ but not <c>ab.cd.example.com</c>.
+ </p>
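+      <p>Expressed as calls (a sketch; <c>Cert</c> is assumed to carry the Presented ID
+      <c>a*d.example.com</c>):
+      </p>
+      <code>
+true  = public_key:pkix_verify_hostname(Cert, [{dns_id,"abcd.example.com"}]),
+false = public_key:pkix_verify_hostname(Cert, [{dns_id,"ab.cd.example.com"}]).
+      </code>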
+      <p>In the previous example there is no indication of which protocols are expected. So a client
+      has no indication of whether it is connected to a web server, an LDAP server, or maybe a SIP
+      server.
+      There are fields in the certificate that can indicate this. To be more exact, the RFC
+      introduces the usage of the <c>X509v3 Subject Alternative Name</c> in the <c>X509v3 extensions</c>
+      field:
+      </p>
+ <code>
+ $ openssl x509 -text &lt; cert.pem
+ ...
+ X509v3 extensions:
+ X509v3 Subject Alternative Name:
+ DNS:kb.example.org, URI:https://www.example.org
+ ...
+ </code>
+ <p>Here <c>kb.example.org</c> serves any protocol while <c>www.example.org</c> presents a secure
+ web server.
+ </p>
+
+      <p>The next example has both <c>Subject</c> and <c>Subject Alternative Name</c> present:</p>
+ <code>
+ $ openssl x509 -text &lt; cert.pem
+ ...
+ Subject: C=SE, CN=example.com, CN=*.example.com, O=erlang.org
+ ...
+ X509v3 extensions:
+ X509v3 Subject Alternative Name:
+ DNS:kb.example.org, URI:https://www.example.org
+ ...
+ </code>
+      <p>The RFC states that if a certificate defines Presented IDs in a <c>Subject Alternative Name</c>
+      field, the <c>Subject</c> field MUST NOT be used for hostname checking, even if it contains
+      valid CN names.
+      Therefore only <c>kb.example.org</c> and <c>https://www.example.org</c> match. The match fails
+      both for <c>example.com</c> and <c>foo.example.com</c> because they are in the <c>Subject</c>
+      field, which is not checked when the <c>Subject Alternative Name</c> field is present.
+      </p>
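+      <p>Expressed as calls (a sketch; <c>Cert</c> is assumed to be the certificate printed above):
+      </p>
+      <code>
+false = public_key:pkix_verify_hostname(Cert, [{dns_id,"example.com"}]),
+true  = public_key:pkix_verify_hostname(Cert, [{dns_id,"kb.example.org"}]).
+      </code>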
+ </section>
+
+ <section>
+ <marker id="verify_hostname_examples"></marker>
+ <title>Function call examples</title>
+ <note>
+	  <p>Other applications, such as ssl/tls or https, might have options that are passed
+	  down to <c>public_key:pkix_verify_hostname</c>, so you will probably not
+	  have to call it directly.</p>
+ </note>
+ <p>Suppose our client expects to connect to the web server https://www.example.net. This
+      URI is therefore the Reference ID of the client.
+ The call will be:
+ </p>
+ <code>
+ public_key:pkix_verify_hostname(CertFromHost,
+ [{uri_id, "https://www.example.net"}
+ ]).
+ </code>
+    <p>The call will return <c>true</c> or <c>false</c> depending on the check. The caller
+      does not need to handle the matching rules in the RFC. The matching proceeds as follows:
+ </p>
+ <list>
+      <item>If there is a <c>Subject Alternative Name</c> field, the <c>{uri_id,string()}</c> in the
+ function call will be compared to any
+ <c>{uniformResourceIdentifier,string()}</c> in the Certificate field.
+ If the two <c>strings()</c> are equal (case insensitive), there is a match.
+ The same applies for any <c>{dns_id,string()}</c> in the call which is compared
+ with all <c>{dNSName,string()}</c> in the Certificate field.
+ </item>
+      <item>If there is NO <c>Subject Alternative Name</c> field, the <c>Subject</c> field will be
+ checked. All <c>CN</c> names will be compared to all hostnames <i>extracted</i> from
+ <c>{uri_id,string()}</c> and from <c>{dns_id,string()}</c>.
+ </item>
+ </list>
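+    <p>As a sketch of the two cases above (<c>CertWithSAN</c> is assumed to carry
+      <c>DNS:www.example.net</c>, and <c>CertCNOnly</c> only <c>CN=www.example.net</c>):
+    </p>
+    <code>
+true = public_key:pkix_verify_hostname(CertWithSAN, [{dns_id,"www.example.net"}]),
+true = public_key:pkix_verify_hostname(CertCNOnly,  [{uri_id,"https://www.example.net"}]).
+    </code>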
+ </section>
+ <section>
+ <title>Extending the search mechanism</title>
+    <p>The caller can use its own extraction and matching rules. This is done with the two options
+ <c>fqdn_fun</c> and <c>match_fun</c>.
+ </p>
+ <section>
+ <title>Hostname extraction</title>
+ <p>The <c>fqdn_fun</c> extracts hostnames (Fully Qualified Domain Names) from uri_id
+ or other ReferenceIDs that are not pre-defined in the public_key function.
+	Suppose you have some URI with a very special protocol part:
+	<c>myspecial://example.com</c>. Since this is a non-standard URI, no hostname will be
+	extracted for matching CN names in the <c>Subject</c>.</p>
+ <p>To "teach" the function how to extract, you can give a fun which replaces the default
+ extraction function.
+ The <c>fqdn_fun</c> takes one argument and returns
+ either a <c>string()</c> to be matched to each CN-name or the atom <c>default</c> which will invoke
+ the default fqdn extraction function. The return value <c>undefined</c> removes the current
+ URI from the fqdn extraction.
+ </p>
+ <code>
+ ...
+ Extract = fun({uri_id, "myspecial://"++HostName}) -> HostName;
+ (_Else) -> default
+ end,
+ ...
+ public_key:pkix_verify_hostname(CertFromHost, RefIDs,
+ [{fqdn_fun, Extract}])
+ ...
+ </code>
+ </section>
+ <section>
+ <title>Re-defining the match operations</title>
+	<p>The default matching handles dns_id and uri_id. In a uri_id the value is tested for
+	equality with a value from the <c>Subject Alternative Name</c>. If some other kind of matching
+	is needed, use the <c>match_fun</c> option.
+ </p>
+ <p>The <c>match_fun</c> takes two arguments and returns either <c>true</c>,
+ <c>false</c> or <c>default</c>. The value <c>default</c> will invoke the default
+ match function.
+ </p>
+ <code>
+ ...
+ Match = fun({uri_id,"myspecial://"++A},
+ {uniformResourceIdentifier,"myspecial://"++B}) ->
+ my_match(A,B);
+ (_RefID, _PresentedID) ->
+ default
+ end,
+ ...
+ public_key:pkix_verify_hostname(CertFromHost, RefIDs,
+ [{match_fun, Match}]),
+ ...
+ </code>
+	<p>In the case of a match operation between a ReferenceID and a CN value from the <c>Subject</c>
+	field, the first argument to the fun is the hostname extracted from the ReferenceID, and the
+	second argument is the tuple <c>{cn, string()}</c> taken from the <c>Subject</c> field. That
+	makes it possible to have separate matching rules for Presented IDs from the <c>Subject</c>
+	field and from the <c>Subject Alternative Name</c> field.
+	</p>
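+	<p>A sketch of a <c>match_fun</c> clause for the <c>Subject</c> case (the helper
+	<c>my_cn_match/2</c> is hypothetical):
+	</p>
+	<code>
+ Match = fun(FQDN, {cn, CName}) when is_list(FQDN) ->
+                 my_cn_match(FQDN, CName);
+            (_RefID, _PresentedID) ->
+                 default
+         end.
+	</code>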
+	<p>The default matching transforms the ASCII characters in strings to lowercase before comparing.
+	The <c>match_fun</c> is however called without any transformation applied to the strings. The
+	reason is to enable the user to do unforeseen handling of the strings where the original format
+	is needed.
+	</p>
+ </section>
+ </section>
+ <section>
+ <title>"Pinning" a Certificate</title>
+    <p><url href="https://tools.ietf.org/html/rfc6125">RFC 6125</url> defines <i>pinning</i>
+ as:</p>
+ <quote>
+ <p>"The act of establishing a cached name association between
+ the application service's certificate and one of the client's
+ reference identifiers, despite the fact that none of the presented
+ identifiers matches the given reference identifier. ..."
+ </p>
+ </quote>
+    <p>The purpose is to have a mechanism for a human to accept an otherwise faulty certificate.
+      In a web browser, for example, you could get a question like:</p>
+ <quote>
+	<p>"Warning: you wanted to visit the site www.example.com,
+	but the certificate is for shop.example.com. Accept anyway (yes/no)?"
+ </p>
+ </quote>
+ <p>This could be accomplished with the option <c>fail_callback</c> which will
+ be called if the hostname verification fails:
+ </p>
+ <code>
+ -include_lib("public_key/include/public_key.hrl"). % Record def
+ ...
+ Fail = fun(#'OTPCertificate'{}=C) ->
+ case in_my_cache(C) orelse my_accept(C) of
+ true ->
+ enter_my_cache(C),
+ true;
+ false ->
+ false
+            end
+        end,
+ ...
+ public_key:pkix_verify_hostname(CertFromHost, RefIDs,
+ [{fail_callback, Fail}]),
+ ...
+ </code>
+ </section>
+ </section>
+
<section>
<title>SSH Files</title>
diff --git a/lib/public_key/src/public_key.erl b/lib/public_key/src/public_key.erl
index 441a6e98d1..8f185bbbd4 100644
--- a/lib/public_key/src/public_key.erl
+++ b/lib/public_key/src/public_key.erl
@@ -48,6 +48,7 @@
pkix_issuer_id/2,
pkix_normalize_name/1,
pkix_path_validation/3,
+ pkix_verify_hostname/2, pkix_verify_hostname/3,
ssh_decode/2, ssh_encode/2,
ssh_hostkey_fingerprint/1, ssh_hostkey_fingerprint/2,
ssh_curvename2oid/1, oid2ssh_curvename/1,
@@ -811,6 +812,76 @@ pkix_crls_validate(OtpCert, DPAndCRLs0, Options) ->
pkix_crls_validate(OtpCert, DPAndCRLs, DPAndCRLs,
Options, pubkey_crl:init_revokation_state()).
+%%--------------------------------------------------------------------
+-spec pkix_verify_hostname(Cert :: #'OTPCertificate'{} | binary(),
+ ReferenceIDs :: [{uri_id | dns_id | oid(), string()}]) -> boolean().
+
+-spec pkix_verify_hostname(Cert :: #'OTPCertificate'{} | binary(),
+ ReferenceIDs :: [{uri_id | dns_id | oid(), string()}],
+ Options :: proplists:proplist()) -> boolean().
+
+%% Description: Validates a hostname according to RFC 6125
+%%--------------------------------------------------------------------
+pkix_verify_hostname(Cert, ReferenceIDs) ->
+ pkix_verify_hostname(Cert, ReferenceIDs, []).
+
+pkix_verify_hostname(BinCert, ReferenceIDs, Options) when is_binary(BinCert) ->
+ pkix_verify_hostname(pkix_decode_cert(BinCert,otp), ReferenceIDs, Options);
+
+pkix_verify_hostname(Cert = #'OTPCertificate'{tbsCertificate = TbsCert}, ReferenceIDs0, Opts) ->
+ MatchFun = proplists:get_value(match_fun, Opts, undefined),
+ FailCB = proplists:get_value(fail_callback, Opts, fun(_Cert) -> false end),
+ FqdnFun = proplists:get_value(fqdn_fun, Opts, fun verify_hostname_extract_fqdn_default/1),
+
+ ReferenceIDs = [{T,to_string(V)} || {T,V} <- ReferenceIDs0],
+ PresentedIDs =
+ try lists:keyfind(?'id-ce-subjectAltName',
+ #'Extension'.extnID,
+ TbsCert#'OTPTBSCertificate'.extensions)
+ of
+ #'Extension'{extnValue = ExtVals} ->
+ [{T,to_string(V)} || {T,V} <- ExtVals];
+ false ->
+ []
+ catch
+ _:_ -> []
+ end,
+    %% PresentedIDs example: [{dNSName,"ewstest.ericsson.com"}, {dNSName,"www.ericsson.com"}]
+ case PresentedIDs of
+ [] ->
+ %% Fallback to CN-ids [rfc6125, ch6]
+ case TbsCert#'OTPTBSCertificate'.subject of
+ {rdnSequence,RDNseq} ->
+ PresentedCNs =
+ [{cn, to_string(V)}
+ || ATVs <- RDNseq, % RDNseq is list-of-lists
+ #'AttributeTypeAndValue'{type = ?'id-at-commonName',
+ value = {_T,V}} <- ATVs
+ % _T = kind of string (teletexString etc)
+ ],
+ %% Example of PresentedCNs: [{cn,"www.ericsson.se"}]
+ %% match ReferenceIDs to PresentedCNs
+ verify_hostname_match_loop(verify_hostname_fqnds(ReferenceIDs, FqdnFun),
+ PresentedCNs,
+ MatchFun, FailCB, Cert);
+
+ _ ->
+ false
+ end;
+ _ ->
+ %% match ReferenceIDs to PresentedIDs
+ case verify_hostname_match_loop(ReferenceIDs, PresentedIDs,
+ MatchFun, FailCB, Cert) of
+ false ->
+ %% Try to extract DNS-IDs from URIs etc
+ DNS_ReferenceIDs =
+		    [{dns_id,X} || X <- verify_hostname_fqnds(ReferenceIDs, FqdnFun)],
+ verify_hostname_match_loop(DNS_ReferenceIDs, PresentedIDs,
+ MatchFun, FailCB, Cert);
+ true ->
+ true
+ end
+ end.
%%--------------------------------------------------------------------
-spec ssh_decode(binary(), public_key | ssh_file()) -> [{public_key(), Attributes::list()}]
@@ -870,21 +941,31 @@ oid2ssh_curvename(?'secp521r1') -> <<"nistp521">>.
%%--------------------------------------------------------------------
-spec ssh_hostkey_fingerprint(public_key()) -> string().
--spec ssh_hostkey_fingerprint(digest_type(), public_key()) -> string().
+-spec ssh_hostkey_fingerprint( digest_type(), public_key()) -> string()
+ ; ([digest_type()], public_key()) -> [string()]
+ .
ssh_hostkey_fingerprint(Key) ->
- sshfp_string(md5, Key).
+ sshfp_string(md5, public_key:ssh_encode(Key,ssh2_pubkey) ).
-ssh_hostkey_fingerprint(HashAlg, Key) ->
- lists:concat([sshfp_alg_name(HashAlg),
- [$: | sshfp_string(HashAlg, Key)]
- ]).
+ssh_hostkey_fingerprint(HashAlgs, Key) when is_list(HashAlgs) ->
+ EncKey = public_key:ssh_encode(Key, ssh2_pubkey),
+ [sshfp_full_string(HashAlg,EncKey) || HashAlg <- HashAlgs];
+ssh_hostkey_fingerprint(HashAlg, Key) when is_atom(HashAlg) ->
+ EncKey = public_key:ssh_encode(Key, ssh2_pubkey),
+ sshfp_full_string(HashAlg, EncKey).
-sshfp_string(HashAlg, Key) ->
+
+sshfp_string(HashAlg, EncodedKey) ->
%% Other HashAlgs than md5 will be printed with
%% other formats than hextstr by
%% ssh-keygen -E <alg> -lf <file>
- fp_fmt(sshfp_fmt(HashAlg), crypto:hash(HashAlg, public_key:ssh_encode(Key,ssh2_pubkey))).
+ fp_fmt(sshfp_fmt(HashAlg), crypto:hash(HashAlg, EncodedKey)).
+
+sshfp_full_string(HashAlg, EncKey) ->
+ lists:concat([sshfp_alg_name(HashAlg),
+ [$: | sshfp_string(HashAlg, EncKey)]
+ ]).
sshfp_alg_name(sha) -> "SHA1";
sshfp_alg_name(Alg) -> string:to_upper(atom_to_list(Alg)).
@@ -1165,8 +1246,11 @@ ec_curve_spec( #'ECParameters'{fieldID = FieldId, curve = PCurve, base = Base, o
FieldId#'FieldID'.parameters},
Curve = {PCurve#'Curve'.a, PCurve#'Curve'.b, none},
{Field, Curve, Base, Order, CoFactor};
-ec_curve_spec({namedCurve, OID}) ->
- pubkey_cert_records:namedCurves(OID).
+ec_curve_spec({namedCurve, OID}) when is_tuple(OID), is_integer(element(1,OID)) ->
+ ec_curve_spec({namedCurve, pubkey_cert_records:namedCurves(OID)});
+ec_curve_spec({namedCurve, Name}) when is_atom(Name) ->
+ crypto:ec_curve(Name).
+
ec_key({PubKey, PrivateKey}, Params) ->
#'ECPrivateKey'{version = 1,
@@ -1245,3 +1329,96 @@ ascii_to_lower(String) ->
end)>>
||
<<C>> <= iolist_to_binary(String) >>.
+
+%%%----------------------------------------------------------------
+%%% pkix_verify_hostname help functions
+verify_hostname_extract_fqdn_default({dns_id,S}) ->
+ S;
+verify_hostname_extract_fqdn_default({uri_id,URI}) ->
+ {ok,{https,_,Host,_,_,_}} = http_uri:parse(URI),
+ Host.
+
+
+verify_hostname_fqnds(L, FqdnFun) ->
+ [E || E0 <- L,
+ E <- [try case FqdnFun(E0) of
+ default -> verify_hostname_extract_fqdn_default(E0);
+ undefined -> undefined; % will make the "is_list(E)" test fail
+ Other -> Other
+ end
+ catch _:_-> undefined % will make the "is_list(E)" test fail
+ end],
+ is_list(E),
+ E =/= "",
+ {error,einval} == inet:parse_address(E)
+ ].
+
+
+-define(srvName_OID, {1,3,6,1,4,1,434,2,2,1,37,0}).
+
+verify_hostname_match_default(Ref, Pres) ->
+ verify_hostname_match_default0(to_lower_ascii(Ref), to_lower_ascii(Pres)).
+
+verify_hostname_match_default0(FQDN=[_|_], {cn,FQDN}) ->
+ not lists:member($*, FQDN);
+verify_hostname_match_default0(FQDN=[_|_], {cn,Name=[_|_]}) ->
+ [F1|Fs] = string:tokens(FQDN, "."),
+ [N1|Ns] = string:tokens(Name, "."),
+ match_wild(F1,N1) andalso Fs==Ns;
+verify_hostname_match_default0({dns_id,R}, {dNSName,P}) ->
+ R==P;
+verify_hostname_match_default0({uri_id,R}, {uniformResourceIdentifier,P}) ->
+ R==P;
+verify_hostname_match_default0({srv_id,R}, {T,P}) when T == srvName ;
+ T == ?srvName_OID ->
+ R==P;
+verify_hostname_match_default0(_, _) ->
+ false.
+
+
+match_wild(A, [$*|B]) -> match_wild_suffixes(A, B);
+match_wild([C|A], [ C|B]) -> match_wild(A, B);
+match_wild([], []) -> true;
+match_wild(_, _) -> false.
+
+%% Match the parts after the only wildcard by comparing them from the end
+match_wild_suffixes(A, B) -> match_wild_sfx(lists:reverse(A), lists:reverse(B)).
+
+match_wild_sfx([$*|_],  _) -> false; % Bad name (no wildcards allowed)
+match_wild_sfx(_,  [$*|_]) -> false; % Bad pattern (no more wildcards allowed)
+match_wild_sfx([A|Ar], [A|Br]) -> match_wild_sfx(Ar, Br);
+match_wild_sfx(Ar,  []) -> not lists:member($*, Ar); % Check for bad name (contains wildcards)
+match_wild_sfx(_, _) -> false.
+
+
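+%% Check each Reference ID against each Presented ID. A pair that does not match
+%% by itself is offered to FailCB, which may still accept the certificate.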
+verify_hostname_match_loop(Refs0, Pres0, undefined, FailCB, Cert) ->
+ Pres = lists:map(fun to_lower_ascii/1, Pres0),
+ Refs = lists:map(fun to_lower_ascii/1, Refs0),
+ lists:any(
+ fun(R) ->
+ lists:any(fun(P) ->
+ verify_hostname_match_default(R,P) orelse FailCB(Cert)
+ end, Pres)
+ end, Refs);
+verify_hostname_match_loop(Refs, Pres, MatchFun, FailCB, Cert) ->
+ lists:any(
+ fun(R) ->
+ lists:any(fun(P) ->
+ (case MatchFun(R,P) of
+ default -> verify_hostname_match_default(R,P);
+ Bool -> Bool
+ end) orelse FailCB(Cert)
+ end,
+ Pres)
+ end,
+ Refs).
+
+
+to_lower_ascii(S) when is_list(S) -> lists:map(fun to_lower_ascii/1, S);
+to_lower_ascii({T,S}) -> {T, to_lower_ascii(S)};
+to_lower_ascii(C) when $A =< C,C =< $Z -> C + ($a-$A);
+to_lower_ascii(C) -> C.
+
+to_string(S) when is_list(S) -> S;
+to_string(B) when is_binary(B) -> binary_to_list(B).
+
diff --git a/lib/public_key/test/public_key_SUITE.erl b/lib/public_key/test/public_key_SUITE.erl
index cd24819899..68aa152911 100644
--- a/lib/public_key/test/public_key_SUITE.erl
+++ b/lib/public_key/test/public_key_SUITE.erl
@@ -45,13 +45,17 @@ all() ->
{group, sign_verify},
pkix, pkix_countryname, pkix_emailaddress, pkix_path_validation,
pkix_iso_rsa_oid, pkix_iso_dsa_oid, pkix_crl, general_name,
+ pkix_verify_hostname_cn,
+ pkix_verify_hostname_subjAltName,
+ pkix_verify_hostname_options,
short_cert_issuer_hash, short_crl_issuer_hash,
ssh_hostkey_fingerprint_md5_implicit,
ssh_hostkey_fingerprint_md5,
ssh_hostkey_fingerprint_sha,
ssh_hostkey_fingerprint_sha256,
ssh_hostkey_fingerprint_sha384,
- ssh_hostkey_fingerprint_sha512
+ ssh_hostkey_fingerprint_sha512,
+ ssh_hostkey_fingerprint_list
].
groups() ->
@@ -90,20 +94,21 @@ end_per_group(_GroupName, Config) ->
%%-------------------------------------------------------------------
init_per_testcase(TestCase, Config) ->
case TestCase of
- ssh_hostkey_fingerprint_md5_implicit -> init_fingerprint_testcase(md5, Config);
- ssh_hostkey_fingerprint_md5 -> init_fingerprint_testcase(md5, Config);
- ssh_hostkey_fingerprint_sha -> init_fingerprint_testcase(sha, Config);
- ssh_hostkey_fingerprint_sha256 -> init_fingerprint_testcase(sha256, Config);
- ssh_hostkey_fingerprint_sha384 -> init_fingerprint_testcase(sha384, Config);
- ssh_hostkey_fingerprint_sha512 -> init_fingerprint_testcase(sha512, Config);
+ ssh_hostkey_fingerprint_md5_implicit -> init_fingerprint_testcase([md5], Config);
+ ssh_hostkey_fingerprint_md5 -> init_fingerprint_testcase([md5], Config);
+ ssh_hostkey_fingerprint_sha -> init_fingerprint_testcase([sha], Config);
+ ssh_hostkey_fingerprint_sha256 -> init_fingerprint_testcase([sha256], Config);
+ ssh_hostkey_fingerprint_sha384 -> init_fingerprint_testcase([sha384], Config);
+ ssh_hostkey_fingerprint_sha512 -> init_fingerprint_testcase([sha512], Config);
+ ssh_hostkey_fingerprint_list -> init_fingerprint_testcase([sha,md5], Config);
_ -> init_common_per_testcase(Config)
end.
-init_fingerprint_testcase(Alg, Config) ->
- CryptoSupports = lists:member(Alg, proplists:get_value(hashs, crypto:supports())),
- case CryptoSupports of
- false -> {skip,{Alg,not_supported}};
- true -> init_common_per_testcase(Config)
+init_fingerprint_testcase(Algs, Config) ->
+ Hashs = proplists:get_value(hashs, crypto:supports(), []),
+ case Algs -- Hashs of
+ [] -> init_common_per_testcase(Config);
+ UnsupportedAlgs -> {skip,{UnsupportedAlgs,not_supported}}
end.
init_common_per_testcase(Config0) ->
@@ -597,6 +602,14 @@ ssh_hostkey_fingerprint_sha512(_Config) ->
Expected = public_key:ssh_hostkey_fingerprint(sha512, ssh_hostkey(rsa)).
%%--------------------------------------------------------------------
+%% Since this kind of fingerprint is not available yet on standard
+%% distros, we do it like this instead.
+ssh_hostkey_fingerprint_list(_Config) ->
+ Expected = ["SHA1:Soammnaqg06jrm2jivMSnzQGlmk",
+ "MD5:4b:0b:63:de:0f:a7:3a:ab:2c:cc:2d:d1:21:37:1d:3a"],
+ Expected = public_key:ssh_hostkey_fingerprint([sha,md5], ssh_hostkey(rsa)).
+
+%%--------------------------------------------------------------------
encrypt_decrypt() ->
[{doc, "Test public_key:encrypt_private and public_key:decrypt_public"}].
encrypt_decrypt(Config) when is_list(Config) ->
@@ -814,6 +827,114 @@ pkix_path_validation(Config) when is_list(Config) ->
ok.
%%--------------------------------------------------------------------
+%% To generate the PEM file contents:
+%%
+%% openssl req -x509 -nodes -newkey rsa:1024 -keyout /dev/null -subj '/C=SE/CN=example.com/CN=*.foo.example.com/CN=a*b.bar.example.com/O=erlang.org' > public_key_SUITE_data/pkix_verify_hostname_cn.pem
+%%
+%% Note that the same pem-file is used in pkix_verify_hostname_options/1
+%%
+%% Subject: C=SE, CN=example.com, CN=*.foo.example.com, CN=a*b.bar.example.com, O=erlang.org
+%% extensions = no subjAltName
+
+pkix_verify_hostname_cn(Config) ->
+ DataDir = proplists:get_value(data_dir, Config),
+ {ok,Bin} = file:read_file(filename:join(DataDir,"pkix_verify_hostname_cn.pem")),
+ Cert = public_key:pkix_decode_cert(element(2,hd(public_key:pem_decode(Bin))), otp),
+
+ %% Check that 1) only CNs are checked,
+ %% 2) an empty label does not match a wildcard and
+ %% 3) a wildcard does not match more than one label
+ false = public_key:pkix_verify_hostname(Cert, [{dns_id,"erlang.org"},
+ {dns_id,"foo.EXAMPLE.com"},
+ {dns_id,"b.a.foo.EXAMPLE.com"}]),
+
+ %% Check that a hostname is extracted from a https-uri and used for checking:
+ true = public_key:pkix_verify_hostname(Cert, [{uri_id,"HTTPS://EXAMPLE.com"}]),
+
+ %% Check wildcard matching one label:
+ true = public_key:pkix_verify_hostname(Cert, [{dns_id,"a.foo.EXAMPLE.com"}]),
+
+ %% Check wildcard with surrounding chars matches one label:
+ true = public_key:pkix_verify_hostname(Cert, [{dns_id,"accb.bar.EXAMPLE.com"}]),
+
+ %% Check that a wildcard with surrounding chars matches an empty string:
+ true = public_key:pkix_verify_hostname(Cert, [{uri_id,"https://ab.bar.EXAMPLE.com"}]).
+
+%%--------------------------------------------------------------------
+%% To generate the PEM file contents:
+%%
+%% openssl req -x509 -nodes -newkey rsa:1024 -keyout /dev/null -extensions SAN -config public_key_SUITE_data/verify_hostname.conf 2>/dev/null > public_key_SUITE_data/pkix_verify_hostname_subjAltName.pem
+%%
+%% Subject: C=SE, CN=example.com
+%% Subject Alternative Name: DNS:kb.example.org, URI:http://www.example.org, URI:https://wws.example.org
+
+pkix_verify_hostname_subjAltName(Config) ->
+ DataDir = proplists:get_value(data_dir, Config),
+ {ok,Bin} = file:read_file(filename:join(DataDir,"pkix_verify_hostname_subjAltName.pem")),
+ Cert = public_key:pkix_decode_cert(element(2,hd(public_key:pem_decode(Bin))), otp),
+
+ %% Check that neither a uri nor dns hostname matches a CN if subjAltName is present:
+ false = public_key:pkix_verify_hostname(Cert, [{uri_id,"https://example.com"},
+ {dns_id,"example.com"}]),
+
+ %% Check that a uri_id matches a URI subjAltName:
+ true = public_key:pkix_verify_hostname(Cert, [{uri_id,"https://wws.example.org"}]),
+
+ %% Check that a dns_id does not match a URI subjAltName:
+ false = public_key:pkix_verify_hostname(Cert, [{dns_id,"www.example.org"},
+ {dns_id,"wws.example.org"}]),
+
+ %% Check that a dns_id matches a DNS subjAltName:
+ true = public_key:pkix_verify_hostname(Cert, [{dns_id,"kb.example.org"}]).
+
+%%--------------------------------------------------------------------
+%% Uses the pem-file for pkix_verify_hostname_cn
+%% Subject: C=SE, CN=example.com, CN=*.foo.example.com, CN=a*b.bar.example.com, O=erlang.org
+pkix_verify_hostname_options(Config) ->
+ DataDir = proplists:get_value(data_dir, Config),
+ {ok,Bin} = file:read_file(filename:join(DataDir,"pkix_verify_hostname_cn.pem")),
+ Cert = public_key:pkix_decode_cert(element(2,hd(public_key:pem_decode(Bin))), otp),
+
+ %% Check that the fail_callback is called and is presented the correct certificate:
+ true = public_key:pkix_verify_hostname(Cert, [{dns_id,"erlang.org"}],
+ [{fail_callback,
+ fun(#'OTPCertificate'{}=C) when C==Cert ->
+ true; % To test the return value matters
+ (#'OTPCertificate'{}=C) ->
+ ct:log("~p:~p: Wrong cert:~n~p~nExpect~n~p",
+ [?MODULE, ?LINE, C, Cert]),
+ ct:fail("Wrong cert, see log");
+ (C) ->
+ ct:log("~p:~p: Bad cert: ~p",[?MODULE,?LINE,C]),
+ ct:fail("Bad cert, see log")
+ end}]),
+
+ %% Check the callback for user-provided match functions:
+ true = public_key:pkix_verify_hostname(Cert, [{dns_id,"very.wrong.domain"}],
+ [{match_fun,
+ fun("very.wrong.domain", {cn,"example.com"}) ->
+ true;
+ (_, _) ->
+ false
+ end}]),
+ false = public_key:pkix_verify_hostname(Cert, [{dns_id,"not.example.com"}],
+ [{match_fun, fun(_, _) -> default end}]),
+ true = public_key:pkix_verify_hostname(Cert, [{dns_id,"example.com"}],
+ [{match_fun, fun(_, _) -> default end}]),
+
+ %% Check the callback for user-provided fqdn extraction:
+ true = public_key:pkix_verify_hostname(Cert, [{uri_id,"some://very.wrong.domain"}],
+ [{fqdn_fun,
+ fun({uri_id, "some://very.wrong.domain"}) ->
+ "example.com";
+ (_) ->
+ ""
+ end}]),
+ true = public_key:pkix_verify_hostname(Cert, [{uri_id,"https://example.com"}],
+ [{fqdn_fun, fun(_) -> default end}]),
+ false = public_key:pkix_verify_hostname(Cert, [{uri_id,"some://very.wrong.domain"}]).
+
+%%--------------------------------------------------------------------
pkix_iso_rsa_oid() ->
[{doc, "Test workaround for supporting certs that use ISO oids"
" 1.3.14.3.2.29 instead of PKIX/PKCS oid"}].
diff --git a/lib/public_key/test/public_key_SUITE_data/pkix_verify_hostname_cn.pem b/lib/public_key/test/public_key_SUITE_data/pkix_verify_hostname_cn.pem
new file mode 100644
index 0000000000..9f7b428f9a
--- /dev/null
+++ b/lib/public_key/test/public_key_SUITE_data/pkix_verify_hostname_cn.pem
@@ -0,0 +1,17 @@
+-----BEGIN CERTIFICATE-----
+MIICsjCCAhugAwIBAgIJAMCGx1ezaJFRMA0GCSqGSIb3DQEBCwUAMHIxCzAJBgNV
+BAYTAlNFMRQwEgYDVQQDDAtleGFtcGxlLmNvbTEaMBgGA1UEAwwRKi5mb28uZXhh
+bXBsZS5jb20xHDAaBgNVBAMME2EqYi5iYXIuZXhhbXBsZS5jb20xEzARBgNVBAoM
+CmVybGFuZy5vcmcwHhcNMTYxMjIwMTUwNDUyWhcNMTcwMTE5MTUwNDUyWjByMQsw
+CQYDVQQGEwJTRTEUMBIGA1UEAwwLZXhhbXBsZS5jb20xGjAYBgNVBAMMESouZm9v
+LmV4YW1wbGUuY29tMRwwGgYDVQQDDBNhKmIuYmFyLmV4YW1wbGUuY29tMRMwEQYD
+VQQKDAplcmxhbmcub3JnMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDVGJgZ
+defGucvMXf0RrEm6Hb18IfVUo9IV6swSP/kwAu/608ZIZdzlfp2pxC0e72a4E3WN
+4vrGxAr2wMMQOiyoy4qlAeLX27THJ6Q4Vl82fc6QuOJbScKIydSZ4KoB+luGlBu5
+b6xYh2pBbneKFpsecmK5rsWtTactjD4n1tKjUwIDAQABo1AwTjAdBgNVHQ4EFgQU
+OCtzidUeaDva7qp12T0CQrgfLW4wHwYDVR0jBBgwFoAUOCtzidUeaDva7qp12T0C
+QrgfLW4wDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOBgQCAz+ComCMo9Qbu
+PHxG7pv3mQvoxrMFva/Asg4o9mW2mDyrk0DwI4zU8vMHbSRKSBYGm4TATXsQkDQT
+gJw/bxhISnhZZtPC7Yup8kJCkJ6S6EDLYrlzgsRqfeU6jWim3nbfaLyMi9dHFDMk
+HULnyNNW3qxTEKi8Wo2sCMej4l7KFg==
+-----END CERTIFICATE-----
diff --git a/lib/public_key/test/public_key_SUITE_data/pkix_verify_hostname_subjAltName.pem b/lib/public_key/test/public_key_SUITE_data/pkix_verify_hostname_subjAltName.pem
new file mode 100644
index 0000000000..83e1ad37b3
--- /dev/null
+++ b/lib/public_key/test/public_key_SUITE_data/pkix_verify_hostname_subjAltName.pem
@@ -0,0 +1,14 @@
+-----BEGIN CERTIFICATE-----
+MIICEjCCAXugAwIBAgIJANwliLph5EiAMA0GCSqGSIb3DQEBCwUAMCMxCzAJBgNV
+BAYTAlNFMRQwEgYDVQQDEwtleGFtcGxlLmNvbTAeFw0xNjEyMjAxNTEyMjRaFw0x
+NzAxMTkxNTEyMjRaMCMxCzAJBgNVBAYTAlNFMRQwEgYDVQQDEwtleGFtcGxlLmNv
+bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAydstIN157w8QxkVaOl3wm81j
+fgZ8gqO3BXkECPF6bw5ewLlmePL6Qs4RypsaRe7cKJ9rHFlwhpdcYkxWSWEt2N7Z
+Ry3N4SjuU04ohWbYgy3ijTt7bJg7jOV1Dh56BnI4hwhQj0oNFizNZOeRRfEzdMnS
++uk03t/Qre2NS7KbwnUCAwEAAaNOMEwwSgYDVR0RBEMwQYIOa2IuZXhhbXBsZS5v
+cmeGFmh0dHA6Ly93d3cuZXhhbXBsZS5vcmeGF2h0dHBzOi8vd3dzLmV4YW1wbGUu
+b3JnMA0GCSqGSIb3DQEBCwUAA4GBAKqFqW5gCso422bXriCBJoygokOTTOw1Rzpq
+K8Mm0B8W9rrW9OTkoLEcjekllZcUCZFin2HovHC5HlHZz+mQvBI1M6sN2HVQbSzS
+EgL66U9gwJVnn9/U1hXhJ0LO28aGbyE29DxnewNR741dWN3oFxCdlNaO6eMWaEsO
+gduJ5sDl
+-----END CERTIFICATE-----
diff --git a/lib/public_key/test/public_key_SUITE_data/verify_hostname.conf b/lib/public_key/test/public_key_SUITE_data/verify_hostname.conf
new file mode 100644
index 0000000000..a28864dc78
--- /dev/null
+++ b/lib/public_key/test/public_key_SUITE_data/verify_hostname.conf
@@ -0,0 +1,16 @@
+[req]
+prompt = no
+distinguished_name = DN
+
+[DN]
+C=SE
+CN=example.com
+
+[SAN]
+subjectAltName = @alt_names
+
+[alt_names]
+DNS = kb.example.org
+URI.1 = http://www.example.org
+URI.2 = https://wws.example.org
+
diff --git a/lib/reltool/src/reltool.hrl b/lib/reltool/src/reltool.hrl
index 3b1e868757..c61c3a0c71 100644
--- a/lib/reltool/src/reltool.hrl
+++ b/lib/reltool/src/reltool.hrl
@@ -289,8 +289,8 @@
"^lib",
"^releases"]).
-define(EMBEDDED_EXCL_SYS_FILTERS,
- ["^bin/(erlc|dialyzer|typer)(|\\.exe)\$",
- "^erts.*/bin/(erlc|dialyzer|typer)(|\\.exe)\$",
+ ["^bin/(erlc|dialyzer)(|\\.exe)\$",
+ "^erts.*/bin/(erlc|dialyzer)(|\\.exe)\$",
"^erts.*/bin/.*(debug|pdb)"]).
-define(EMBEDDED_INCL_APP_FILTERS, ["^ebin",
"^include",
@@ -303,7 +303,7 @@
"^erts.*/bin",
"^lib\$"]).
-define(STANDALONE_EXCL_SYS_FILTERS,
- ["^erts.*/bin/(erlc|dialyzer|typer)(|\\.exe)\$",
+ ["^erts.*/bin/(erlc|dialyzer)(|\\.exe)\$",
"^erts.*/bin/(start|escript|to_erl|run_erl)(|\\.exe)\$",
"^erts.*/bin/.*(debug|pdb)"]).
-define(STANDALONE_INCL_APP_FILTERS, ["^ebin",
diff --git a/lib/runtime_tools/doc/src/LTTng.xml b/lib/runtime_tools/doc/src/LTTng.xml
index 82a4c79379..7aae5e5c41 100644
--- a/lib/runtime_tools/doc/src/LTTng.xml
+++ b/lib/runtime_tools/doc/src/LTTng.xml
@@ -1,4 +1,4 @@
-<?xml version="1.0" encoding="utf8" ?>
+<?xml version="1.0" encoding="utf-8" ?>
<!DOCTYPE chapter SYSTEM "chapter.dtd">
<chapter>
<header>
diff --git a/lib/runtime_tools/src/Makefile b/lib/runtime_tools/src/Makefile
index 2c902952a1..0ef6b1c521 100644
--- a/lib/runtime_tools/src/Makefile
+++ b/lib/runtime_tools/src/Makefile
@@ -42,7 +42,6 @@ MODULES= \
runtime_tools_sup \
dbg \
dyntrace \
- percept_profile \
system_information \
observer_backend \
ttb_autostart\
diff --git a/lib/runtime_tools/src/percept_profile.erl b/lib/runtime_tools/src/percept_profile.erl
deleted file mode 100644
index 1e8e913b80..0000000000
--- a/lib/runtime_tools/src/percept_profile.erl
+++ /dev/null
@@ -1,195 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2008-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
-
-%%
-%% @doc Percept Collector
-%%
-%% This module provides the user interface for the percept data
-% collection (profiling).
-%%
-
--module(percept_profile).
--export([
- start/1,
- start/2,
- start/3,
- stop/0
- ]).
-
-
-%%==========================================================================
-%%
-%% Type definitions
-%%
-%%==========================================================================
-
-%% @type percept_option() = procs | ports | exclusive
-
--type percept_option() :: 'procs' | 'ports' | 'exclusive' | 'scheduler'.
-
-%%==========================================================================
-%%
-%% Interface functions
-%%
-%%==========================================================================
-
-%% @spec start(Filename::string()) -> {ok, Port} | {already_started, Port}
-%% @equiv start(Filename, [procs])
-
--spec start(Filename :: file:filename()) ->
- {'ok', port()} | {'already_started', port()}.
-
-start(Filename) ->
- profile_to_file(Filename, [procs]).
-
-%% @spec start(Filename::string(), [percept_option()]) -> {ok, Port} | {already_started, Port}
-%% Port = port()
-%% @doc Starts profiling with supplied options.
-%% All events are stored in the file given by Filename.
-%% An explicit call to stop/0 is needed to stop profiling.
-
--spec start(Filename :: file:filename(),
- Options :: [percept_option()]) ->
- {'ok', port()} | {'already_started', port()}.
-
-start(Filename, Options) ->
- profile_to_file(Filename, Options).
-
-%% @spec start(string(), MFA::mfa(), [percept_option()]) -> ok | {already_started, Port} | {error, not_started}
-%% Port = port()
-%% @doc Starts profiling at the entrypoint specified by the MFA. All events are collected,
-%% this means that processes outside the scope of the entry-point are also profiled.
-%% No explicit call to stop/0 is needed, the profiling stops when
-%% the entry function returns.
-
--spec start(Filename :: file:filename(),
- Entry :: {atom(), atom(), list()},
- Options :: [percept_option()]) ->
- 'ok' | {'already_started', port()} | {'error', 'not_started'}.
-
-start(Filename, {Module, Function, Args}, Options) ->
- case whereis(percept_port) of
- undefined ->
- {ok, _} = profile_to_file(Filename, Options),
- erlang:apply(Module, Function, Args),
- stop();
- Port ->
- {already_started, Port}
- end.
-
-deliver_all_trace() ->
- Tracee = self(),
- Tracer = spawn(fun() ->
- receive {Tracee, start} -> ok end,
- Ref = erlang:trace_delivered(Tracee),
- receive {trace_delivered, Tracee, Ref} -> Tracee ! {self(), ok} end
- end),
- erlang:trace(Tracee, true, [procs, {tracer, Tracer}]),
- Tracer ! {Tracee, start},
- receive {Tracer, ok} -> ok end,
- erlang:trace(Tracee, false, [procs]),
- ok.
-
-%% @spec stop() -> ok | {'error', 'not_started'}
-%% @doc Stops profiling.
-
--spec stop() -> 'ok' | {'error', 'not_started'}.
-
-stop() ->
- _ = erlang:system_profile(undefined, [runnable_ports, runnable_procs]),
- erlang:trace(all, false, [procs, ports, timestamp]),
- deliver_all_trace(),
- case whereis(percept_port) of
- undefined ->
- {error, not_started};
- Port ->
- erlang:port_command(Port, erlang:term_to_binary({profile_stop, erlang:timestamp()})),
- %% trace delivered?
- erlang:port_close(Port),
- ok
- end.
-
-%%==========================================================================
-%%
-%% Auxiliary functions
-%%
-%%==========================================================================
-
-profile_to_file(Filename, Opts) ->
- case whereis(percept_port) of
- undefined ->
- io:format("Starting profiling.~n", []),
-
- erlang:system_flag(multi_scheduling, block),
- Port = (dbg:trace_port(file, Filename))(),
- % Send start time
- erlang:port_command(Port, erlang:term_to_binary({profile_start, erlang:timestamp()})),
- erlang:system_flag(multi_scheduling, unblock),
-
- %% Register Port
- erlang:register(percept_port, Port),
- set_tracer(Port, Opts),
- {ok, Port};
- Port ->
- io:format("Profiling already started at port ~p.~n", [Port]),
- {already_started, Port}
- end.
-
-%% set_tracer
-
-set_tracer(Port, Opts) ->
- {TOpts, POpts} = parse_profile_options(Opts),
- % Setup profiling and tracing
- erlang:trace(all, true, [{tracer, Port}, timestamp | TOpts]),
- _ = erlang:system_profile(Port, POpts),
- ok.
-
-%% parse_profile_options
-
-parse_profile_options(Opts) ->
- parse_profile_options(Opts, {[],[]}).
-
-parse_profile_options([], Out) ->
- Out;
-parse_profile_options([Opt|Opts], {TOpts, POpts}) ->
- case Opt of
- procs ->
- parse_profile_options(Opts, {
- [procs | TOpts],
- [runnable_procs | POpts]
- });
- ports ->
- parse_profile_options(Opts, {
- [ports | TOpts],
- [runnable_ports | POpts]
- });
- scheduler ->
- parse_profile_options(Opts, {
- TOpts,
- [scheduler | POpts]
- });
- exclusive ->
- parse_profile_options(Opts, {
- TOpts,
- [exclusive | POpts]
- });
- _ ->
- parse_profile_options(Opts, {TOpts, POpts})
- end.
diff --git a/lib/runtime_tools/src/runtime_tools.app.src b/lib/runtime_tools/src/runtime_tools.app.src
index 690c61a4c3..d6c1f17e70 100644
--- a/lib/runtime_tools/src/runtime_tools.app.src
+++ b/lib/runtime_tools/src/runtime_tools.app.src
@@ -20,8 +20,8 @@
{application, runtime_tools,
[{description, "RUNTIME_TOOLS"},
{vsn, "%VSN%"},
- {modules, [appmon_info, dbg,observer_backend,percept_profile,
- runtime_tools,runtime_tools_sup,erts_alloc_config,
+ {modules, [appmon_info, dbg,observer_backend,runtime_tools,
+ runtime_tools_sup,erts_alloc_config,
ttb_autostart,dyntrace,system_information,
msacc]},
{registered, [runtime_tools_sup]},
@@ -30,5 +30,3 @@
{mod, {runtime_tools, []}},
{runtime_dependencies, ["stdlib-3.0","mnesia-4.12","kernel-5.0",
"erts-8.0"]}]}.
-
-
diff --git a/lib/sasl/doc/src/systools.xml b/lib/sasl/doc/src/systools.xml
index fa503fa573..4ca4a08329 100644
--- a/lib/sasl/doc/src/systools.xml
+++ b/lib/sasl/doc/src/systools.xml
@@ -268,7 +268,7 @@
<fsummary>Creates a release package.</fsummary>
<type>
<v>Name = string()</v>
- <v>Opt = {dirs,[IncDir]} | {path,[Dir]} | {variables,[Var]} | {var_tar,VarTar} | {erts,Dir} | src_tests | exref | {exref,[App]} | silent | {outdir,Dir}</v>
+        <v>Opt = {dirs,[IncDir]} | {path,[Dir]} | {variables,[Var]} | {var_tar,VarTar} | {erts,Dir} | src_tests | exref | {exref,[App]} | silent | {outdir,Dir} | no_warn_sasl | warnings_as_errors</v>
<v>&nbsp;Dir = string()</v>
<v>&nbsp;IncDir = src | include | atom()</v>
<v>&nbsp;Var = {VarName,PreFix}</v>
@@ -297,6 +297,10 @@
directory unless <c>Name</c> contains a path. If option
<c>{outdir,Dir}</c> is specified, it is located in <c>Dir</c>
instead.</p>
+ <p>If SASL is not included as an application in
+ the <c>.rel</c> file, a warning is issued because such a
+ release cannot be used in an upgrade. To turn off this
+ warning, add option <c>no_warn_sasl</c>.</p>
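+          <p>For example, <c>systools:make_tar("myrel", [no_warn_sasl])</c> suppresses
+          the warning (<c>"myrel"</c> being a placeholder release name).</p>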
<p>By default, the release package contains the directories
<c>lib/App-Vsn/ebin</c> and <c>lib/App-Vsn/priv</c> for each
included application. If more directories are to be included,
diff --git a/lib/sasl/src/release_handler.erl b/lib/sasl/src/release_handler.erl
index 1fcc9a0288..3250311b8f 100644
--- a/lib/sasl/src/release_handler.erl
+++ b/lib/sasl/src/release_handler.erl
@@ -831,7 +831,7 @@ do_unpack_release(Root, RelDir, ReleaseName, Releases) ->
Tar = filename:join(RelDir, ReleaseName ++ ".tar.gz"),
do_check_file(Tar, regular),
Rel = ReleaseName ++ ".rel",
- extract_rel_file(filename:join("releases", Rel), Tar, Root),
+ _ = extract_rel_file(filename:join("releases", Rel), Tar, Root),
RelFile = filename:join(RelDir, Rel),
Release = check_rel(Root, RelFile, false),
#release{vsn = Vsn} = Release,
@@ -1841,14 +1841,12 @@ do_check_file(Master, FileName, Type) ->
%% by the user in another way, i.e. ignore this here.
%%-----------------------------------------------------------------
extract_rel_file(Rel, Tar, Root) ->
- erl_tar:extract(Tar, [{files, [Rel]}, {cwd, Root}, compressed]).
+ _ = erl_tar:extract(Tar, [{files, [Rel]}, {cwd, Root}, compressed]).
extract_tar(Root, Tar) ->
case erl_tar:extract(Tar, [keep_old_files, {cwd, Root}, compressed]) of
ok ->
ok;
- {error, Reason, Name} -> % Old erl_tar.
- throw({error, {cannot_extract_file, Name, Reason}});
{error, {Name, Reason}} -> % New erl_tar (R3A).
throw({error, {cannot_extract_file, Name, Reason}})
end.
diff --git a/lib/sasl/src/systools_make.erl b/lib/sasl/src/systools_make.erl
index 6a16c8689e..f03b03dc08 100644
--- a/lib/sasl/src/systools_make.erl
+++ b/lib/sasl/src/systools_make.erl
@@ -94,7 +94,11 @@ make_script(RelName, Output, Flags) when is_list(RelName),
Warnings = wsasl(Flags, Warnings0),
case systools_lib:werror(Flags, Warnings) of
true ->
- return(ok,Warnings,Flags);
+ Warnings1 = [W || {warning,W}<-Warnings],
+ return({error,?MODULE,
+ {warnings_treated_as_errors,Warnings1}},
+ Warnings,
+ Flags);
false ->
case generate_script(Output,Release,Appls,Flags) of
ok ->
@@ -115,7 +119,6 @@ make_script(RelName, _Output, Flags) when is_list(Flags) ->
make_script(RelName, _Output, Flags) ->
badarg(Flags,[RelName, Flags]).
-
wsasl(Options, Warnings) ->
case lists:member(no_warn_sasl,Options) of
true -> lists:delete({warning,missing_sasl},Warnings);
@@ -148,21 +151,10 @@ get_outdir(Flags) ->
return(ok,Warnings,Flags) ->
case member(silent,Flags) of
true ->
- case systools_lib:werror(Flags, Warnings) of
- true ->
- error;
- false ->
- {ok,?MODULE,Warnings}
- end;
+ {ok,?MODULE,Warnings};
_ ->
- case member(warnings_as_errors,Flags) of
- true ->
- io:format("~ts",[format_warning(Warnings, true)]),
- error;
- false ->
- io:format("~ts",[format_warning(Warnings)]),
- ok
- end
+ io:format("~ts",[format_warning(Warnings)]),
+ ok
end;
return({error,Mod,Error},_,Flags) ->
case member(silent,Flags) of
@@ -300,6 +292,8 @@ add_apply_upgrade(Script,Args) ->
%% {variables,[{Name,AbsString}]}
%% {machine, jam | beam | vee}
%% {var_tar, include | ownfile | omit}
+%% no_warn_sasl
+%% warnings_as_errors
%%
%% The tar file contains:
%% lib/App-Vsn/ebin
@@ -332,13 +326,23 @@ make_tar(RelName, Flags) when is_list(RelName), is_list(Flags) ->
Path = make_set(Path1 ++ code:get_path()),
ModTestP = {member(src_tests, Flags),xref_p(Flags)},
case get_release(RelName, Path, ModTestP, machine(Flags)) of
- {ok, Release, Appls, Warnings} ->
- case catch mk_tar(RelName, Release, Appls, Flags, Path1) of
- ok ->
- return(ok,Warnings,Flags);
- Error ->
- return(Error,Warnings,Flags)
- end;
+ {ok, Release, Appls, Warnings0} ->
+ Warnings = wsasl(Flags, Warnings0),
+ case systools_lib:werror(Flags, Warnings) of
+ true ->
+ Warnings1 = [W || {warning,W}<-Warnings],
+ return({error,?MODULE,
+ {warnings_treated_as_errors,Warnings1}},
+ Warnings,
+ Flags);
+ false ->
+ case catch mk_tar(RelName, Release, Appls, Flags, Path1) of
+ ok ->
+ return(ok,Warnings,Flags);
+ Error ->
+ return(Error,Warnings,Flags)
+ end
+ end;
Error ->
return(Error,[],Flags)
end;
@@ -1904,8 +1908,10 @@ del_tar(Tar, TarName) ->
file:delete(TarName).
add_to_tar(Tar, FromFile, ToFile) ->
- case erl_tar:add(Tar, FromFile, ToFile, [compressed, dereference]) of
+ case catch erl_tar:add(Tar, FromFile, ToFile, [compressed, dereference]) of
ok -> ok;
+ {'EXIT', Reason} ->
+ throw({error, {tar_error, {add, FromFile, Reason}}});
{error, Error} ->
throw({error, {tar_error, {add, FromFile, Error}}})
end.
@@ -2113,90 +2119,80 @@ cas([Y | Args], X) ->
%% Check Options for make_tar
check_args_tar(Args) ->
- cat(Args, {undef, undef, undef, undef, undef, undef, undef, undef, undef, undef, []}).
+ cat(Args, []).
-cat([], {_Path,_Sil,_Dirs,_Erts,_Test,_Var,_VarTar,_Mach,_Xref,_XrefApps, X}) ->
+cat([], X) ->
X;
%%% path ---------------------------------------------------------------
-cat([{path, P} | Args], {Path, Sil, Dirs, Erts, Test,
- Var, VarTar, Mach, Xref, XrefApps, X}) when is_list(P) ->
+cat([{path, P} | Args], X) when is_list(P) ->
case check_path(P) of
ok ->
- cat(Args, {P, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X});
+ cat(Args, X);
error ->
- cat(Args, {Path, Sil, Dirs, Erts, Test,
- Var, VarTar, Mach, Xref, XrefApps, X++[{path,P}]})
+ cat(Args, X++[{path,P}])
end;
%%% silent -------------------------------------------------------------
-cat([silent | Args], {Path, _Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X}) ->
- cat(Args, {Path, silent, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X});
+cat([silent | Args], X) ->
+ cat(Args, X);
%%% dirs ---------------------------------------------------------------
-cat([{dirs, D} | Args], {Path, Sil, Dirs, Erts, Test,
- Var, VarTar, Mach, Xref, XrefApps, X}) ->
+cat([{dirs, D} | Args], X) ->
case check_dirs(D) of
ok ->
- cat(Args, {Path, Sil, D, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X});
+ cat(Args, X);
error ->
- cat(Args, {Path, Sil, Dirs, Erts, Test,
- Var, VarTar, Mach, Xref, XrefApps, X++[{dirs, D}]})
+ cat(Args, X++[{dirs, D}])
end;
%%% erts ---------------------------------------------------------------
-cat([{erts, E} | Args], {Path, Sil, Dirs, _Erts, Test,
- Var, VarTar, Mach, Xref, XrefApps, X}) when is_list(E)->
- cat(Args, {Path, Sil, Dirs, E, Test, Var, VarTar, Mach, Xref, XrefApps, X});
+cat([{erts, E} | Args], X) when is_list(E)->
+ cat(Args, X);
%%% src_tests ----------------------------------------------------
-cat([src_tests | Args], {Path, Sil, Dirs, Erts, _Test, Var, VarTar, Mach, Xref, XrefApps, X}) ->
- cat(Args, {Path, Sil, Dirs, Erts, src_tests, Var, VarTar, Mach,
- Xref, XrefApps, X});
+cat([src_tests | Args], X) ->
+ cat(Args, X);
%%% variables ----------------------------------------------------------
-cat([{variables, V} | Args], {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X}) when is_list(V) ->
+cat([{variables, V} | Args], X) when is_list(V) ->
case check_vars(V) of
ok ->
- cat(Args, {Path, Sil, Dirs, Erts, Test, V, VarTar, Mach, Xref, XrefApps, X});
+ cat(Args, X);
error ->
- cat(Args, {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach,
- Xref, XrefApps, X++[{variables, V}]})
+ cat(Args, X++[{variables, V}])
end;
%%% var_tar ------------------------------------------------------------
-cat([{var_tar, VT} | Args], {Path, Sil, Dirs, Erts, Test,
- Var, _VarTar, Mach, Xref, XrefApps, X}) when VT == include ->
- cat(Args, {Path, Sil, Dirs, Erts, Test, Var, include, Mach, Xref, XrefApps, X});
-cat([{var_tar, VT} | Args], {Path, Sil, Dirs, Erts, Test,
- Var, _VarTar, Mach, Xref, XrefApps, X}) when VT == ownfile ->
- cat(Args, {Path, Sil, Dirs, Erts, Test, Var, ownfile, Mach, Xref, XrefApps, X});
-cat([{var_tar, VT} | Args], {Path, Sil, Dirs, Erts, Test,
- Var, _VarTar, Mach, Xref, XrefApps, X}) when VT == omit ->
- cat(Args, {Path, Sil, Dirs, Erts, Test, Var, omit, Mach, Xref, XrefApps, X});
+cat([{var_tar, VT} | Args], X) when VT == include;
+ VT == ownfile;
+ VT == omit ->
+ cat(Args, X);
%%% machine ------------------------------------------------------------
-cat([{machine, M} | Args], {Path, Sil, Dirs, Erts, Test,
- Var, VarTar, Mach, Xref, XrefApps, X}) when is_atom(M) ->
- cat(Args, {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X});
+cat([{machine, M} | Args], X) when is_atom(M) ->
+ cat(Args, X);
%%% exref --------------------------------------------------------------
-cat([exref | Args], {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, _Xref, XrefApps, X}) ->
- cat(Args, {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, exref, XrefApps, X});
+cat([exref | Args], X) ->
+ cat(Args, X);
%%% exref Apps ---------------------------------------------------------
-cat([{exref, Apps} | Args], {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X}) when is_list(Apps) ->
+cat([{exref, Apps} | Args], X) when is_list(Apps) ->
case check_apps(Apps) of
ok ->
- cat(Args, {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach,
- Xref, Apps, X});
+ cat(Args, X);
error ->
- cat(Args, {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach,
- Xref, XrefApps, X++[{exref, Apps}]})
+ cat(Args, X++[{exref, Apps}])
end;
%%% outdir Dir ---------------------------------------------------------
-cat([{outdir, Dir} | Args], {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X}) when is_list(Dir) ->
- cat(Args, {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach,
- Xref, XrefApps, X});
+cat([{outdir, Dir} | Args], X) when is_list(Dir) ->
+ cat(Args, X);
%%% otp_build (secret, not documented) ---------------------------------
-cat([otp_build | Args], {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X}) ->
- cat(Args, {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X});
+cat([otp_build | Args], X) ->
+ cat(Args, X);
+%%% warnings_as_errors ----
+cat([warnings_as_errors | Args], X) ->
+ cat(Args, X);
+%%% no_warn_sasl ----
+cat([no_warn_sasl | Args], X) ->
+ cat(Args, X);
%%% no_module_tests (kept for backwards compatibility, but ignored) ----
-cat([no_module_tests | Args], {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X}) ->
- cat(Args, {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X});
+cat([no_module_tests | Args], X) ->
+ cat(Args, X);
%%% ERROR --------------------------------------------------------------
-cat([Y | Args], {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X}) ->
- cat(Args, {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X++[Y]}).
+cat([Y | Args], X) ->
+ cat(Args, X++[Y]).
check_path([]) ->
ok;
@@ -2296,6 +2292,9 @@ format_error({delete,File,Error}) ->
[File,file:format_error(Error)]);
format_error({tar_error,What}) ->
form_tar_err(What);
+format_error({warnings_treated_as_errors,Warnings}) ->
+ io_lib:format("Warnings being treated as errors:~n~ts",
+ [map(fun(W) -> form_warn("",W) end, Warnings)]);
format_error(ListOfErrors) when is_list(ListOfErrors) ->
format_errors(ListOfErrors);
format_error(E) -> io_lib:format("~p~n",[E]).
@@ -2352,24 +2351,15 @@ form_tar_err({add, File, Error}) ->
%% Format warning
format_warning(Warnings) ->
- format_warning(Warnings, false).
-
-format_warning(Warnings, Werror) ->
- Prefix = case Werror of
- true ->
- "";
- false ->
- "*WARNING* "
- end,
- map(fun({warning,W}) -> form_warn(Prefix, W) end, Warnings).
-
-form_warn(Prefix, {source_not_found,{Mod,_,App,_,_}}) ->
+ map(fun({warning,W}) -> form_warn("*WARNING* ", W) end, Warnings).
+
+form_warn(Prefix, {source_not_found,{Mod,App,_}}) ->
io_lib:format("~ts~w: Source code not found: ~w.erl~n",
[Prefix,App,Mod]);
form_warn(Prefix, {{parse_error, File},{_,_,App,_,_}}) ->
io_lib:format("~ts~w: Parse error: ~p~n",
[Prefix,App,File]);
-form_warn(Prefix, {obj_out_of_date,{Mod,_,App,_,_}}) ->
+form_warn(Prefix, {obj_out_of_date,{Mod,App,_}}) ->
io_lib:format("~ts~w: Object code (~w) out of date~n",
[Prefix,App,Mod]);
form_warn(Prefix, {exref_undef, Undef}) ->
@@ -2379,8 +2369,8 @@ form_warn(Prefix, {exref_undef, Undef}) ->
end,
map(F, Undef);
form_warn(Prefix, missing_sasl) ->
- io_lib:format("~ts: Missing application sasl. "
+ io_lib:format("~tsMissing application sasl. "
"Can not upgrade with this release~n",
[Prefix]);
form_warn(Prefix, What) ->
- io_lib:format("~ts ~p~n", [Prefix,What]).
+ io_lib:format("~ts~p~n", [Prefix,What]).
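A minimal sketch of how a caller can consume the {warnings_treated_as_errors, Warnings} error value that make_script now returns in silent mode; the release name is hypothetical:

%% Minimal sketch; "myrel" is a hypothetical release name. In silent mode the
%% warnings are not printed; they come back inside the error term (e.g.
%% {warnings_treated_as_errors, Ws}) and can be rendered with the returned
%% module's format_error/1.
case systools:make_script("myrel", [silent, warnings_as_errors]) of
    {ok, _Module, _Warnings} ->
        ok;
    {error, Module, Error} ->
        io:format("~ts", [Module:format_error(Error)]),
        error
end.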
diff --git a/lib/sasl/src/systools_relup.erl b/lib/sasl/src/systools_relup.erl
index 28534dc0c8..7e1844b400 100644
--- a/lib/sasl/src/systools_relup.erl
+++ b/lib/sasl/src/systools_relup.erl
@@ -155,36 +155,12 @@ mk_relup(TopRelFile, BaseUpRelDcs, BaseDnRelDcs) ->
mk_relup(TopRelFile, BaseUpRelDcs, BaseDnRelDcs, Opts) ->
case check_opts(Opts) of
[] ->
- R = (catch do_mk_relup(TopRelFile,BaseUpRelDcs,BaseDnRelDcs,
- add_code_path(Opts), Opts)),
- case {get_opt(silent, Opts), get_opt(noexec, Opts)} of
- {false, false} ->
- case R of
- {ok, _Res, _Mod, Ws} ->
- print_warnings(Ws, Opts),
- case systools_lib:werror(Opts, Ws) of
- true ->
- error;
- false ->
- ok
- end;
- Other ->
- print_error(Other),
- error
- end;
- _ ->
- case R of
- {ok, _Res, _Mod, Ws} ->
- case systools_lib:werror(Opts, Ws) of
- true ->
- error;
- false ->
- R
- end;
- R ->
- R
- end
- end;
+ R = try do_mk_relup(TopRelFile,BaseUpRelDcs,BaseDnRelDcs,
+ add_code_path(Opts), Opts)
+ catch throw:Error ->
+ Error
+ end,
+ done_mk_relup(Opts, R);
BadArg ->
erlang:error({badarg, BadArg})
end.
@@ -224,17 +200,45 @@ do_mk_relup(TopRelFile, BaseUpRelDcs, BaseDnRelDcs, Path, Opts) ->
{Dn, Ws2} = foreach_baserel_dn(TopRel, TopApps, BaseDnRelDcs,
Path, Opts, Ws1),
Relup = {TopRel#release.vsn, Up, Dn},
- case systools_lib:werror(Opts, Ws2) of
- true ->
- ok;
- false ->
- write_relup_file(Relup, Opts)
- end,
- {ok, Relup, ?MODULE, Ws2};
+
+ {ok, Relup, Ws2};
Other ->
- throw(Other)
+ Other
end.
+done_mk_relup(Opts, {ok,Relup,Ws}) ->
+ WAE = get_opt(warnings_as_errors,Opts),
+ Silent = get_opt(silent,Opts),
+ Noexec = get_opt(noexec,Opts),
+
+ if WAE andalso Ws=/=[] ->
+ return_error(Silent,
+ {error,?MODULE,{warnings_treated_as_errors, Ws}});
+ not Noexec ->
+ case write_relup_file(Relup,Opts) of
+ ok ->
+ return_ok(Silent,Relup,Ws);
+ Error ->
+ return_error(Silent,Error)
+ end;
+ true -> % noexec
+ return_ok(true,Relup,Ws)
+ end;
+done_mk_relup(Opts, Error) ->
+ return_error(get_opt(silent,Opts) orelse get_opt(noexec,Opts), Error).
+
+return_error(true, Error) ->
+ Error;
+return_error(false, Error) ->
+ print_error(Error),
+ error.
+
+return_ok(true,Relup,Ws) ->
+ {ok,Relup,?MODULE,Ws};
+return_ok(false,_Relup,Ws) ->
+ print_warnings(Ws),
+ ok.
+
%%-----------------------------------------------------------------
%% foreach_baserel_up(Rel, TopApps, BaseRelDcs, Path, Opts, Ws) -> Ret
%% foreach_baserel_dn(Rel, TopApps, BaseRelDcs, Path, Opts, Ws) -> Ret
@@ -529,33 +533,18 @@ to_list(X) when is_list(X) -> X.
%% Writes a relup file.
%%
write_relup_file(Relup, Opts) ->
- case get_opt(noexec, Opts) of
- true ->
- ok;
- _ ->
- Filename = case get_opt(outdir, Opts) of
- OutDir when is_list(OutDir) ->
- filename:join(filename:absname(OutDir),
- "relup");
- false ->
- "relup";
- Badarg ->
- throw({error, ?MODULE, {badarg, {outdir,Badarg}}})
- end,
-
- case file:open(Filename, [write]) of
- {ok, Fd} ->
- io:format(Fd, "~p.~n", [Relup]),
- case file:close(Fd) of
- ok -> ok;
- {error,Reason} ->
- throw({error, ?MODULE,
- {file_problem, {"relup", {close,Reason}}}})
- end;
- {error, Reason} ->
- throw({error, ?MODULE,
- {file_problem, {"relup", {open, Reason}}}})
- end
+ Filename = filename:join(filename:absname(get_opt(outdir,Opts)),
+ "relup"),
+ case file:open(Filename, [write]) of
+ {ok, Fd} ->
+ io:format(Fd, "~p.~n", [Relup]),
+ case file:close(Fd) of
+ ok -> ok;
+ {error,Reason} ->
+ {error, ?MODULE, {file_problem, {"relup", {close,Reason}}}}
+ end;
+ {error, Reason} ->
+ {error, ?MODULE, {file_problem, {"relup", {open, Reason}}}}
end.
add_code_path(Opts) ->
@@ -593,10 +582,9 @@ default(path) -> false;
default(noexec) -> false;
default(silent) -> false;
default(restart_emulator) -> false;
-default(outdir) -> false.
+default(outdir) -> ".";
+default(warnings_as_errors) -> false.
-print_error({'EXIT', Err}) ->
- print_error(Err);
print_error({error, Mod, Error}) ->
S = apply(Mod, format_error, [Error]),
io:format(S, []);
@@ -614,24 +602,20 @@ format_error({missing_sasl,Release}) ->
io_lib:format("No sasl application in release ~ts, ~ts. "
"Can not be upgraded.",
[Release#release.name, Release#release.vsn]);
+format_error({warnings_treated_as_errors, Warnings}) ->
+ io_lib:format("Warnings being treated as errors:~n~ts",
+ [[format_warning("",W) || W <- Warnings]]);
format_error(Error) ->
- io:format("~p~n", [Error]).
+ io_lib:format("~p~n", [Error]).
-print_warnings(Ws, Opts) when is_list(Ws) ->
- lists:foreach(fun(W) -> print_warning(W, Opts) end, Ws);
-print_warnings(W, Opts) ->
- print_warning(W, Opts).
+print_warnings(Ws) when is_list(Ws) ->
+ lists:foreach(fun(W) -> print_warning(W) end, Ws);
+print_warnings(W) ->
+ print_warning(W).
-print_warning(W, Opts) ->
- Prefix = case lists:member(warnings_as_errors, Opts) of
- true ->
- "";
- false ->
- "*WARNING* "
- end,
- S = format_warning(Prefix, W),
- io:format("~ts", [S]).
+print_warning(W) ->
+ io:format("~ts", [format_warning(W)]).
format_warning(W) ->
format_warning("*WARNING* ", W).
@@ -639,6 +623,8 @@ format_warning(W) ->
format_warning(Prefix, {erts_vsn_changed, {Rel1, Rel2}}) ->
io_lib:format("~tsThe ERTS version changed between ~p and ~p~n",
[Prefix, Rel1, Rel2]);
+format_warning(Prefix, pre_R15_emulator_upgrade) ->
+ io_lib:format("~tsUpgrade from an OTP version earlier than R15. New code should be compiled with the old emulator.~n",[Prefix]);
format_warning(Prefix, What) ->
io_lib:format("~ts~p~n",[Prefix, What]).
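A minimal sketch of the reworked noexec handling in make_relup shown above (the release names are hypothetical): the relup term is returned to the caller and no relup file is written.

%% Minimal sketch; the release names are hypothetical. With noexec the
%% computed relup term is returned and the "relup" file is not written.
{ok, Relup, _Module, _Warnings} =
    systools:make_relup("myrel-2", ["myrel-1"], ["myrel-1"], [noexec]),
false = filelib:is_regular("relup"),
io:format("~p~n", [Relup]).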
diff --git a/lib/sasl/test/systools_SUITE.erl b/lib/sasl/test/systools_SUITE.erl
index dd5f277a77..0c98232467 100644
--- a/lib/sasl/test/systools_SUITE.erl
+++ b/lib/sasl/test/systools_SUITE.erl
@@ -29,6 +29,8 @@
-module(systools_SUITE).
+-compile(export_all).
+
%%-define(debug, true).
-include_lib("common_test/include/ct.hrl").
@@ -39,31 +41,6 @@
-include_lib("kernel/include/file.hrl").
--export([all/0,suite/0,groups/0,init_per_group/2,end_per_group/2]).
-
--export([script_options/1, normal_script/1, unicode_script/1,
- unicode_script/2, no_mod_vsn_script/1,
- wildcard_script/1, variable_script/1, no_sasl_script/1,
- no_dot_erlang_script/1,
- abnormal_script/1, src_tests_script/1, crazy_script/1,
- included_script/1, included_override_script/1,
- included_fail_script/1, included_bug_script/1, exref_script/1,
- duplicate_modules_script/1,
- otp_3065_circular_dependenies/1, included_and_used_sort_script/1]).
--export([tar_options/1, normal_tar/1, no_mod_vsn_tar/1, system_files_tar/1,
- system_files_tar/2, invalid_system_files_tar/1,
- invalid_system_files_tar/2, variable_tar/1,
- src_tests_tar/1, var_tar/1, exref_tar/1, link_tar/1,
- otp_9507_path_ebin/1]).
--export([normal_relup/1, restart_relup/1, abnormal_relup/1, no_sasl_relup/1,
- no_appup_relup/1, bad_appup_relup/1, app_start_type_relup/1,
- regexp_relup/1]).
--export([normal_hybrid/1,hybrid_no_old_sasl/1,hybrid_no_new_sasl/1]).
--export([otp_6226_outdir/1, app_file_defaults/1]).
--export([init_per_suite/1, end_per_suite/1,
- init_per_testcase/2, end_per_testcase/2]).
--export([delete_tree/1]).
-
-import(lists, [foldl/3]).
-define(default_timeout, ?t:minutes(20)).
@@ -91,7 +68,8 @@ groups() ->
{tar, [],
[tar_options, normal_tar, no_mod_vsn_tar, system_files_tar,
invalid_system_files_tar, variable_tar,
- src_tests_tar, var_tar, exref_tar, link_tar, otp_9507_path_ebin]},
+ src_tests_tar, var_tar, exref_tar, link_tar, no_sasl_tar,
+ otp_9507_path_ebin]},
{relup, [],
[normal_relup, restart_relup, abnormal_relup, no_sasl_relup,
no_appup_relup, bad_appup_relup, app_start_type_relup, regexp_relup
@@ -238,6 +216,7 @@ normal_script(Config) when is_list(Config) ->
%% Check the same but w. silent flag
{ok, _, []} = systools:make_script(LatestName, [silent]),
+ {ok, _, []} = systools:make_script(LatestName, [silent,warnings_as_errors]),
%% Use the local option
ok = systools:make_script(LatestName, [local]),
@@ -456,9 +435,16 @@ no_sasl_script(Config) when is_list(Config) ->
{ok, _ , [{warning,missing_sasl}]} =
systools:make_script(LatestName,[{path, P},silent]),
+ {error, systools_make, {warnings_treated_as_errors,[missing_sasl]}} =
+ systools:make_script(LatestName,[{path, P},silent,warnings_as_errors]),
+
{ok, _ , []} =
systools:make_script(LatestName,[{path, P},silent, no_warn_sasl]),
+ {ok, _ , []} =
+ systools:make_script(LatestName,[{path, P},silent, no_warn_sasl,
+ warnings_as_errors]),
+
ok = file:set_cwd(OldDir),
ok.
@@ -525,7 +511,9 @@ src_tests_script(Config) when is_list(Config) ->
ok = file:delete(BootFile),
false = filelib:is_regular(BootFile),
%% With warnings_as_errors and src_tests option, an error should be issued
- error =
+ {error, systools_make,
+ {warnings_treated_as_errors, [{obj_out_of_date,_},
+ {source_not_found,_}]}} =
systools:make_script(LatestName, [silent, {path, N}, src_tests,
warnings_as_errors]),
error =
@@ -745,7 +733,7 @@ exref_script(Config) when is_list(Config) ->
ok = file:set_cwd(LatestDir),
- {ok, _, _} = systools:make_script(LatestName, [{path,P}, silent]),
+ {ok, _, []} = systools:make_script(LatestName, [{path,P}, silent]),
%% Complete exref
{ok, _, W1} =
@@ -894,10 +882,10 @@ normal_tar(Config) when is_list(Config) ->
ok = file:set_cwd(LatestDir),
- {ok, _, _} = systools:make_script(LatestName, [silent, {path, P}]),
+ {ok, _, []} = systools:make_script(LatestName, [silent, {path, P}]),
ok = systools:make_tar(LatestName, [{path, P}]),
ok = check_tar(fname([lib,'db-2.1',ebin,'db.app']), LatestName),
- {ok, _, _} = systools:make_tar(LatestName, [{path, P}, silent]),
+ {ok, _, []} = systools:make_tar(LatestName, [{path, P}, silent]),
ok = check_tar(fname([lib,'fe-3.1',ebin,'fe.app']), LatestName),
ok = file:set_cwd(OldDir),
@@ -918,10 +906,10 @@ no_mod_vsn_tar(Config) when is_list(Config) ->
ok = file:set_cwd(LatestDir),
- {ok, _, _} = systools:make_script(LatestName, [silent, {path, P}]),
+ {ok, _, []} = systools:make_script(LatestName, [silent, {path, P}]),
ok = systools:make_tar(LatestName, [{path, P}]),
ok = check_tar(fname([lib,'db-3.1',ebin,'db.app']), LatestName),
- {ok, _, _} = systools:make_tar(LatestName, [{path, P}, silent]),
+ {ok, _, []} = systools:make_tar(LatestName, [{path, P}, silent]),
ok = check_tar(fname([lib,'fe-3.1',ebin,'fe.app']), LatestName),
ok = file:set_cwd(OldDir),
@@ -945,11 +933,11 @@ system_files_tar(Config) ->
ok = file:write_file("sys.config","[].\n"),
ok = file:write_file("relup","{\"LATEST\",[],[]}.\n"),
- {ok, _, _} = systools:make_script(LatestName, [silent, {path, P}]),
+ {ok, _, []} = systools:make_script(LatestName, [silent, {path, P}]),
ok = systools:make_tar(LatestName, [{path, P}]),
ok = check_tar(fname(["releases","LATEST","sys.config"]), LatestName),
ok = check_tar(fname(["releases","LATEST","relup"]), LatestName),
- {ok, _, _} = systools:make_tar(LatestName, [{path, P}, silent]),
+ {ok, _, []} = systools:make_tar(LatestName, [{path, P}, silent]),
ok = check_tar(fname(["releases","LATEST","sys.config"]), LatestName),
ok = check_tar(fname(["releases","LATEST","relup"]), LatestName),
@@ -978,7 +966,7 @@ invalid_system_files_tar(Config) ->
ok = file:set_cwd(LatestDir),
- {ok, _, _} = systools:make_script(LatestName, [silent, {path, P}]),
+ {ok, _, []} = systools:make_script(LatestName, [silent, {path, P}]),
%% Add dummy relup and sys.config - faulty sys.config
ok = file:write_file("sys.config","[]\n"), %!!! syntax error - missing '.'
@@ -1036,7 +1024,7 @@ variable_tar(Config) when is_list(Config) ->
ok = file:set_cwd(LatestDir),
- {ok, _, _} = systools:make_script(LatestName,
+ {ok, _, []} = systools:make_script(LatestName,
[silent,
{path, P},
{variables,[{"TEST", LibDir}]}]),
@@ -1045,7 +1033,7 @@ variable_tar(Config) when is_list(Config) ->
{variables,[{"TEST", LibDir}]}]),
ok = check_var_tar("TEST", LatestName),
- {ok, _, _} = systools:make_tar(LatestName,
+ {ok, _, []} = systools:make_tar(LatestName,
[{path, P}, silent,
{variables,[{"TEST", LibDir}]}]),
ok = check_var_tar("TEST", LatestName),
@@ -1174,7 +1162,7 @@ var_tar(Config) when is_list(Config) ->
ok = file:set_cwd(LatestDir),
- {ok, _, _} = systools:make_script(LatestName,
+ {ok, _, []} = systools:make_script(LatestName,
[silent,
{path, P},
{variables,[{"TEST", LibDir}]}]),
@@ -1218,7 +1206,7 @@ exref_tar(Config) when is_list(Config) ->
ok = file:set_cwd(LatestDir),
- {ok, _, _} = systools:make_script(LatestName, [silent, {path, P}]),
+ {ok, _, []} = systools:make_script(LatestName, [silent, {path, P}]),
%% Complete exref
{ok, _, W1} =
@@ -1248,7 +1236,41 @@ exref_tar(Config) when is_list(Config) ->
ok = file:set_cwd(OldDir),
ok.
+%% make_tar: Create tar without sasl appl. Check warning.
+no_sasl_tar(Config) when is_list(Config) ->
+ {ok, OldDir} = file:get_cwd(),
+ {LatestDir, LatestName} = create_script(latest1_no_sasl,Config),
+
+ DataDir = filename:absname(?copydir),
+ LibDir = fname([DataDir, d_normal, lib]),
+ P = [fname([LibDir, '*', ebin]),
+ fname([DataDir, lib, kernel, ebin]),
+ fname([DataDir, lib, stdlib, ebin]),
+ fname([DataDir, lib, sasl, ebin])],
+
+ ok = file:set_cwd(LatestDir),
+
+ {ok, _, _} = systools:make_script(LatestName, [silent, {path, P}]),
+ ok = systools:make_tar(LatestName, [{path, P}]),
+ {ok, _, [{warning,missing_sasl}]} =
+ systools:make_tar(LatestName, [{path, P}, silent]),
+ {ok, _, []} =
+ systools:make_tar(LatestName, [{path, P}, silent, no_warn_sasl]),
+ {ok, _, []} =
+ systools:make_tar(LatestName, [{path, P}, silent, no_warn_sasl,
+ warnings_as_errors]),
+ TarFile = LatestName ++ ".tar.gz",
+ true = filelib:is_regular(TarFile),
+ ok = file:delete(TarFile),
+ {error, systools_make, {warnings_treated_as_errors,[missing_sasl]}} =
+ systools:make_tar(LatestName, [{path, P}, silent, warnings_as_errors]),
+ error =
+ systools:make_tar(LatestName, [{path, P}, warnings_as_errors]),
+ false = filelib:is_regular(TarFile),
+
+ ok = file:set_cwd(OldDir),
+ ok.
%% make_tar: OTP-9507 - make_tar failed when path given as just 'ebin'.
otp_9507_path_ebin(Config) when is_list(Config) ->
@@ -1268,7 +1290,7 @@ otp_9507_path_ebin(Config) when is_list(Config) ->
fname([DataDir, lib, kernel, ebin]),
fname([DataDir, lib, stdlib, ebin]),
fname([DataDir, lib, sasl, ebin])],
- {ok, _, _} = systools:make_script(RelName, [silent, {path, P1}]),
+ {ok, _, []} = systools:make_script(RelName, [silent, {path, P1}]),
ok = systools:make_tar(RelName, [{path, P1}]),
Content1 = tar_contents(RelName),
@@ -1309,7 +1331,7 @@ normal_relup(Config) when is_list(Config) ->
ok = systools:make_relup(LatestName, [LatestName1], [LatestName1],
[{path, P}]),
ok = check_relup([{db, "2.1"}], [{db, "1.0"}]),
- {ok, _, _, []} =
+ {ok, Relup, _, []} =
systools:make_relup(LatestName, [LatestName1], [LatestName1],
[{path, P}, silent]),
ok = check_relup([{db, "2.1"}], [{db, "1.0"}]),
@@ -1322,7 +1344,9 @@ normal_relup(Config) when is_list(Config) ->
error =
systools:make_relup(LatestName, [LatestName2], [LatestName1],
[{path, P}, warnings_as_errors]),
- error =
+ {error, systools_relup,
+ {warnings_treated_as_errors,[pre_R15_emulator_upgrade,
+ {erts_vsn_changed, _}]}} =
systools:make_relup(LatestName, [LatestName2], [LatestName1],
[{path, P}, silent, warnings_as_errors]),
@@ -1341,6 +1365,14 @@ normal_relup(Config) when is_list(Config) ->
%% relup file should exist now
true = filelib:is_regular("relup"),
+ %% file should not be written if noexec option is used.
+ %% delete before running tests.
+ ok = file:delete("relup"),
+ {ok,Relup,_,[]} =
+ systools:make_relup(LatestName, [LatestName1], [LatestName1],
+ [{path, P}, noexec]),
+ false = filelib:is_regular("relup"),
+
ok = file:set_cwd(OldDir),
ok.

diff --git a/lib/snmp/src/app/snmp.appup.src b/lib/snmp/src/app/snmp.appup.src
index ca61782639..db09ec3dc5 100644
--- a/lib/snmp/src/app/snmp.appup.src
+++ b/lib/snmp/src/app/snmp.appup.src
@@ -8,6 +8,10 @@
%% {update, snmpa_local_db, soft, soft_purge, soft_purge, []}
%% {add_module, snmpm_net_if_mt}
[
+ {<<"5\\.2\\.4">>,
+ [{load_module, snmp, soft_purge, soft_purge, []},
+ {load_module, snmpc_lib, soft_purge, soft_purge, []},
+ {load_module, snmpc_mib_gram, soft_purge, soft_purge, []}]},
{<<"5\\..*">>, [{restart_application, snmp}]},
{<<"4\\..*">>, [{restart_application, snmp}]}
],
@@ -17,6 +21,10 @@
%% {remove, {snmpm_net_if_mt, soft_purge, soft_purge}}
[
+ {<<"5\\.2\\.4">>,
+ [{load_module, snmp, soft_purge, soft_purge, []},
+ {load_module, snmpc_lib, soft_purge, soft_purge, []},
+ {load_module, snmpc_mib_gram, soft_purge, soft_purge, []}]},
{<<"5\\..*">>, [{restart_application, snmp}]},
{<<"4\\..*">>, [{restart_application, snmp}]}
]
diff --git a/lib/snmp/src/app/snmp.erl b/lib/snmp/src/app/snmp.erl
index df3933ea01..8a736f688b 100644
--- a/lib/snmp/src/app/snmp.erl
+++ b/lib/snmp/src/app/snmp.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2016. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -573,9 +573,16 @@ print_mod_info(Prefix, {Module, Info}) ->
CompDate =
case key1search(compile_time, Info) of
{value, {Year, Month, Day, Hour, Min, Sec}} ->
- lists:flatten(
- io_lib:format("~w-~2..0w-~2..0w ~2..0w:~2..0w:~2..0w",
- [Year, Month, Day, Hour, Min, Sec]));
+ io_lib:format(
+ "~w-~2..0w-~2..0w ~2..0w:~2..0w:~2..0w",
+ [Year, Month, Day, Hour, Min, Sec]);
+ _ ->
+ "Not found"
+ end,
+ Digest =
+ case key1search(md5, Info) of
+ {value, MD5} when is_binary(MD5) ->
+ [io_lib:format("~2.16.0b", [Byte]) || <<Byte>> <= MD5];
_ ->
"Not found"
end,
@@ -583,12 +590,14 @@ print_mod_info(Prefix, {Module, Info}) ->
"~s Vsn: ~s~n"
"~s App vsn: ~s~n"
"~s Compiler ver: ~s~n"
- "~s Compile time: ~s~n",
+ "~s Compile time: ~s~n"
+ "~s MD5 digest: ~s~n",
[Prefix, Module,
Prefix, Vsn,
Prefix, AppVsn,
- Prefix, CompVer,
- Prefix, CompDate]),
+ Prefix, CompVer,
+ Prefix, CompDate,
+ Prefix, Digest]),
ok.
key1search(Key, Vals) ->
@@ -617,7 +626,7 @@ versions1() ->
Error ->
Error
end.
-
+
versions2() ->
case ms2() of
{ok, Mods} ->
@@ -625,25 +634,56 @@ versions2() ->
Error ->
Error
end.
-
+
version_info(Mods) ->
SysInfo = sys_info(),
OsInfo = os_info(),
ModInfo = [mod_version_info(Mod) || Mod <- Mods],
[{sys_info, SysInfo}, {os_info, OsInfo}, {mod_info, ModInfo}].
-
+
mod_version_info(Mod) ->
Info = Mod:module_info(),
- {value, {attributes, Attr}} = lists:keysearch(attributes, 1, Info),
- {value, {vsn, [Vsn]}} = lists:keysearch(vsn, 1, Attr),
- {value, {app_vsn, AppVsn}} = lists:keysearch(app_vsn, 1, Attr),
- {value, {compile, Comp}} = lists:keysearch(compile, 1, Info),
- {value, {version, Ver}} = lists:keysearch(version, 1, Comp),
- {value, {time, Time}} = lists:keysearch(time, 1, Comp),
- {Mod, [{vsn, Vsn},
- {app_vsn, AppVsn},
- {compiler_version, Ver},
- {compile_time, Time}]}.
+ {Mod,
+ case key1search(attributes, Info) of
+ {value, Attr} ->
+ case key1search(vsn, Attr) of
+ {value, [Vsn]} ->
+ [{vsn, Vsn}];
+ not_found ->
+ []
+ end ++
+ case key1search(app_vsn, Attr) of
+ {value, AppVsn} ->
+ [{app_vsn, AppVsn}];
+ not_found ->
+ []
+ end;
+ not_found ->
+ []
+ end ++
+ case key1search(compile, Info) of
+ {value, Comp} ->
+ case key1search(version, Comp) of
+ {value, Ver} ->
+ [{compiler_version, Ver}];
+ not_found ->
+ []
+ end ++
+ case key1search(time, Comp) of
+ {value, Ver} ->
+ [{compile_time, Ver}];
+ not_found ->
+ []
+ end;
+ not_found ->
+ []
+ end ++
+ case key1search(md5, Info) of
+ {value, Bin} ->
+ [{md5, Bin}];
+ not_found ->
+ []
+ end}.
sys_info() ->
SysArch = string:strip(erlang:system_info(system_architecture),right,$\n),
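The digest handling added above can be illustrated with a small sketch of how module_info/1 exposes the MD5 and how the same hex formatting renders it:

%% Minimal sketch of the md5 formatting used in print_mod_info/2 above:
%% Mod:module_info(md5) is a 16-byte binary, printed here as lowercase hex.
MD5 = snmp:module_info(md5),
Hex = [io_lib:format("~2.16.0b", [Byte]) || <<Byte>> <= MD5],
io:format("snmp MD5 digest: ~s~n", [Hex]).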
diff --git a/lib/snmp/src/compile/snmpc_lib.erl b/lib/snmp/src/compile/snmpc_lib.erl
index 51690b6e7e..33ddd78308 100644
--- a/lib/snmp/src/compile/snmpc_lib.erl
+++ b/lib/snmp/src/compile/snmpc_lib.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2016. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -99,7 +99,7 @@ make_ASN1type({{type_with_size,Type,{range,Lo,Hi}},Line}) ->
print_error("Undefined type '~w'",[Type],Line),
guess_string_type()
end;
-make_ASN1type({{integer_with_enum,Type,Enums},Line}) ->
+make_ASN1type({{type_with_enum,Type,Enums},Line}) ->
case lookup_vartype(Type) of
{value,ASN1type} -> ASN1type#asn1_type{assocList = [{enums, Enums}]};
false ->
diff --git a/lib/snmp/src/compile/snmpc_mib_gram.yrl b/lib/snmp/src/compile/snmpc_mib_gram.yrl
index 743c3a6550..14a668127e 100644
--- a/lib/snmp/src/compile/snmpc_mib_gram.yrl
+++ b/lib/snmp/src/compile/snmpc_mib_gram.yrl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2016. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -387,10 +387,12 @@ syntax -> type : {{type, cat('$1')},line_of('$1')}.
syntax -> type size : {{type_with_size, cat('$1'), '$2'},line_of('$1')}.
syntax -> usertype size : {{type_with_size,val('$1'), '$2'},line_of('$1')}.
syntax -> 'INTEGER' '{' namedbits '}' :
- {{integer_with_enum, 'INTEGER', '$3'}, line_of('$1')}.
+ {{type_with_enum, 'INTEGER', '$3'}, line_of('$1')}.
syntax -> 'BITS' '{' namedbits '}' :
ensure_ver(2,'$1'),
{{bits, '$3'}, line_of('$1')}.
+syntax -> usertype '{' namedbits '}' :
+ {{type_with_enum, 'INTEGER', '$3'}, line_of('$1')}.
syntax -> 'SEQUENCE' 'OF' usertype :
{{sequence_of,val('$3')},line_of('$1')}.
diff --git a/lib/snmp/test/snmp_compiler_test.erl b/lib/snmp/test/snmp_compiler_test.erl
index 2c8851c2a7..9b3c2bfd2c 100644
--- a/lib/snmp/test/snmp_compiler_test.erl
+++ b/lib/snmp/test/snmp_compiler_test.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2003-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2003-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -56,7 +56,8 @@
otp_8574/1,
otp_8595/1,
otp_10799/1,
- otp_10808/1
+ otp_10808/1,
+ otp_14145/1
]).
@@ -135,7 +136,8 @@ all() ->
].
groups() ->
- [{tickets, [], [otp_6150, otp_8574, otp_8595, otp_10799, otp_10808]}].
+ [{tickets, [],
+ [otp_6150, otp_8574, otp_8595, otp_10799, otp_10808, otp_14145]}].
init_per_group(_GroupName, Config) ->
Config.
@@ -431,6 +433,30 @@ otp_10808(Config) when is_list(Config) ->
%%======================================================================
+otp_14145(suite) ->
+ [];
+otp_14145(Config) when is_list(Config) ->
+ put(tname, otp14145),
+ p("starting with Config: ~p~n", [Config]),
+
+ Dir = ?config(case_top_dir, Config),
+ MibDir = ?config(mib_dir, Config),
+ MibName = "OTP14145-MIB",
+ MibFile = join(MibDir, MibName++".mib"),
+ ?line {ok, MibBin} =
+ snmpc:compile(MibFile, [{outdir, Dir},
+ {verbosity, trace},
+ {group_check, false},
+ module_compliance]),
+ p("Mib: ~n~p~n", [MibBin]),
+ MIB = read_mib(MibBin),
+ Oid = [1,3,6,1,2,1,67,4],
+ check_mib(MIB#mib.mes, Oid, undefined),
+ ok.
+
+
+%%======================================================================
+
augments_extra_info(suite) ->
[];
augments_extra_info(Config) when is_list(Config) ->
diff --git a/lib/snmp/test/snmp_manager_test.erl b/lib/snmp/test/snmp_manager_test.erl
index 71f4017d8b..054e998af4 100644
--- a/lib/snmp/test/snmp_manager_test.erl
+++ b/lib/snmp/test/snmp_manager_test.erl
@@ -1760,7 +1760,7 @@ do_simple_sync_get2(Node, TargetName, Oids, Get, PostVerify)
"~n Rem: ~w", [Reply, _Rem]),
%% verify that the operation actually worked:
- %% The order should be the same, so no need to seach
+ %% The order should be the same, so no need to search
?line ok = case Reply of
{noError, 0, [#varbind{oid = ?sysObjectID_instance,
value = SysObjectID},
@@ -2709,7 +2709,7 @@ do_simple_set2(Node, TargetName, VAVs, Set, PostVerify) ->
"~n Rem: ~w", [Reply, _Rem]),
%% verify that the operation actually worked:
- %% The order should be the same, so no need to seach
+ %% The order should be the same, so no need to search
%% The value we get should be exactly the same as we sent
?line ok = case Reply of
{noError, 0, [#varbind{oid = ?sysName_instance,
@@ -5118,10 +5118,10 @@ inform_swarm_collector(N) ->
%% Note that we need to deal with re-transmissions!
%% That is, the agent did not receive the ack in time,
-%% and therefor did a re-transmit. This means that we
-%% expect to receive more inform's then we actually
-%% sent. So for sucess we assume:
-%%
+%% and therefore did a re-transmit. This means that we
+%% expect to receive more informs than we actually
+%% sent. So for success we assume:
+%%
%% SentAckCnt = N
%% RespCnt = N
%% RecvCnt >= N
diff --git a/lib/snmp/test/snmp_test_data/OTP14145-MIB.mib b/lib/snmp/test/snmp_test_data/OTP14145-MIB.mib
new file mode 100644
index 0000000000..f29c65c4c2
--- /dev/null
+++ b/lib/snmp/test/snmp_test_data/OTP14145-MIB.mib
@@ -0,0 +1,44 @@
+OTP14145-MIB DEFINITIONS ::= BEGIN
+
+IMPORTS
+ MODULE-IDENTITY, OBJECT-TYPE,
+ mib-2 FROM SNMPv2-SMI
+ InetAddressType, InetAddress FROM INET-ADDRESS-MIB
+ MODULE-COMPLIANCE, OBJECT-GROUP FROM SNMPv2-CONF;
+
+testMibId MODULE-IDENTITY
+ LAST-UPDATED "200608210000Z" -- 21 August 2006
+ ORGANIZATION "a"
+ CONTACT-INFO "a"
+ DESCRIPTION "a"
+ REVISION "200608210000Z" -- 21 August 2006
+ DESCRIPTION "a"
+ ::= { mib-2 67 }
+
+testObj OBJECT-TYPE
+ SYNTAX InetAddressType
+ -- SYNTAX InetAddress
+ MAX-ACCESS read-only
+ STATUS current
+ DESCRIPTION "a"
+ ::= { testMibId 2 }
+
+testObjId OBJECT IDENTIFIER ::= { testMibId 3 }
+
+testMibCompliance MODULE-COMPLIANCE
+ STATUS current
+ DESCRIPTION "a"
+ MODULE
+ OBJECT testObj
+ SYNTAX InetAddressType { ipv4(1), ipv6(2) }
+ -- SYNTAX InetAddress ( SIZE(4|16) )
+ DESCRIPTION "a"
+ ::= { testMibId 4 }
+
+testObjGroup OBJECT-GROUP
+ OBJECTS { testObj }
+ STATUS current
+ DESCRIPTION "a"
+ ::= { testObjId 1 }
+
+END
diff --git a/lib/snmp/vsn.mk b/lib/snmp/vsn.mk
index 28eba0d0d6..30b8ee1124 100644
--- a/lib/snmp/vsn.mk
+++ b/lib/snmp/vsn.mk
@@ -2,7 +2,7 @@
# %CopyrightBegin%
#
-# Copyright Ericsson AB 1997-2016. All Rights Reserved.
+# Copyright Ericsson AB 1997-2017. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -19,6 +19,6 @@
# %CopyrightEnd%
APPLICATION = snmp
-SNMP_VSN = 5.2.4
+SNMP_VSN = 5.2.5
PRE_VSN =
APP_VSN = "$(APPLICATION)-$(SNMP_VSN)$(PRE_VSN)"
diff --git a/lib/ssh/doc/src/ssh.xml b/lib/ssh/doc/src/ssh.xml
index 6b49f89449..1f07e826ce 100644
--- a/lib/ssh/doc/src/ssh.xml
+++ b/lib/ssh/doc/src/ssh.xml
@@ -153,7 +153,7 @@
<item>
<p>IP version to use.</p>
</item>
- <tag><c><![CDATA[{user_dir, string()}]]></c></tag>
+ <tag><marker id="opt_user_dir"></marker><c><![CDATA[{user_dir, string()}]]></c></tag>
<item>
<p>Sets the user directory, that is, the directory containing
<c>ssh</c> configuration files for the user, such as
@@ -175,22 +175,48 @@
supplied with this option.
</p>
</item>
- <tag><c><![CDATA[{silently_accept_hosts, boolean() | accept_fun() | {crypto:digest_type(), accept_fun()} }]]></c>
- <br/>
- <c><![CDATA[accept_fun() :: fun(PeerName::string(), FingerPrint::string()) -> boolean()]]></c>
+ <tag>
+ <c><![CDATA[{silently_accept_hosts, boolean()}]]></c> <br/>
+ <c><![CDATA[{silently_accept_hosts, CallbackFun}]]></c> <br/>
+ <c><![CDATA[{silently_accept_hosts, {HashAlgoSpec, CallbackFun} }]]></c> <br/>
+ <br/>
+ <c><![CDATA[HashAlgoSpec = crypto:digest_type() | [ crypto:digest_type() ] ]]></c><br/>
+ <c><![CDATA[CallbackFun = fun(PeerName, FingerPrint) -> boolean()]]></c><br/>
+ <c><![CDATA[PeerName = string()]]></c><br/>
+ <c><![CDATA[FingerPrint = string() | [ string() ] ]]></c>
</tag>
<item>
- <p>When <c>true</c>, hosts are added to the
- file <c><![CDATA[known_hosts]]></c> without asking the user.
- Defaults to <c>false</c> which will give a user question on stdio of whether to accept or reject a previously
- unseen host.</p>
- <p>If the option value is has an <c>accept_fun()</c>, that fun will called with the arguments
- <c>(PeerName, PeerHostKeyFingerPrint)</c>. The fingerprint is calculated on the Peer's Host Key with
- <seealso marker="public_key:public_key#ssh_hostkey_fingerprint-1">public_key:ssh_hostkey_fingerprint/1</seealso>.
- </p>
- <p>If the <c>crypto:digest_type()</c> is present, the fingerprint is calculated with that digest type by the function
- <seealso marker="public_key:public_key#ssh_hostkey_fingerprint-2">public_key:ssh_hostkey_fingerprint/2</seealso>.
- </p>
+ <p>This option guides the <c>connect</c> function on how to act when the connected server presents a Host
+ Key that the client has not seen before. The default is to ask the user on stdio whether to
+ accept or reject the new Host Key.
+ See also the option <seealso marker="#opt_user_dir"><c>user_dir</c></seealso>
+ for the path to the file <c>known_hosts</c> where previously accepted Host Keys are recorded.
+ </p>
+ <p>The option can be given in three different forms as seen above:</p>
+ <list>
+ <item>The value is a <c>boolean()</c>. The value <c>true</c> will make the client accept any unknown
+ Host Key without any user interaction. The value <c>false</c> keeps the default behaviour of asking
+ the user on stdio.
+ </item>
+ <item>A <c>CallbackFun</c> will be called and the boolean return value <c>true</c> will make the client
+ accept the Host Key. A return value of <c>false</c> will make the client reject the Host Key, and therefore
+ the connection will also be closed. The arguments to the fun are:
+ <list type="bulleted">
+ <item><c>PeerName</c> - a string with the name or address of the remote host.</item>
+ <item><c>FingerPrint</c> - the fingerprint of the Host Key as
+ <seealso marker="public_key:public_key#ssh_hostkey_fingerprint-1">public_key:ssh_hostkey_fingerprint/1</seealso>
+ calculates it.
+ </item>
+ </list>
+ </item>
+ <item>A tuple <c>{HashAlgoSpec, CallbackFun}</c>. The <c>HashAlgoSpec</c> specifies which hash algorithm
+ shall be used to calculate the fingerprint used in the call to the <c>CallbackFun</c>. The <c>HashAlgoSpec</c>
+ is either an atom or a list of atoms as the first argument in
+ <seealso marker="public_key:public_key#ssh_hostkey_fingerprint-2">public_key:ssh_hostkey_fingerprint/2</seealso>.
+ If it is a list of hash algorithm names, the <c>FingerPrint</c> argument in the <c>CallbackFun</c> will be
+ a list of fingerprints in the same order as the corresponding name in the <c>HashAlgoSpec</c> list.
+ </item>
+ </list>
</item>
<tag><c><![CDATA[{user_interaction, boolean()}]]></c></tag>
<item>
@@ -200,7 +226,7 @@
supplying a password. Defaults to <c>true</c>.
Even if user interaction is allowed it can be
suppressed by other options, such as <c>silently_accept_hosts</c>
- and <c>password</c>. However, those optins are not always desirable
+ and <c>password</c>. However, those options are not always desirable
to use from a security point of view.</p>
</item>
@@ -700,9 +726,10 @@
</func>
<func>
- <name>daemon_info(Daemon) -> {ok, [{port,Port}]} | {error,Error}</name>
+ <name>daemon_info(Daemon) -> {ok, [DaemonInfo]} | {error,Error}</name>
<fsummary>Get info about a daemon</fsummary>
<type>
+ <v>DaemonInfo = {port,Port::pos_integer()} | {listen_address, any|ip_address()} | {profile,atom()}</v>
<v>Port = integer()</v>
<v>Error = bad_daemon_ref</v>
</type>
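A minimal client-side sketch of the extended silently_accept_hosts forms documented above; host name, user, and password are hypothetical:

%% Minimal sketch; host, user and password are hypothetical. With a
%% {HashAlgoSpec, CallbackFun} value the fun gets one fingerprint per
%% requested digest type and decides whether to accept the host key.
Accept =
    fun(PeerName, FingerPrints) ->
            io:format("New host ~p, fingerprints ~p~n", [PeerName, FingerPrints]),
            true    % returning false rejects the key and closes the connection
    end,
{ok, _Conn} =
    ssh:connect("host.example.com", 22,
                [{silently_accept_hosts, {[md5, sha], Accept}},
                 {user, "alice"}, {password, "secret"}]).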
diff --git a/lib/ssh/doc/src/ssh_app.xml b/lib/ssh/doc/src/ssh_app.xml
index 5cc4c24889..5f710decc1 100644
--- a/lib/ssh/doc/src/ssh_app.xml
+++ b/lib/ssh/doc/src/ssh_app.xml
@@ -146,7 +146,10 @@
<item>diffie-hellman-group-exchange-sha1</item>
<item>diffie-hellman-group-exchange-sha256</item>
<item>diffie-hellman-group14-sha1</item>
- <item>diffie-hellman-group1-sha1</item>
+ <item>diffie-hellman-group14-sha256</item>
+ <item>diffie-hellman-group16-sha512</item>
+ <item>diffie-hellman-group18-sha512</item>
+ <item>(diffie-hellman-group1-sha1, retired: can be enabled with the <c>preferred_algorithms</c> option)</item>
</list>
</item>
@@ -157,7 +160,7 @@
<item>ecdsa-sha2-nistp384</item>
<item>ecdsa-sha2-nistp521</item>
<item>ssh-rsa</item>
- <item>ssh-dss</item>
+ <item>(ssh-dss, retired: can be enabled with the <c>preferred_algorithms</c> option)</item>
</list>
</item>
@@ -306,6 +309,8 @@
<p>Comment: Defines hmac-sha2-256 and hmac-sha2-512
</p>
</item>
+
+ <item>Work in progress: <url href="https://tools.ietf.org/html/draft-ietf-curdle-ssh-kex-sha2">https://tools.ietf.org/html/draft-ietf-curdle-ssh-kex-sha2-05</url>, Key Exchange (KEX) Method Updates and Recommendations for Secure Shell (SSH)</item>
</list>
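A minimal sketch (hypothetical host) of re-enabling one of the retired algorithms listed above through the preferred_algorithms option:

%% Minimal sketch; the host is hypothetical. The retired
%% 'diffie-hellman-group1-sha1' kex is appended to the current defaults,
%% which is how a retired algorithm can be opted back in.
Kex = proplists:get_value(kex, ssh:default_algorithms())
          ++ ['diffie-hellman-group1-sha1'],
{ok, _ConnRef} =
    ssh:connect("legacy.example.com", 22,
                [{preferred_algorithms, [{kex, Kex}]}]).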
diff --git a/lib/ssh/doc/src/using_ssh.xml b/lib/ssh/doc/src/using_ssh.xml
index 0861c641c7..864378b640 100644
--- a/lib/ssh/doc/src/using_ssh.xml
+++ b/lib/ssh/doc/src/using_ssh.xml
@@ -305,7 +305,7 @@ ok = erl_tar:close(HandleRead),
<code type="erl" >
-module(ssh_echo_server).
--behaviour(ssh_subsystem).
+-behaviour(ssh_daemon_channel).
-record(state, {
n,
id,
diff --git a/lib/ssh/src/Makefile b/lib/ssh/src/Makefile
index 7ab6f22424..f826fdfd9b 100644
--- a/lib/ssh/src/Makefile
+++ b/lib/ssh/src/Makefile
@@ -51,6 +51,7 @@ MODULES= \
ssh_sup \
sshc_sup \
sshd_sup \
+ ssh_options \
ssh_connection_sup \
ssh_connection \
ssh_connection_handler \
diff --git a/lib/ssh/src/ssh.app.src b/lib/ssh/src/ssh.app.src
index 76b7d8cd55..974292fde1 100644
--- a/lib/ssh/src/ssh.app.src
+++ b/lib/ssh/src/ssh.app.src
@@ -7,6 +7,7 @@
ssh_app,
ssh_acceptor,
ssh_acceptor_sup,
+ ssh_options,
ssh_auth,
ssh_message,
ssh_bits,
@@ -41,11 +42,10 @@
{env, []},
{mod, {ssh_app, []}},
{runtime_dependencies, [
- "crypto-3.3",
+ "crypto-3.7.3",
"erts-6.0",
"kernel-3.0",
- "public_key-1.1",
- "stdlib-3.1"
+ "public_key-1.4",
+ "stdlib-3.3"
]}]}.
-
diff --git a/lib/ssh/src/ssh.erl b/lib/ssh/src/ssh.erl
index 31e343e81b..e2a289d737 100644
--- a/lib/ssh/src/ssh.erl
+++ b/lib/ssh/src/ssh.erl
@@ -40,10 +40,24 @@
]).
%%% Type exports
--export_type([connection_ref/0,
- channel_id/0
+-export_type([ssh_daemon_ref/0,
+ ssh_connection_ref/0,
+ ssh_channel_id/0,
+ role/0,
+ subsystem_spec/0,
+ subsystem_name/0,
+ channel_callback/0,
+ channel_init_args/0,
+ algs_list/0,
+ alg_entry/0,
+ simple_algs/0,
+ double_algs/0
]).
+-opaque ssh_daemon_ref() :: daemon_ref() .
+-opaque ssh_connection_ref() :: connection_ref() .
+-opaque ssh_channel_id() :: channel_id().
+
%%--------------------------------------------------------------------
-spec start() -> ok | {error, term()}.
-spec start(permanent | transient | temporary) -> ok | {error, term()}.
@@ -71,55 +85,63 @@ stop() ->
application:stop(ssh).
%%--------------------------------------------------------------------
--spec connect(port(), proplists:proplist()) -> {ok, pid()} | {error, term()}.
+-spec connect(inet:socket(), proplists:proplist()) -> ok_error(connection_ref()).
+
+-spec connect(inet:socket(), proplists:proplist(), timeout()) -> ok_error(connection_ref())
+ ; (string(), inet:port_number(), proplists:proplist()) -> ok_error(connection_ref()).
--spec connect(port(), proplists:proplist(), timeout()) -> {ok, pid()} | {error, term()}
- ; (string(), integer(), proplists:proplist()) -> {ok, pid()} | {error, term()}.
+-spec connect(string(), inet:port_number(), proplists:proplist(), timeout()) -> ok_error(connection_ref()).
--spec connect(string(), integer(), proplists:proplist(), timeout()) -> {ok, pid()} | {error, term()}.
%%
%% Description: Starts an ssh connection.
%%--------------------------------------------------------------------
-connect(Socket, Options) ->
- connect(Socket, Options, infinity).
+connect(Socket, UserOptions) when is_port(Socket),
+ is_list(UserOptions) ->
+ connect(Socket, UserOptions, infinity).
-connect(Socket, Options, Timeout) when is_port(Socket) ->
- case handle_options(Options) of
+connect(Socket, UserOptions, Timeout) when is_port(Socket),
+ is_list(UserOptions) ->
+ case ssh_options:handle_options(client, UserOptions) of
{error, Error} ->
{error, Error};
- {_SocketOptions, SshOptions} ->
- case valid_socket_to_use(Socket, Options) of
+ Options ->
+ case valid_socket_to_use(Socket, ?GET_OPT(transport,Options)) of
ok ->
{ok, {Host,_Port}} = inet:sockname(Socket),
- Opts = [{user_pid,self()}, {host,fmt_host(Host)} | SshOptions],
+ Opts = ?PUT_INTERNAL_OPT([{user_pid,self()}, {host,fmt_host(Host)}], Options),
ssh_connection_handler:start_connection(client, Socket, Opts, Timeout);
{error,SockError} ->
{error,SockError}
end
end;
-connect(Host, Port, Options) when is_integer(Port), Port>0 ->
- connect(Host, Port, Options, infinity).
+connect(Host, Port, UserOptions) when is_integer(Port),
+ Port>0,
+ is_list(UserOptions) ->
+ connect(Host, Port, UserOptions, infinity).
-connect(Host, Port, Options, Timeout) ->
- case handle_options(Options) of
+connect(Host, Port, UserOptions, Timeout) when is_integer(Port),
+ Port>0,
+ is_list(UserOptions) ->
+ case ssh_options:handle_options(client, UserOptions) of
{error, _Reason} = Error ->
Error;
- {SocketOptions, SshOptions} ->
- {_, Transport, _} = TransportOpts =
- proplists:get_value(transport, Options, {tcp, gen_tcp, tcp_closed}),
- ConnectionTimeout = proplists:get_value(connect_timeout, Options, infinity),
- try Transport:connect(Host, Port, [ {active, false} | SocketOptions], ConnectionTimeout) of
+ Options ->
+ {_, Transport, _} = TransportOpts = ?GET_OPT(transport, Options),
+ ConnectionTimeout = ?GET_OPT(connect_timeout, Options),
+ SocketOpts = [{active,false} | ?GET_OPT(socket_options,Options)],
+ try Transport:connect(Host, Port, SocketOpts, ConnectionTimeout) of
{ok, Socket} ->
- Opts = [{user_pid,self()}, {host,Host} | SshOptions],
+ Opts = ?PUT_INTERNAL_OPT([{user_pid,self()}, {host,Host}], Options),
ssh_connection_handler:start_connection(client, Socket, Opts, Timeout);
{error, Reason} ->
{error, Reason}
catch
- exit:{function_clause, _} ->
+ exit:{function_clause, _F} ->
+ io:format('function_clause ~p~n',[_F]),
{error, {options, {transport, TransportOpts}}};
exit:badarg ->
- {error, {options, {socket_options, SocketOptions}}}
+ {error, {options, {socket_options, SocketOpts}}}
end
end.
@@ -148,9 +170,11 @@ channel_info(ConnectionRef, ChannelId, Options) ->
ssh_connection_handler:channel_info(ConnectionRef, ChannelId, Options).
%%--------------------------------------------------------------------
--spec daemon(integer()) -> {ok, pid()} | {error, term()}.
--spec daemon(integer()|port(), proplists:proplist()) -> {ok, pid()} | {error, term()}.
--spec daemon(any | inet:ip_address(), integer(), proplists:proplist()) -> {ok, pid()} | {error, term()}.
+-spec daemon(inet:port_number()) -> ok_error(daemon_ref()).
+-spec daemon(inet:port_number()|inet:socket(), proplists:proplist()) -> ok_error(daemon_ref()).
+-spec daemon(any | inet:ip_address(), inet:port_number(), proplists:proplist()) -> ok_error(daemon_ref())
+ ;(socket, inet:socket(), proplists:proplist()) -> ok_error(daemon_ref())
+ .
%% Description: Starts a server listening for SSH connections
%% on the given port.
@@ -158,34 +182,38 @@ channel_info(ConnectionRef, ChannelId, Options) ->
daemon(Port) ->
daemon(Port, []).
-daemon(Port, Options) when is_integer(Port) ->
- daemon(any, Port, Options);
-daemon(Socket, Options0) when is_port(Socket) ->
- Options = daemon_shell_opt(Options0),
- start_daemon(Socket, Options).
+daemon(Port, UserOptions) when is_integer(Port), Port >= 0 ->
+ daemon(any, Port, UserOptions);
+
+daemon(Socket, UserOptions) when is_port(Socket) ->
+ daemon(socket, Socket, UserOptions).
-daemon(HostAddr, Port, Options0) ->
- Options1 = daemon_shell_opt(Options0),
- {Host, Inet, Options} = daemon_host_inet_opt(HostAddr, Options1),
- start_daemon(Host, Port, Options, Inet).
+
+daemon(Host0, Port, UserOptions0) ->
+ {Host, UserOptions} = handle_daemon_args(Host0, UserOptions0),
+ start_daemon(Host, Port, ssh_options:handle_options(server, UserOptions)).
%%--------------------------------------------------------------------
+-spec daemon_info(daemon_ref()) -> ok_error( [{atom(), term()}] ).
+
daemon_info(Pid) ->
case catch ssh_system_sup:acceptor_supervisor(Pid) of
AsupPid when is_pid(AsupPid) ->
- [Port] =
- [Prt || {{ssh_acceptor_sup,any,Prt,default},
+ [{ListenAddr,Port,Profile}] =
+ [{LA,Prt,Prf} || {{ssh_acceptor_sup,LA,Prt,Prf},
_WorkerPid,worker,[ssh_acceptor]} <- supervisor:which_children(AsupPid)],
- {ok, [{port,Port}]};
-
+ {ok, [{port,Port},
+ {listen_address,ListenAddr},
+ {profile,Profile}
+ ]};
_ ->
{error,bad_daemon_ref}
end.
%%--------------------------------------------------------------------
--spec stop_listener(pid()) -> ok.
--spec stop_listener(inet:ip_address(), integer()) -> ok.
+-spec stop_listener(daemon_ref()) -> ok.
+-spec stop_listener(inet:ip_address(), inet:port_number()) -> ok.
%%
%% Description: Stops the listener, but leaves
%% existing connections started by the listener up and running.
@@ -198,8 +226,9 @@ stop_listener(Address, Port, Profile) ->
ssh_system_sup:stop_listener(Address, Port, Profile).
%%--------------------------------------------------------------------
--spec stop_daemon(pid()) -> ok.
--spec stop_daemon(inet:ip_address(), integer()) -> ok.
+-spec stop_daemon(daemon_ref()) -> ok.
+-spec stop_daemon(inet:ip_address(), inet:port_number()) -> ok.
+-spec stop_daemon(inet:ip_address(), inet:port_number(), atom()) -> ok.
%%
%% Description: Stops the listener and all connections started by
%% the listener.
@@ -210,10 +239,11 @@ stop_daemon(Address, Port) ->
ssh_system_sup:stop_system(Address, Port, ?DEFAULT_PROFILE).
stop_daemon(Address, Port, Profile) ->
ssh_system_sup:stop_system(Address, Port, Profile).
+
%%--------------------------------------------------------------------
--spec shell(port() | string()) -> _.
--spec shell(port() | string(), proplists:proplist()) -> _.
--spec shell(string(), integer(), proplists:proplist()) -> _.
+-spec shell(inet:socket() | string()) -> _.
+-spec shell(inet:socket() | string(), proplists:proplist()) -> _.
+-spec shell(string(), inet:port_number(), proplists:proplist()) -> _.
%% Host = string()
%% Port = integer()
@@ -254,6 +284,7 @@ start_shell(Error) ->
Error.
%%--------------------------------------------------------------------
+-spec default_algorithms() -> algs_list() .
%%--------------------------------------------------------------------
default_algorithms() ->
ssh_transport:default_algorithms().
@@ -261,109 +292,96 @@ default_algorithms() ->
%%--------------------------------------------------------------------
%%% Internal functions
%%--------------------------------------------------------------------
-valid_socket_to_use(Socket, Options) ->
- case proplists:get_value(transport, Options, {tcp, gen_tcp, tcp_closed}) of
- {tcp,_,_} ->
- %% Is this tcp-socket a valid socket?
- case {is_tcp_socket(Socket),
- {ok,[{active,false}]} == inet:getopts(Socket, [active])
- }
- of
- {true, true} ->
- ok;
- {true, false} ->
- {error, not_passive_mode};
- _ ->
- {error, not_tcp_socket}
- end;
- {L4,_,_} ->
- {error, {unsupported,L4}}
+handle_daemon_args(Host, UserOptions0) ->
+ case Host of
+ socket ->
+ {Host, UserOptions0};
+ any ->
+ {ok, Host0} = inet:gethostname(),
+ Inet = proplists:get_value(inet, UserOptions0, inet),
+ {Host0, [Inet | UserOptions0]};
+ {_,_,_,_} ->
+ {Host, [inet, {ip,Host} | UserOptions0]};
+ {_,_,_,_,_,_,_,_} ->
+ {Host, [inet6, {ip,Host} | UserOptions0]};
+ _ ->
+ error(badarg)
end.
-is_tcp_socket(Socket) -> {ok,[]} =/= inet:getopts(Socket, [delay_send]).
-
-
-
-daemon_shell_opt(Options) ->
- case proplists:get_value(shell, Options) of
- undefined ->
- [{shell, {shell, start, []}} | Options];
- _ ->
- Options
- end.
-
-daemon_host_inet_opt(HostAddr, Options1) ->
- case HostAddr of
- any ->
- {ok, Host0} = inet:gethostname(),
- {Host0, proplists:get_value(inet, Options1, inet), Options1};
- {_,_,_,_} ->
- {HostAddr, inet,
- [{ip, HostAddr} | Options1]};
- {_,_,_,_,_,_,_,_} ->
- {HostAddr, inet6,
- [{ip, HostAddr} | Options1]}
- end.
+%%%----------------------------------------------------------------
+valid_socket_to_use(Socket, {tcp,_,_}) ->
+ %% Is this tcp-socket a valid socket?
+ case {is_tcp_socket(Socket),
+ {ok,[{active,false}]} == inet:getopts(Socket, [active])
+ }
+ of
+ {true, true} ->
+ ok;
+ {true, false} ->
+ {error, not_passive_mode};
+ _ ->
+ {error, not_tcp_socket}
+ end;
+valid_socket_to_use(_, {L4,_,_}) ->
+ {error, {unsupported,L4}}.
-start_daemon(Socket, Options) ->
- case handle_options(Options) of
- {error, Error} ->
- {error, Error};
- {SocketOptions, SshOptions} ->
- case valid_socket_to_use(Socket, Options) of
- ok ->
- try
- do_start_daemon(Socket, [{role,server}|SshOptions], SocketOptions)
- catch
- throw:bad_fd -> {error,bad_fd};
- _C:_E -> {error,{cannot_start_daemon,_C,_E}}
- end;
- {error,SockError} ->
- {error,SockError}
- end
+
+is_tcp_socket(Socket) ->
+ case inet:getopts(Socket, [delay_send]) of
+ {ok,[_]} -> true;
+ _ -> false
end.
-start_daemon(Host, Port, Options, Inet) ->
- case handle_options(Options) of
- {error, _Reason} = Error ->
- Error;
- {SocketOptions, SshOptions}->
- try
- do_start_daemon(Host, Port, [{role,server}|SshOptions] , [Inet|SocketOptions])
- catch
- throw:bad_fd -> {error,bad_fd};
- _C:_E -> {error,{cannot_start_daemon,_C,_E}}
- end
+%%%----------------------------------------------------------------
+start_daemon(_, _, {error,Error}) ->
+ {error,Error};
+
+start_daemon(socket, Socket, Options) ->
+ case valid_socket_to_use(Socket, ?GET_OPT(transport,Options)) of
+ ok ->
+ try
+ do_start_daemon(Socket, Options)
+ catch
+ throw:bad_fd -> {error,bad_fd};
+ throw:bad_socket -> {error,bad_socket};
+ _C:_E -> {error,{cannot_start_daemon,_C,_E}}
+ end;
+ {error,SockError} ->
+ {error,SockError}
+ end;
+
+start_daemon(Host, Port, Options) ->
+ try
+ do_start_daemon(Host, Port, Options)
+ catch
+ throw:bad_fd -> {error,bad_fd};
+ throw:bad_socket -> {error,bad_socket};
+ _C:_E -> {error,{cannot_start_daemon,_C,_E}}
end.
-do_start_daemon(Socket, SshOptions, SocketOptions) ->
+
+do_start_daemon(Socket, Options) ->
{ok, {IP,Port}} =
try {ok,_} = inet:sockname(Socket)
catch
_:_ -> throw(bad_socket)
end,
Host = fmt_host(IP),
- Profile = proplists:get_value(profile, SshOptions, ?DEFAULT_PROFILE),
- Opts = [{asocket, Socket},
- {asock_owner,self()},
- {address, Host},
- {port, Port},
- {role, server},
- {socket_opts, SocketOptions},
- {ssh_opts, SshOptions}],
- {_, Callback, _} = proplists:get_value(transport, SshOptions, {tcp, gen_tcp, tcp_closed}),
+ Opts = ?PUT_INTERNAL_OPT([{asocket, Socket},
+ {asock_owner,self()},
+ {address, Host},
+ {port, Port},
+ {role, server}], Options),
+
+ Profile = ?GET_OPT(profile, Options),
case ssh_system_sup:system_supervisor(Host, Port, Profile) of
undefined ->
- %% It would proably make more sense to call the
- %% address option host but that is a too big change at the
- %% monent. The name is a legacy name!
try sshd_sup:start_child(Opts) of
{error, {already_started, _}} ->
{error, eaddrinuse};
Result = {ok,_} ->
- ssh_acceptor:handle_connection(Callback, Host, Port, Opts, Socket),
- Result;
+ call_ssh_acceptor_handle_connection(Host, Port, Opts, Socket, Result);
Result = {error, _} ->
Result
catch
@@ -376,57 +394,47 @@ do_start_daemon(Socket, SshOptions, SocketOptions) ->
{error, {already_started, _}} ->
{error, eaddrinuse};
{ok, _} ->
- ssh_acceptor:handle_connection(Callback, Host, Port, Opts, Socket),
- {ok, Sup};
+ call_ssh_acceptor_handle_connection(Host, Port, Opts, Socket, {ok,Sup});
Other ->
Other
end
end.
-do_start_daemon(Host0, Port0, SshOptions, SocketOptions) ->
+do_start_daemon(Host0, Port0, Options0) ->
{Host,Port1} =
try
- case proplists:get_value(fd, SocketOptions) of
+ case ?GET_SOCKET_OPT(fd, Options0) of
undefined ->
{Host0,Port0};
Fd when Port0==0 ->
- find_hostport(Fd);
- _ ->
- {Host0,Port0}
+ find_hostport(Fd)
end
catch
_:_ -> throw(bad_fd)
end,
- Profile = proplists:get_value(profile, SshOptions, ?DEFAULT_PROFILE),
- {Port, WaitRequestControl, Opts0} =
+ {Port, WaitRequestControl, Options1} =
case Port1 of
0 -> %% Allocate the socket here to get the port number...
- {_, Callback, _} =
- proplists:get_value(transport, SshOptions, {tcp, gen_tcp, tcp_closed}),
- {ok,LSock} = ssh_acceptor:callback_listen(Callback, 0, SocketOptions),
+ {ok,LSock} = ssh_acceptor:callback_listen(0, Options0),
{ok,{_,LPort}} = inet:sockname(LSock),
{LPort,
- {LSock,Callback},
- [{lsocket,LSock},{lsock_owner,self()}]
+ LSock,
+ ?PUT_INTERNAL_OPT({lsocket,{LSock,self()}}, Options0)
};
_ ->
- {Port1, false, []}
+ {Port1, false, Options0}
end,
- Opts = [{address, Host},
- {port, Port},
- {role, server},
- {socket_opts, SocketOptions},
- {ssh_opts, SshOptions} | Opts0],
+ Options = ?PUT_INTERNAL_OPT([{address, Host},
+ {port, Port},
+ {role, server}], Options1),
+ Profile = ?GET_OPT(profile, Options0),
case ssh_system_sup:system_supervisor(Host, Port, Profile) of
undefined ->
-	    %% It would probably make more sense to call the
-	    %% address option "host", but that is too big a change
-	    %% at the moment. The name is a legacy name!
- try sshd_sup:start_child(Opts) of
+ try sshd_sup:start_child(Options) of
{error, {already_started, _}} ->
{error, eaddrinuse};
Result = {ok,_} ->
- sync_request_control(WaitRequestControl),
+ sync_request_control(WaitRequestControl, Options),
Result;
Result = {error, _} ->
Result
@@ -434,22 +442,34 @@ do_start_daemon(Host0, Port0, SshOptions, SocketOptions) ->
exit:{noproc, _} ->
{error, ssh_not_started}
end;
- Sup ->
+ Sup ->
AccPid = ssh_system_sup:acceptor_supervisor(Sup),
- case ssh_acceptor_sup:start_child(AccPid, Opts) of
+ case ssh_acceptor_sup:start_child(AccPid, Options) of
{error, {already_started, _}} ->
{error, eaddrinuse};
{ok, _} ->
- sync_request_control(WaitRequestControl),
+ sync_request_control(WaitRequestControl, Options),
{ok, Sup};
Other ->
Other
end
end.
-sync_request_control(false) ->
+call_ssh_acceptor_handle_connection(Host, Port, Options, Socket, DefaultResult) ->
+ {_, Callback, _} = ?GET_OPT(transport, Options),
+ try ssh_acceptor:handle_connection(Callback, Host, Port, Options, Socket)
+ of
+ {error,Error} -> {error,Error};
+ _ -> DefaultResult
+ catch
+ C:R -> {error,{could_not_start_connection,{C,R}}}
+ end.
+
+
+sync_request_control(false, _Options) ->
ok;
-sync_request_control({LSock,Callback}) ->
+sync_request_control(LSock, Options) ->
+ {_, Callback, _} = ?GET_OPT(transport, Options),
receive
{request_control,LSock,ReqPid} ->
ok = Callback:controlling_process(LSock, ReqPid),
@@ -465,512 +485,6 @@ find_hostport(Fd) ->
ok = inet:close(S),
HostPort.
-
-handle_options(Opts) ->
- try handle_option(algs_compatibility(proplists:unfold(Opts)), [], []) of
- {Inet, Ssh} ->
- {handle_ip(Inet), Ssh}
- catch
- throw:Error ->
- Error
- end.
-
-
-algs_compatibility(Os0) ->
- %% Take care of old options 'public_key_alg' and 'pref_public_key_algs'
- case proplists:get_value(public_key_alg, Os0) of
- undefined ->
- Os0;
- A when is_atom(A) ->
- %% Skip public_key_alg if pref_public_key_algs is defined:
- Os = lists:keydelete(public_key_alg, 1, Os0),
- case proplists:get_value(pref_public_key_algs,Os) of
- undefined when A == 'ssh-rsa' ; A==ssh_rsa ->
- [{pref_public_key_algs,['ssh-rsa','ssh-dss']} | Os];
- undefined when A == 'ssh-dss' ; A==ssh_dsa ->
- [{pref_public_key_algs,['ssh-dss','ssh-rsa']} | Os];
- undefined ->
- throw({error, {eoptions, {public_key_alg,A} }});
- _ ->
- Os
- end;
- V ->
- throw({error, {eoptions, {public_key_alg,V} }})
- end.
-
-
-handle_option([], SocketOptions, SshOptions) ->
- {SocketOptions, SshOptions};
-handle_option([{system_dir, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{user_dir, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{user_dir_fun, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{silently_accept_hosts, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{user_interaction, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{connect_timeout, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{user, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{dsa_pass_phrase, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{rsa_pass_phrase, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{password, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{user_passwords, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{pwdfun, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{key_cb, {Module, Options}} | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option({key_cb, Module}),
- handle_ssh_priv_option({key_cb_private, Options}) |
- SshOptions]);
-handle_option([{key_cb, Module} | Rest], SocketOptions, SshOptions) ->
- handle_option([{key_cb, {Module, []}} | Rest], SocketOptions, SshOptions);
-handle_option([{keyboard_interact_fun, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-%%Backwards compatibility
-handle_option([{allow_user_interaction, Value} | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option({user_interaction, Value}) | SshOptions]);
-handle_option([{infofun, _} = Opt | Rest],SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{connectfun, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{disconnectfun, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{unexpectedfun, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{failfun, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{ssh_msg_debug_fun, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-%% Backwards compatibility: there should be no underscore between "ip" and "v6" in the API
-handle_option([{ip_v6_disabled, Value} | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option({ipv6_disabled, Value}) | SshOptions]);
-handle_option([{ipv6_disabled, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{transport, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{subsystems, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{ssh_cli, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{shell, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{exec, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{auth_methods, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{auth_method_kb_interactive_data, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{pref_public_key_algs, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{preferred_algorithms,_} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{dh_gex_groups,_} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{dh_gex_limits,_} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{quiet_mode, _} = Opt|Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{idle_time, _} = Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{rekey_limit, _} = Opt|Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{max_sessions, _} = Opt|Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{max_channels, _} = Opt|Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{negotiation_timeout, _} = Opt|Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{parallel_login, _} = Opt|Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-%% (Is handled by proplists:unfold above:)
-%% handle_option([parallel_login|Rest], SocketOptions, SshOptions) ->
-%% handle_option(Rest, SocketOptions, [handle_ssh_option({parallel_login,true}) | SshOptions]);
-handle_option([{minimal_remote_max_packet_size, _} = Opt|Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{id_string, _ID} = Opt|Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{profile, _ID} = Opt|Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{max_random_length_padding, _Bool} = Opt|Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([{tstflg, _} = Opt|Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, SocketOptions, [handle_ssh_option(Opt) | SshOptions]);
-handle_option([Opt | Rest], SocketOptions, SshOptions) ->
- handle_option(Rest, [handle_inet_option(Opt) | SocketOptions], SshOptions).
-
-
-handle_ssh_option({tstflg,_F} = Opt) -> Opt;
-handle_ssh_option({minimal_remote_max_packet_size, Value} = Opt) when is_integer(Value), Value >=0 ->
- Opt;
-handle_ssh_option({system_dir, Value} = Opt) when is_list(Value) ->
- check_dir(Opt);
-handle_ssh_option({user_dir, Value} = Opt) when is_list(Value) ->
- check_dir(Opt);
-handle_ssh_option({user_dir_fun, Value} = Opt) when is_function(Value) ->
- Opt;
-handle_ssh_option({silently_accept_hosts, Value} = Opt) when is_boolean(Value) ->
- Opt;
-handle_ssh_option({silently_accept_hosts, Value} = Opt) when is_function(Value,2) ->
- Opt;
-handle_ssh_option({silently_accept_hosts, {DigestAlg,Value}} = Opt) when is_function(Value,2) ->
- case lists:member(DigestAlg, [md5, sha, sha224, sha256, sha384, sha512]) of
- true ->
- Opt;
- false ->
- throw({error, {eoptions, Opt}})
- end;
-handle_ssh_option({user_interaction, Value} = Opt) when is_boolean(Value) ->
- Opt;
-handle_ssh_option({preferred_algorithms,[_|_]} = Opt) ->
- handle_pref_algs(Opt);
-
-handle_ssh_option({dh_gex_groups,L0}) when is_list(L0) ->
- {dh_gex_groups,
- collect_per_size(
- lists:foldl(
- fun({N,G,P}, Acc) when is_integer(N),N>0,
- is_integer(G),G>0,
- is_integer(P),P>0 ->
- [{N,{G,P}} | Acc];
- ({N,{G,P}}, Acc) when is_integer(N),N>0,
- is_integer(G),G>0,
- is_integer(P),P>0 ->
- [{N,{G,P}} | Acc];
- ({N,GPs}, Acc) when is_list(GPs) ->
- lists:foldr(fun({Gi,Pi}, Acci) when is_integer(Gi),Gi>0,
- is_integer(Pi),Pi>0 ->
- [{N,{Gi,Pi}} | Acci]
- end, Acc, GPs)
- end, [], L0))};
-
-handle_ssh_option({dh_gex_groups,{Tag,File=[C|_]}}=Opt) when is_integer(C), C>0,
- Tag == file ;
- Tag == ssh_moduli_file ->
- {ok,GroupDefs} =
- case Tag of
- file ->
- file:consult(File);
- ssh_moduli_file ->
- case file:open(File,[read]) of
- {ok,D} ->
- try
- {ok,Moduli} = read_moduli_file(D, 1, []),
- file:close(D),
- {ok, Moduli}
- catch
- _:_ ->
- throw({error, {{eoptions, Opt}, "Bad format in file "++File}})
- end;
- {error,enoent} ->
- throw({error, {{eoptions, Opt}, "File not found:"++File}});
- {error,Error} ->
- throw({error, {{eoptions, Opt}, io_lib:format("Error reading file ~s: ~p",[File,Error])}})
- end
- end,
-
- try
- handle_ssh_option({dh_gex_groups,GroupDefs})
- catch
- _:_ ->
- throw({error, {{eoptions, Opt}, "Bad format in file: "++File}})
- end;
-
-
-handle_ssh_option({dh_gex_limits,{Min,Max}} = Opt) when is_integer(Min), Min>0,
- is_integer(Max), Max>=Min ->
- %% Server
- Opt;
-handle_ssh_option({dh_gex_limits,{Min,I,Max}} = Opt) when is_integer(Min), Min>0,
- is_integer(I), I>=Min,
- is_integer(Max), Max>=I ->
- %% Client
- Opt;
-handle_ssh_option({pref_public_key_algs, Value} = Opt) when is_list(Value), length(Value) >= 1 ->
- case handle_user_pref_pubkey_algs(Value, []) of
- {true, NewOpts} ->
- {pref_public_key_algs, NewOpts};
- _ ->
- throw({error, {eoptions, Opt}})
- end;
-handle_ssh_option({connect_timeout, Value} = Opt) when is_integer(Value); Value == infinity ->
- Opt;
-handle_ssh_option({max_sessions, Value} = Opt) when is_integer(Value), Value>0 ->
- Opt;
-handle_ssh_option({max_channels, Value} = Opt) when is_integer(Value), Value>0 ->
- Opt;
-handle_ssh_option({negotiation_timeout, Value} = Opt) when is_integer(Value); Value == infinity ->
- Opt;
-handle_ssh_option({parallel_login, Value} = Opt) when Value==true ; Value==false ->
- Opt;
-handle_ssh_option({user, Value} = Opt) when is_list(Value) ->
- Opt;
-handle_ssh_option({dsa_pass_phrase, Value} = Opt) when is_list(Value) ->
- Opt;
-handle_ssh_option({rsa_pass_phrase, Value} = Opt) when is_list(Value) ->
- Opt;
-handle_ssh_option({password, Value} = Opt) when is_list(Value) ->
- Opt;
-handle_ssh_option({user_passwords, Value} = Opt) when is_list(Value)->
- Opt;
-handle_ssh_option({pwdfun, Value} = Opt) when is_function(Value,2) ->
- Opt;
-handle_ssh_option({pwdfun, Value} = Opt) when is_function(Value,4) ->
- Opt;
-handle_ssh_option({key_cb, Value} = Opt) when is_atom(Value) ->
- Opt;
-handle_ssh_option({key_cb, {CallbackMod, CallbackOptions}} = Opt) when is_atom(CallbackMod),
- is_list(CallbackOptions) ->
- Opt;
-handle_ssh_option({keyboard_interact_fun, Value} = Opt) when is_function(Value,3) ->
- Opt;
-handle_ssh_option({compression, Value} = Opt) when is_atom(Value) ->
- Opt;
-handle_ssh_option({exec, {Module, Function, _}} = Opt) when is_atom(Module),
- is_atom(Function) ->
- Opt;
-handle_ssh_option({exec, Function} = Opt) when is_function(Function) ->
- Opt;
-handle_ssh_option({auth_methods, Value} = Opt) when is_list(Value) ->
- Opt;
-handle_ssh_option({auth_method_kb_interactive_data, {Name,Instruction,Prompt,Echo}} = Opt) when is_list(Name),
- is_list(Instruction),
- is_list(Prompt),
- is_boolean(Echo) ->
- Opt;
-handle_ssh_option({auth_method_kb_interactive_data, F} = Opt) when is_function(F,3) ->
- Opt;
-handle_ssh_option({infofun, Value} = Opt) when is_function(Value) ->
- Opt;
-handle_ssh_option({connectfun, Value} = Opt) when is_function(Value) ->
- Opt;
-handle_ssh_option({disconnectfun, Value} = Opt) when is_function(Value) ->
- Opt;
-handle_ssh_option({unexpectedfun, Value} = Opt) when is_function(Value,2) ->
- Opt;
-handle_ssh_option({failfun, Value} = Opt) when is_function(Value) ->
- Opt;
-handle_ssh_option({ssh_msg_debug_fun, Value} = Opt) when is_function(Value,4) ->
- Opt;
-
-handle_ssh_option({ipv6_disabled, Value} = Opt) when is_boolean(Value) ->
- throw({error, {{ipv6_disabled, Opt}, option_no_longer_valid_use_inet_option_instead}});
-handle_ssh_option({transport, {Protocol, Cb, ClosTag}} = Opt) when is_atom(Protocol),
- is_atom(Cb),
- is_atom(ClosTag) ->
- Opt;
-handle_ssh_option({subsystems, Value} = Opt) when is_list(Value) ->
- Opt;
-handle_ssh_option({ssh_cli, {Cb, _}}= Opt) when is_atom(Cb) ->
- Opt;
-handle_ssh_option({ssh_cli, no_cli} = Opt) ->
- Opt;
-handle_ssh_option({shell, {Module, Function, _}} = Opt) when is_atom(Module),
- is_atom(Function) ->
- Opt;
-handle_ssh_option({shell, Value} = Opt) when is_function(Value) ->
- Opt;
-handle_ssh_option({quiet_mode, Value} = Opt) when is_boolean(Value) ->
- Opt;
-handle_ssh_option({idle_time, Value} = Opt) when is_integer(Value), Value > 0 ->
- Opt;
-handle_ssh_option({rekey_limit, Value} = Opt) when is_integer(Value) ->
- Opt;
-handle_ssh_option({id_string, random}) ->
- {id_string, {random,2,5}}; %% 2 - 5 random characters
-handle_ssh_option({id_string, ID} = Opt) when is_list(ID) ->
- Opt;
-handle_ssh_option({max_random_length_padding, Value} = Opt) when is_integer(Value),
- Value =< 255 ->
- Opt;
-handle_ssh_option({profile, Value} = Opt) when is_atom(Value) ->
- Opt;
-handle_ssh_option(Opt) ->
- throw({error, {eoptions, Opt}}).
-
-handle_ssh_priv_option({key_cb_private, Value} = Opt) when is_list(Value) ->
- Opt.
-
-handle_inet_option({active, _} = Opt) ->
- throw({error, {{eoptions, Opt}, "SSH has built in flow control, "
- "and active is handled internally, user is not allowed"
- "to specify this option"}});
-
-handle_inet_option({inet, Value}) when (Value == inet) or (Value == inet6) ->
- Value;
-handle_inet_option({reuseaddr, _} = Opt) ->
- throw({error, {{eoptions, Opt},"Is set internally, user is not allowed"
- "to specify this option"}});
-%% Option verified by inet
-handle_inet_option(Opt) ->
- Opt.
-
-
-%% Check preferred algs
-
-handle_pref_algs({preferred_algorithms,Algs}) ->
- try alg_duplicates(Algs, [], []) of
- [] ->
- {preferred_algorithms,
- [try ssh_transport:supported_algorithms(Key)
- of
- DefAlgs -> handle_pref_alg(Key,Vals,DefAlgs)
- catch
- _:_ -> throw({error, {{eoptions, {preferred_algorithms,Key}},
- "Bad preferred_algorithms key"}})
- end || {Key,Vals} <- Algs]
- };
-
- Dups ->
- throw({error, {{eoptions, {preferred_algorithms,Dups}}, "Duplicates found"}})
- catch
- _:_ ->
- throw({error, {{eoptions, preferred_algorithms}, "Malformed"}})
- end.
-
-alg_duplicates([{K,V}|KVs], Ks, Dups0) ->
- Dups =
- case lists:member(K,Ks) of
- true ->
- [K|Dups0];
- false ->
- Dups0
- end,
- case V--lists:usort(V) of
- [] ->
- alg_duplicates(KVs, [K|Ks], Dups);
- Ds ->
- alg_duplicates(KVs, [K|Ks], Dups++Ds)
- end;
-alg_duplicates([], _Ks, Dups) ->
- Dups.
-
-handle_pref_alg(Key,
- Vs=[{client2server,C2Ss=[_|_]},{server2client,S2Cs=[_|_]}],
- [{client2server,Sup_C2Ss},{server2client,Sup_S2Cs}]
- ) ->
- chk_alg_vs(Key, C2Ss, Sup_C2Ss),
- chk_alg_vs(Key, S2Cs, Sup_S2Cs),
- {Key, Vs};
-
-handle_pref_alg(Key,
- Vs=[{server2client,[_|_]},{client2server,[_|_]}],
- Sup=[{client2server,_},{server2client,_}]
- ) ->
- handle_pref_alg(Key, lists:reverse(Vs), Sup);
-
-handle_pref_alg(Key,
- Vs=[V|_],
- Sup=[{client2server,_},{server2client,_}]
- ) when is_atom(V) ->
- handle_pref_alg(Key, [{client2server,Vs},{server2client,Vs}], Sup);
-
-handle_pref_alg(Key,
- Vs=[V|_],
- Sup=[S|_]
- ) when is_atom(V), is_atom(S) ->
- chk_alg_vs(Key, Vs, Sup),
- {Key, Vs};
-
-handle_pref_alg(Key, Vs, _) ->
- throw({error, {{eoptions, {preferred_algorithms,[{Key,Vs}]}}, "Badly formed list"}}).
-
-chk_alg_vs(OptKey, Values, SupportedValues) ->
- case (Values -- SupportedValues) of
- [] -> Values;
- Bad -> throw({error, {{eoptions, {OptKey,Bad}}, "Unsupported value(s) found"}})
- end.
-
-handle_ip(Inet) -> %% Default to ipv4
- case lists:member(inet, Inet) of
- true ->
- Inet;
- false ->
- case lists:member(inet6, Inet) of
- true ->
- Inet;
- false ->
- [inet | Inet]
- end
- end.
-
-check_dir({_,Dir} = Opt) ->
- case directory_exist_readable(Dir) of
- ok ->
- Opt;
- {error,Error} ->
- throw({error, {eoptions,{Opt,Error}}})
- end.
-
-directory_exist_readable(Dir) ->
- case file:read_file_info(Dir) of
- {ok, #file_info{type = directory,
- access = Access}} ->
- case Access of
- read -> ok;
- read_write -> ok;
- _ -> {error, eacces}
- end;
-
- {ok, #file_info{}}->
- {error, enotdir};
-
- {error, Error} ->
- {error, Error}
- end.
-
-
-
-collect_per_size(L) ->
- lists:foldr(
- fun({Sz,GP}, [{Sz,GPs}|Acc]) -> [{Sz,[GP|GPs]}|Acc];
- ({Sz,GP}, Acc) -> [{Sz,[GP]}|Acc]
- end, [], lists:sort(L)).
-
-read_moduli_file(D, I, Acc) ->
- case io:get_line(D,"") of
- {error,Error} ->
- {error,Error};
- eof ->
- {ok, Acc};
- "#" ++ _ -> read_moduli_file(D, I+1, Acc);
- <<"#",_/binary>> -> read_moduli_file(D, I+1, Acc);
- Data ->
- Line = if is_binary(Data) -> binary_to_list(Data);
- is_list(Data) -> Data
- end,
- try
- [_Time,_Type,_Tests,_Tries,Size,G,P] = string:tokens(Line," \r\n"),
- M = {list_to_integer(Size),
- {list_to_integer(G), list_to_integer(P,16)}
- },
- read_moduli_file(D, I+1, [M|Acc])
- catch
- _:_ ->
- read_moduli_file(D, I+1, Acc)
- end
- end.
-
-handle_user_pref_pubkey_algs([], Acc) ->
- {true, lists:reverse(Acc)};
-handle_user_pref_pubkey_algs([H|T], Acc) ->
- case lists:member(H, ?SUPPORTED_USER_KEYS) of
- true ->
- handle_user_pref_pubkey_algs(T, [H| Acc]);
-
- false when H==ssh_dsa -> handle_user_pref_pubkey_algs(T, ['ssh-dss'| Acc]);
- false when H==ssh_rsa -> handle_user_pref_pubkey_algs(T, ['ssh-rsa'| Acc]);
-
- false ->
- false
- end.
-
fmt_host({A,B,C,D}) ->
lists:concat([A,".",B,".",C,".",D]);
fmt_host(T={_,_,_,_,_,_,_,_}) ->
diff --git a/lib/ssh/src/ssh.hrl b/lib/ssh/src/ssh.hrl
index 4cd91177f6..c1ba58ed40 100644
--- a/lib/ssh/src/ssh.hrl
+++ b/lib/ssh/src/ssh.hrl
@@ -33,6 +33,10 @@
-define(REKEY_DATA_TIMOUT, 60000).
-define(DEFAULT_PROFILE, default).
+-define(DEFAULT_TRANSPORT, {tcp, gen_tcp, tcp_closed} ).
+
+-define(MAX_RND_PADDING_LEN, 15).
+
-define(SUPPORTED_AUTH_METHODS, "publickey,keyboard-interactive,password").
-define(SUPPORTED_USER_KEYS, ['ssh-rsa','ssh-dss','ecdsa-sha2-nistp256','ecdsa-sha2-nistp384','ecdsa-sha2-nistp521']).
@@ -64,10 +68,49 @@
-define(string_utf8(X), << ?STRING(unicode:characters_to_binary(X)) >> ).
-define(binary(X), << ?STRING(X) >>).
+%% Cipher details
-define(SSH_CIPHER_NONE, 0).
-define(SSH_CIPHER_3DES, 3).
-define(SSH_CIPHER_AUTHFILE, ?SSH_CIPHER_3DES).
+%% Option access macros
+-define(do_get_opt(C,K,O), ssh_options:get_value(C,K,O, ?MODULE,?LINE)).
+-define(do_get_opt(C,K,O,D), ssh_options:get_value(C,K,O,D,?MODULE,?LINE)).
+
+-define(GET_OPT(Key,Opts), ?do_get_opt(user_options, Key,Opts ) ).
+-define(GET_INTERNAL_OPT(Key,Opts), ?do_get_opt(internal_options,Key,Opts ) ).
+-define(GET_INTERNAL_OPT(Key,Opts,Def), ?do_get_opt(internal_options,Key,Opts,Def) ).
+-define(GET_SOCKET_OPT(Key,Opts), ?do_get_opt(socket_options, Key,Opts ) ).
+-define(GET_SOCKET_OPT(Key,Opts,Def), ?do_get_opt(socket_options, Key,Opts,Def) ).
+
+-define(do_put_opt(C,KV,O), ssh_options:put_value(C,KV,O, ?MODULE,?LINE)).
+
+-define(PUT_OPT(KeyVal,Opts), ?do_put_opt(user_options, KeyVal,Opts) ).
+-define(PUT_INTERNAL_OPT(KeyVal,Opts), ?do_put_opt(internal_options,KeyVal,Opts) ).
+-define(PUT_SOCKET_OPT(KeyVal,Opts), ?do_put_opt(socket_options, KeyVal,Opts) ).
+
+%% Types
+-type role() :: client | server .
+-type ok_error(SuccessType) :: {ok, SuccessType} | {error, any()} .
+-type daemon_ref() :: pid() .
+
+-type subsystem_spec() :: {subsystem_name(), {channel_callback(), channel_init_args()}} .
+-type subsystem_name() :: string() .
+-type channel_callback() :: atom() .
+-type channel_init_args() :: list() .
+
+-type algs_list() :: list( alg_entry() ).
+-type alg_entry() :: {kex, simple_algs()}
+ | {public_key, simple_algs()}
+ | {cipher, double_algs()}
+ | {mac, double_algs()}
+ | {compression, double_algs()} .
+-type simple_algs() :: list( atom() ) .
+-type double_algs()  :: list( {client2server,simple_algs()} | {server2client,simple_algs()} )
+ | simple_algs() .
+
+
+%% Records
-record(ssh,
{
role, %% client | server
@@ -127,7 +170,7 @@
recv_sequence = 0,
keyex_key,
keyex_info,
- random_length_padding = 15, % From RFC 4253 section 6.
+ random_length_padding = ?MAX_RND_PADDING_LEN, % From RFC 4253 section 6.
%% User auth
user,
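
The ?GET_OPT/?GET_INTERNAL_OPT/?GET_SOCKET_OPT and ?PUT_*_OPT macros above route every option access through the new ssh_options module, keyed by an option class (user_options, internal_options or socket_options) and tagged with ?MODULE/?LINE for error reporting. The sketch below illustrates only the intended get/put semantics; it assumes, purely for illustration, that the options term behaves like a map with one sub-map per class, which is not necessarily how ssh_options stores it internally:

    -module(opt_classes_demo).
    -export([demo/0]).

    %% Illustrative stand-ins for ssh_options:get_value/put_value.
    get_value(Class, Key, Opts) ->
        maps:get(Key, maps:get(Class, Opts)).

    put_value(Class, {Key, Val}, Opts) ->
        Sub = maps:get(Class, Opts),
        Opts#{Class := Sub#{Key => Val}};
    put_value(Class, KVs, Opts) when is_list(KVs) ->
        lists:foldl(fun(KV, Acc) -> put_value(Class, KV, Acc) end, Opts, KVs).

    demo() ->
        Opts0 = #{user_options     => #{profile => default},
                  internal_options => #{},
                  socket_options   => #{}},
        %% roughly what ?PUT_INTERNAL_OPT([{address,..},{port,..}], Opts0) does
        Opts1 = put_value(internal_options,
                          [{address, "localhost"}, {port, 2222}], Opts0),
        default     = get_value(user_options, profile, Opts1),     % ?GET_OPT
        "localhost" = get_value(internal_options, address, Opts1), % ?GET_INTERNAL_OPT
        2222        = get_value(internal_options, port, Opts1),
        ok.
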
diff --git a/lib/ssh/src/ssh_acceptor.erl b/lib/ssh/src/ssh_acceptor.erl
index 13c9d9af4a..42be18f2ad 100644
--- a/lib/ssh/src/ssh_acceptor.erl
+++ b/lib/ssh/src/ssh_acceptor.erl
@@ -25,56 +25,63 @@
-include("ssh.hrl").
%% Internal application API
--export([start_link/5,
+-export([start_link/4,
number_of_connections/1,
- callback_listen/3,
+ callback_listen/2,
handle_connection/5]).
%% spawn export
--export([acceptor_init/6, acceptor_loop/6]).
+-export([acceptor_init/5, acceptor_loop/6]).
-define(SLEEP_TIME, 200).
%%====================================================================
%% Internal application API
%%====================================================================
-start_link(Port, Address, SockOpts, Opts, AcceptTimeout) ->
- Args = [self(), Port, Address, SockOpts, Opts, AcceptTimeout],
+start_link(Port, Address, Options, AcceptTimeout) ->
+ Args = [self(), Port, Address, Options, AcceptTimeout],
proc_lib:start_link(?MODULE, acceptor_init, Args).
%%--------------------------------------------------------------------
%%% Internal functions
%%--------------------------------------------------------------------
-acceptor_init(Parent, Port, Address, SockOpts, Opts, AcceptTimeout) ->
- {_, Callback, _} =
- proplists:get_value(transport, Opts, {tcp, gen_tcp, tcp_closed}),
-
- SockOwner = proplists:get_value(lsock_owner, Opts),
- LSock = proplists:get_value(lsocket, Opts),
- UseExistingSocket =
- case catch inet:sockname(LSock) of
- {ok,{_,Port}} -> is_pid(SockOwner);
- _ -> false
- end,
-
- case UseExistingSocket of
- true ->
- proc_lib:init_ack(Parent, {ok, self()}),
+acceptor_init(Parent, Port, Address, Opts, AcceptTimeout) ->
+ {_, Callback, _} = ?GET_OPT(transport, Opts),
+ try
+ {LSock0,SockOwner0} = ?GET_INTERNAL_OPT(lsocket, Opts),
+ true = is_pid(SockOwner0),
+ {ok,{_,Port}} = inet:sockname(LSock0),
+ {LSock0, SockOwner0}
+ of
+ {LSock, SockOwner} ->
+ %% Use existing socket
+ proc_lib:init_ack(Parent, {ok, self()}),
request_ownership(LSock, SockOwner),
- acceptor_loop(Callback, Port, Address, Opts, LSock, AcceptTimeout);
-
- false ->
- case (catch do_socket_listen(Callback, Port, SockOpts)) of
- {ok, ListenSocket} ->
- proc_lib:init_ack(Parent, {ok, self()}),
- acceptor_loop(Callback,
- Port, Address, Opts, ListenSocket, AcceptTimeout);
- Error ->
- proc_lib:init_ack(Parent, Error),
- error
- end
+ acceptor_loop(Callback, Port, Address, Opts, LSock, AcceptTimeout)
+ catch
+ error:{badkey,lsocket} ->
+ %% Open new socket
+ try
+ socket_listen(Port, Opts)
+ of
+ {ok, ListenSocket} ->
+ proc_lib:init_ack(Parent, {ok, self()}),
+ {_, Callback, _} = ?GET_OPT(transport, Opts),
+ acceptor_loop(Callback,
+ Port, Address, Opts, ListenSocket, AcceptTimeout);
+ {error,Error} ->
+ proc_lib:init_ack(Parent, Error),
+ {error,Error}
+ catch
+ _:_ ->
+ {error,listen_socket_failed}
+ end;
+
+ _:_ ->
+ {error,use_existing_socket_failed}
end.
+
request_ownership(LSock, SockOwner) ->
SockOwner ! {request_control,LSock,self()},
receive
@@ -82,23 +89,25 @@ request_ownership(LSock, SockOwner) ->
end.
-do_socket_listen(Callback, Port0, Opts) ->
- Port =
- case proplists:get_value(fd, Opts) of
- undefined -> Port0;
- _ -> 0
- end,
- callback_listen(Callback, Port, Opts).
-
-callback_listen(Callback, Port, Opts0) ->
- Opts = [{active, false}, {reuseaddr,true} | Opts0],
- case Callback:listen(Port, Opts) of
+socket_listen(Port0, Opts) ->
+ Port = case ?GET_SOCKET_OPT(fd, Opts) of
+ undefined -> Port0;
+ _ -> 0
+ end,
+ callback_listen(Port, Opts).
+
+
+callback_listen(Port, Opts0) ->
+ {_, Callback, _} = ?GET_OPT(transport, Opts0),
+ Opts = ?PUT_SOCKET_OPT([{active, false}, {reuseaddr,true}], Opts0),
+ SockOpts = ?GET_OPT(socket_options, Opts),
+ case Callback:listen(Port, SockOpts) of
{error, nxdomain} ->
- Callback:listen(Port, lists:delete(inet6, Opts));
+ Callback:listen(Port, lists:delete(inet6, SockOpts));
{error, enetunreach} ->
- Callback:listen(Port, lists:delete(inet6, Opts));
+ Callback:listen(Port, lists:delete(inet6, SockOpts));
{error, eafnosupport} ->
- Callback:listen(Port, lists:delete(inet6, Opts));
+ Callback:listen(Port, lists:delete(inet6, SockOpts));
Other ->
Other
end.
@@ -120,21 +129,21 @@ acceptor_loop(Callback, Port, Address, Opts, ListenSocket, AcceptTimeout) ->
end.
handle_connection(Callback, Address, Port, Options, Socket) ->
- SSHopts = proplists:get_value(ssh_opts, Options, []),
- Profile = proplists:get_value(profile, SSHopts, ?DEFAULT_PROFILE),
+ Profile = ?GET_OPT(profile, Options),
SystemSup = ssh_system_sup:system_supervisor(Address, Port, Profile),
- MaxSessions = proplists:get_value(max_sessions,SSHopts,infinity),
+ MaxSessions = ?GET_OPT(max_sessions, Options),
case number_of_connections(SystemSup) < MaxSessions of
true ->
{ok, SubSysSup} = ssh_system_sup:start_subsystem(SystemSup, Options),
ConnectionSup = ssh_subsystem_sup:connection_supervisor(SubSysSup),
- Timeout = proplists:get_value(negotiation_timeout, SSHopts, 2*60*1000),
+ NegTimeout = ?GET_OPT(negotiation_timeout, Options),
ssh_connection_handler:start_connection(server, Socket,
- [{supervisors, [{system_sup, SystemSup},
- {subsystem_sup, SubSysSup},
- {connection_sup, ConnectionSup}]}
- | Options], Timeout);
+ ?PUT_INTERNAL_OPT(
+ {supervisors, [{system_sup, SystemSup},
+ {subsystem_sup, SubSysSup},
+ {connection_sup, ConnectionSup}]},
+ Options), NegTimeout);
false ->
Callback:close(Socket),
IPstr = if is_tuple(Address) -> inet:ntoa(Address);
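
acceptor_init/5 above separates "reuse a handed-over listen socket" from "open a new one" by letting ?GET_INTERNAL_OPT(lsocket, Opts) fail on a missing key and catching error:{badkey,lsocket}. A small sketch of that control-flow idiom, assuming a plain map stands in for the internal options:

    -module(badkey_flow_demo).
    -export([demo/0]).

    %% {existing, LSock} when a listen socket (plus owner pid) was handed
    %% over in the options; new_listen when the key is absent.
    listen_source(InternalOpts) ->
        try maps:get(lsocket, InternalOpts) of
            {LSock, Owner} when is_pid(Owner) -> {existing, LSock}
        catch
            error:{badkey, lsocket} -> new_listen
        end.

    demo() ->
        new_listen = listen_source(#{}),
        {existing, fake_lsock} = listen_source(#{lsocket => {fake_lsock, self()}}),
        ok.
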
diff --git a/lib/ssh/src/ssh_acceptor_sup.erl b/lib/ssh/src/ssh_acceptor_sup.erl
index 129f85a3e0..77f7826918 100644
--- a/lib/ssh/src/ssh_acceptor_sup.erl
+++ b/lib/ssh/src/ssh_acceptor_sup.erl
@@ -44,14 +44,13 @@
start_link(Servers) ->
supervisor:start_link(?MODULE, [Servers]).
-start_child(AccSup, ServerOpts) ->
- Spec = child_spec(ServerOpts),
+start_child(AccSup, Options) ->
+ Spec = child_spec(Options),
case supervisor:start_child(AccSup, Spec) of
{error, already_present} ->
- Address = proplists:get_value(address, ServerOpts),
- Port = proplists:get_value(port, ServerOpts),
- Profile = proplists:get_value(profile,
- proplists:get_value(ssh_opts, ServerOpts), ?DEFAULT_PROFILE),
+ Address = ?GET_INTERNAL_OPT(address, Options),
+ Port = ?GET_INTERNAL_OPT(port, Options),
+ Profile = ?GET_OPT(profile, Options),
stop_child(AccSup, Address, Port, Profile),
supervisor:start_child(AccSup, Spec);
Reply ->
@@ -70,24 +69,23 @@ stop_child(AccSup, Address, Port, Profile) ->
%%%=========================================================================
%%% Supervisor callback
%%%=========================================================================
-init([ServerOpts]) ->
+init([Options]) ->
RestartStrategy = one_for_one,
MaxR = 10,
MaxT = 3600,
- Children = [child_spec(ServerOpts)],
+ Children = [child_spec(Options)],
{ok, {{RestartStrategy, MaxR, MaxT}, Children}}.
%%%=========================================================================
%%% Internal functions
%%%=========================================================================
-child_spec(ServerOpts) ->
- Address = proplists:get_value(address, ServerOpts),
- Port = proplists:get_value(port, ServerOpts),
- Timeout = proplists:get_value(timeout, ServerOpts, ?DEFAULT_TIMEOUT),
- Profile = proplists:get_value(profile, proplists:get_value(ssh_opts, ServerOpts), ?DEFAULT_PROFILE),
+child_spec(Options) ->
+ Address = ?GET_INTERNAL_OPT(address, Options),
+ Port = ?GET_INTERNAL_OPT(port, Options),
+ Timeout = ?GET_INTERNAL_OPT(timeout, Options, ?DEFAULT_TIMEOUT),
+ Profile = ?GET_OPT(profile, Options),
Name = id(Address, Port, Profile),
- SocketOpts = proplists:get_value(socket_opts, ServerOpts),
- StartFunc = {ssh_acceptor, start_link, [Port, Address, SocketOpts, ServerOpts, Timeout]},
+ StartFunc = {ssh_acceptor, start_link, [Port, Address, Options, Timeout]},
Restart = transient,
Shutdown = brutal_kill,
Modules = [ssh_acceptor],
diff --git a/lib/ssh/src/ssh_auth.erl b/lib/ssh/src/ssh_auth.erl
index 9b54ecb2dd..88c8144063 100644
--- a/lib/ssh/src/ssh_auth.erl
+++ b/lib/ssh/src/ssh_auth.erl
@@ -96,14 +96,14 @@ unique(L) ->
password_msg([#ssh{opts = Opts, io_cb = IoCb,
user = User, service = Service} = Ssh0]) ->
{Password,Ssh} =
- case proplists:get_value(password, Opts) of
+ case ?GET_OPT(password, Opts) of
undefined when IoCb == ssh_no_io ->
{not_ok, Ssh0};
undefined ->
- {IoCb:read_password("ssh password: ",Ssh0), Ssh0};
+ {IoCb:read_password("ssh password: ",Opts), Ssh0};
PW ->
%% If "password" option is given it should not be tried again
- {PW, Ssh0#ssh{opts = lists:keyreplace(password,1,Opts,{password,not_ok})}}
+ {PW, Ssh0#ssh{opts = ?PUT_OPT({password,not_ok}, Opts)}}
end,
case Password of
not_ok ->
@@ -123,7 +123,7 @@ password_msg([#ssh{opts = Opts, io_cb = IoCb,
keyboard_interactive_msg([#ssh{user = User,
opts = Opts,
service = Service} = Ssh]) ->
- case proplists:get_value(password, Opts) of
+ case ?GET_OPT(password, Opts) of
not_ok ->
{not_ok,Ssh}; % No need to use a failed pwd once more
_ ->
@@ -141,8 +141,9 @@ publickey_msg([Alg, #ssh{user = User,
service = Service,
opts = Opts} = Ssh]) ->
Hash = ssh_transport:sha(Alg),
- KeyCb = proplists:get_value(key_cb, Opts, ssh_file),
- case KeyCb:user_key(Alg, Opts) of
+ {KeyCb,KeyCbOpts} = ?GET_OPT(key_cb, Opts),
+ UserOpts = ?GET_OPT(user_options, Opts),
+ case KeyCb:user_key(Alg, [{key_cb_private,KeyCbOpts}|UserOpts]) of
{ok, PrivKey} ->
StrAlgo = atom_to_list(Alg),
case encode_public_key(StrAlgo, ssh_transport:extract_public_key(PrivKey)) of
@@ -174,13 +175,19 @@ service_request_msg(Ssh) ->
%%%----------------------------------------------------------------
init_userauth_request_msg(#ssh{opts = Opts} = Ssh) ->
- case user_name(Opts) of
- {ok, User} ->
+ case ?GET_OPT(user, Opts) of
+ undefined ->
+ ErrStr = "Could not determine the users name",
+ ssh_connection_handler:disconnect(
+ #ssh_msg_disconnect{code = ?SSH_DISCONNECT_ILLEGAL_USER_NAME,
+ description = ErrStr});
+
+ User ->
Msg = #ssh_msg_userauth_request{user = User,
service = "ssh-connection",
method = "none",
data = <<>>},
- Algs0 = proplists:get_value(pref_public_key_algs, Opts, ?SUPPORTED_USER_KEYS),
+ Algs0 = ?GET_OPT(pref_public_key_algs, Opts),
%% The following line is not strictly correct. The call returns the
%% supported HOST key types while we are interested in USER keys. However,
	    %% they "happen" to be the same (for now). This could change....
@@ -194,12 +201,7 @@ init_userauth_request_msg(#ssh{opts = Opts} = Ssh) ->
ssh_transport:ssh_packet(Msg, Ssh#ssh{user = User,
userauth_preference = Prefs,
userauth_methods = none,
- service = "ssh-connection"});
- {error, no_user} ->
- ErrStr = "Could not determine the users name",
- ssh_connection_handler:disconnect(
- #ssh_msg_disconnect{code = ?SSH_DISCONNECT_ILLEGAL_USER_NAME,
- description = ErrStr})
+ service = "ssh-connection"})
end.
%%%----------------------------------------------------------------
@@ -342,7 +344,7 @@ handle_userauth_request(#ssh_msg_userauth_request{user = User,
false},
{Name, Instruction, Prompt, Echo} =
- case proplists:get_value(auth_method_kb_interactive_data, Opts) of
+ case ?GET_OPT(auth_method_kb_interactive_data, Opts) of
undefined ->
Default;
{_,_,_,_}=V ->
@@ -407,9 +409,9 @@ handle_userauth_info_response(#ssh_msg_userauth_info_response{num_responses = 1,
user = User,
userauth_supported_methods = Methods} = Ssh) ->
SendOneEmpty =
- (proplists:get_value(tstflg,Opts) == one_empty)
+ (?GET_OPT(tstflg,Opts) == one_empty)
orelse
- proplists:get_value(one_empty, proplists:get_value(tstflg,Opts,[]), false),
+ proplists:get_value(one_empty, ?GET_OPT(tstflg,Opts), false),
case check_password(User, unicode:characters_to_list(Password), Opts, Ssh) of
{true,Ssh1} when SendOneEmpty==true ->
@@ -460,27 +462,8 @@ method_preference(Algs) ->
],
Algs).
-user_name(Opts) ->
- Env = case os:type() of
- {win32, _} ->
- "USERNAME";
- {unix, _} ->
- "LOGNAME"
- end,
- case proplists:get_value(user, Opts, os:getenv(Env)) of
- false ->
- case os:getenv("USER") of
- false ->
- {error, no_user};
- User ->
- {ok, User}
- end;
- User ->
- {ok, User}
- end.
-
check_password(User, Password, Opts, Ssh) ->
- case proplists:get_value(pwdfun, Opts) of
+ case ?GET_OPT(pwdfun, Opts) of
undefined ->
Static = get_password_option(Opts, User),
{Password == Static, Ssh};
@@ -510,17 +493,18 @@ check_password(User, Password, Opts, Ssh) ->
end.
get_password_option(Opts, User) ->
- Passwords = proplists:get_value(user_passwords, Opts, []),
+ Passwords = ?GET_OPT(user_passwords, Opts),
case lists:keysearch(User, 1, Passwords) of
{value, {User, Pw}} -> Pw;
- false -> proplists:get_value(password, Opts, false)
+ false -> ?GET_OPT(password, Opts)
end.
pre_verify_sig(User, Alg, KeyBlob, Opts) ->
try
{ok, Key} = decode_public_key_v2(KeyBlob, Alg),
- KeyCb = proplists:get_value(key_cb, Opts, ssh_file),
- KeyCb:is_auth_key(Key, User, Opts)
+ {KeyCb,KeyCbOpts} = ?GET_OPT(key_cb, Opts),
+ UserOpts = ?GET_OPT(user_options, Opts),
+ KeyCb:is_auth_key(Key, User, [{key_cb_private,KeyCbOpts}|UserOpts])
catch
_:_ ->
false
@@ -529,9 +513,10 @@ pre_verify_sig(User, Alg, KeyBlob, Opts) ->
verify_sig(SessionId, User, Service, Alg, KeyBlob, SigWLen, Opts) ->
try
{ok, Key} = decode_public_key_v2(KeyBlob, Alg),
- KeyCb = proplists:get_value(key_cb, Opts, ssh_file),
- case KeyCb:is_auth_key(Key, User, Opts) of
+ {KeyCb,KeyCbOpts} = ?GET_OPT(key_cb, Opts),
+ UserOpts = ?GET_OPT(user_options, Opts),
+ case KeyCb:is_auth_key(Key, User, [{key_cb_private,KeyCbOpts}|UserOpts]) of
true ->
PlainText = build_sig_data(SessionId, User,
Service, KeyBlob, Alg),
@@ -565,9 +550,9 @@ decode_keyboard_interactive_prompts(_NumPrompts, Data) ->
keyboard_interact_get_responses(IoCb, Opts, Name, Instr, PromptInfos) ->
NumPrompts = length(PromptInfos),
- keyboard_interact_get_responses(proplists:get_value(user_interaction, Opts, true),
- proplists:get_value(keyboard_interact_fun, Opts),
- proplists:get_value(password, Opts, undefined), IoCb, Name,
+ keyboard_interact_get_responses(?GET_OPT(user_interaction, Opts),
+ ?GET_OPT(keyboard_interact_fun, Opts),
+ ?GET_OPT(password, Opts), IoCb, Name,
Instr, PromptInfos, Opts, NumPrompts).
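
After the key_cb changes earlier in this file's diff, a callback configured as {key_cb, {Module, ModOpts}} is invoked with [{key_cb_private, ModOpts} | UserOptions] rather than with the raw daemon/connect option list. A sketch of how such a callback module can pick out its private options (module name and the keys_dir key are invented for the example):

    -module(my_key_cb_opts).
    -export([demo/0]).

    %% Recover this callback's own options from the Opts argument passed
    %% to user_key/2 or is_auth_key/3.
    private_dir(Opts) ->
        Private = proplists:get_value(key_cb_private, Opts, []),
        proplists:get_value(keys_dir, Private, undefined).

    demo() ->
        Opts = [{key_cb_private, [{keys_dir, "/tmp/keys"}]},
                {silently_accept_hosts, true}],
        "/tmp/keys" = private_dir(Opts),
        ok.
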
diff --git a/lib/ssh/src/ssh_bits.erl b/lib/ssh/src/ssh_bits.erl
index 8bedaaf0c5..3ce7758447 100644
--- a/lib/ssh/src/ssh_bits.erl
+++ b/lib/ssh/src/ssh_bits.erl
@@ -30,39 +30,31 @@
-export([random/1]).
%%%----------------------------------------------------------------
-name_list([Name]) -> to_bin(Name);
-name_list([Name|Ns]) -> <<(to_bin(Name))/binary, ",", (name_list(Ns))/binary>>;
-name_list([]) -> <<>>.
-
-to_bin(A) when is_atom(A) -> list_to_binary(atom_to_list(A));
-to_bin(S) when is_list(S) -> list_to_binary(S);
-to_bin(B) when is_binary(B) -> B.
+name_list(NamesList) -> list_to_binary(lists:join($,, NamesList)).
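
The rewritten name_list/1 builds the comma-separated SSH name-list directly with lists:join/2 (OTP 19+), so the elements are expected to be strings or binaries rather than the atoms the removed to_bin/1 also accepted. A quick check of the behaviour:

    -module(name_list_demo).
    -export([demo/0]).

    name_list(Names) -> list_to_binary(lists:join($,, Names)).

    demo() ->
        <<"ssh-rsa,ssh-dss">> = name_list(["ssh-rsa", "ssh-dss"]),
        <<"none">>            = name_list([<<"none">>]),
        <<>>                  = name_list([]),
        ok.
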
%%%----------------------------------------------------------------
%%% Multi Precision Integer encoding
mpint(-1) -> <<0,0,0,1,16#ff>>;
mpint(0) -> <<0,0,0,0>>;
-mpint(X) when X < 0 -> mpint_neg(X,0,[]);
-mpint(X) -> mpint_pos(X,0,[]).
-
-mpint_neg(-1,I,Ds=[MSB|_]) ->
- if MSB band 16#80 =/= 16#80 ->
- <<?UINT32((I+1)), (list_to_binary([255|Ds]))/binary>>;
- true ->
- <<?UINT32(I), (list_to_binary(Ds))/binary>>
- end;
-mpint_neg(X,I,Ds) ->
- mpint_neg(X bsr 8,I+1,[(X band 255)|Ds]).
-
-mpint_pos(0,I,Ds=[MSB|_]) ->
- if MSB band 16#80 == 16#80 ->
- <<?UINT32((I+1)), (list_to_binary([0|Ds]))/binary>>;
- true ->
- <<?UINT32(I), (list_to_binary(Ds))/binary>>
+mpint(I) when I>0 ->
+ <<B1,V/binary>> = binary:encode_unsigned(I),
+ case B1 band 16#80 of
+ 16#80 ->
+ <<(size(V)+2):32/unsigned-big-integer, 0,B1,V/binary >>;
+ _ ->
+ <<(size(V)+1):32/unsigned-big-integer, B1,V/binary >>
end;
-mpint_pos(X,I,Ds) ->
- mpint_pos(X bsr 8,I+1,[(X band 255)|Ds]).
-
+mpint(N) when N<0 ->
+ Sxn = 8*size(binary:encode_unsigned(-N)),
+ Sxn1 = Sxn+8,
+ <<W:Sxn1>> = <<1, 0:Sxn>>,
+ <<B1,V/binary>> = binary:encode_unsigned(W+N),
+ case B1 band 16#80 of
+ 16#80 ->
+ <<(size(V)+1):32/unsigned-big-integer, B1,V/binary >>;
+ _ ->
+ <<(size(V)+2):32/unsigned-big-integer, 255,B1,V/binary >>
+ end.
%%%----------------------------------------------------------------
%% random/1
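
The new mpint/1 leans on binary:encode_unsigned/1: a positive value gets a leading zero byte only when its top bit is set, and a negative value is encoded as the two's complement of its magnitude in K bytes (K = byte length of the magnitude), with an extra 16#ff byte when the sign bit would otherwise be lost. A self-contained sketch of the same encoding, checked against the mpint examples in RFC 4251, section 5:

    -module(mpint_demo).
    -export([demo/0]).

    mpint(0) -> <<0,0,0,0>>;
    mpint(I) when I > 0 ->
        <<B1,V/binary>> = binary:encode_unsigned(I),
        case B1 band 16#80 of
            16#80 -> <<(byte_size(V)+2):32, 0, B1, V/binary>>;
            _     -> <<(byte_size(V)+1):32, B1, V/binary>>
        end;
    mpint(N) when N < 0 ->
        %% Two's complement: add 2^(8*K) to N, then encode as unsigned.
        Sxn  = 8 * byte_size(binary:encode_unsigned(-N)),
        Sxn1 = Sxn + 8,
        <<W:Sxn1>> = <<1, 0:Sxn>>,
        <<B1,V/binary>> = binary:encode_unsigned(W + N),
        case B1 band 16#80 of
            16#80 -> <<(byte_size(V)+1):32, B1, V/binary>>;
            _     -> <<(byte_size(V)+2):32, 255, B1, V/binary>>
        end.

    demo() ->
        %% Test vectors from RFC 4251, section 5 ("mpint").
        <<0,0,0,0>>                                = mpint(0),
        <<0,0,0,8, 16#09,16#a3,16#78,16#f9,
                   16#b2,16#e3,16#32,16#a7>>       = mpint(16#9a378f9b2e332a7),
        <<0,0,0,2, 16#00,16#80>>                   = mpint(16#80),
        <<0,0,0,2, 16#ed,16#cc>>                   = mpint(-16#1234),
        <<0,0,0,5, 16#ff,16#21,16#52,16#41,16#11>> = mpint(-16#deadbeef),
        ok.
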
diff --git a/lib/ssh/src/ssh_cli.erl b/lib/ssh/src/ssh_cli.erl
index 8af0ecc5f9..4c4f61e036 100644
--- a/lib/ssh/src/ssh_cli.erl
+++ b/lib/ssh/src/ssh_cli.erl
@@ -453,14 +453,20 @@ move_cursor(From, To, #ssh_pty{width=Width, term=Type}) ->
%% %%% make sure that there is data to send
%% %%% before calling ssh_connection:send
write_chars(ConnectionHandler, ChannelId, Chars) ->
- case erlang:iolist_size(Chars) of
- 0 ->
- ok;
- _ ->
- ssh_connection:send(ConnectionHandler, ChannelId,
- ?SSH_EXTENDED_DATA_DEFAULT, Chars)
+ case has_chars(Chars) of
+ false -> ok;
+ true -> ssh_connection:send(ConnectionHandler,
+ ChannelId,
+ ?SSH_EXTENDED_DATA_DEFAULT,
+ Chars)
end.
+has_chars([C|_]) when is_integer(C) -> true;
+has_chars([H|T]) when is_list(H) ; is_binary(H) -> has_chars(H) orelse has_chars(T);
+has_chars(<<_:8,_/binary>>) -> true;
+has_chars(_) -> false.
+
+
%%% tail, works with empty lists
tl1([_|A]) -> A;
tl1(_) -> [].
@@ -493,14 +499,12 @@ start_shell(ConnectionHandler, State) ->
[peer, user]),
ShellFun = case is_function(Shell) of
true ->
- User =
- proplists:get_value(user, ConnectionInfo),
+ User = proplists:get_value(user, ConnectionInfo),
case erlang:fun_info(Shell, arity) of
{arity, 1} ->
fun() -> Shell(User) end;
{arity, 2} ->
- {_, PeerAddr} =
- proplists:get_value(peer, ConnectionInfo),
+ {_, PeerAddr} = proplists:get_value(peer, ConnectionInfo),
fun() -> Shell(User, PeerAddr) end;
_ ->
Shell
@@ -519,8 +523,7 @@ start_shell(ConnectionHandler, Cmd, #state{exec=Shell} = State) when is_function
ConnectionInfo = ssh_connection_handler:connection_info(ConnectionHandler,
[peer, user]),
- User =
- proplists:get_value(user, ConnectionInfo),
+ User = proplists:get_value(user, ConnectionInfo),
ShellFun =
case erlang:fun_info(Shell, arity) of
{arity, 1} ->
@@ -528,8 +531,7 @@ start_shell(ConnectionHandler, Cmd, #state{exec=Shell} = State) when is_function
{arity, 2} ->
fun() -> Shell(Cmd, User) end;
{arity, 3} ->
- {_, PeerAddr} =
- proplists:get_value(peer, ConnectionInfo),
+ {_, PeerAddr} = proplists:get_value(peer, ConnectionInfo),
fun() -> Shell(Cmd, User, PeerAddr) end;
_ ->
Shell
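
write_chars/3 earlier in this file's diff now asks has_chars/1 whether there is anything to send instead of computing erlang:iolist_size/1 over the whole iolist; the predicate returns true as soon as it finds one character or one non-empty binary. A stand-alone sketch with a few representative inputs:

    -module(has_chars_demo).
    -export([demo/0]).

    %% Mirror of the new has_chars/1: short-circuits on the first character.
    has_chars([C|_]) when is_integer(C) -> true;
    has_chars([H|T]) when is_list(H) ; is_binary(H) ->
        has_chars(H) orelse has_chars(T);
    has_chars(<<_:8,_/binary>>) -> true;
    has_chars(_) -> false.

    demo() ->
        false = has_chars([]),
        false = has_chars([[], <<>>, [[], <<>>]]),   % nested but empty iolist
        true  = has_chars(["a"]),
        true  = has_chars([<<>>, [[], <<"x">>]]),
        ok.
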
diff --git a/lib/ssh/src/ssh_connect.hrl b/lib/ssh/src/ssh_connect.hrl
index 4fb6bc39f3..c91c56435e 100644
--- a/lib/ssh/src/ssh_connect.hrl
+++ b/lib/ssh/src/ssh_connect.hrl
@@ -22,9 +22,9 @@
%%% Description : SSH connection protocol
--type role() :: client | server .
--type connection_ref() :: pid().
-type channel_id() :: pos_integer().
+-type connection_ref() :: pid().
+
-define(DEFAULT_PACKET_SIZE, 65536).
-define(DEFAULT_WINDOW_SIZE, 10*?DEFAULT_PACKET_SIZE).
diff --git a/lib/ssh/src/ssh_connection.erl b/lib/ssh/src/ssh_connection.erl
index c7a2c92670..930ccecb4c 100644
--- a/lib/ssh/src/ssh_connection.erl
+++ b/lib/ssh/src/ssh_connection.erl
@@ -56,8 +56,8 @@
%%--------------------------------------------------------------------
%%--------------------------------------------------------------------
--spec session_channel(pid(), timeout()) -> {ok, channel_id()} | {error, timeout | closed}.
--spec session_channel(pid(), integer(), integer(), timeout()) -> {ok, channel_id()} | {error, timeout | closed}.
+-spec session_channel(connection_ref(), timeout()) -> {ok, channel_id()} | {error, timeout | closed}.
+-spec session_channel(connection_ref(), integer(), integer(), timeout()) -> {ok, channel_id()} | {error, timeout | closed}.
%% Description: Opens a channel for a ssh session. A session is a
%% remote execution of a program. The program may be a shell, an
@@ -81,7 +81,7 @@ session_channel(ConnectionHandler, InitialWindowSize,
end.
%%--------------------------------------------------------------------
--spec exec(pid(), channel_id(), string(), timeout()) ->
+-spec exec(connection_ref(), channel_id(), string(), timeout()) ->
success | failure | {error, timeout | closed}.
%% Description: Will request that the server start the
@@ -92,7 +92,7 @@ exec(ConnectionHandler, ChannelId, Command, TimeOut) ->
true, [?string(Command)], TimeOut).
%%--------------------------------------------------------------------
--spec shell(pid(), channel_id()) -> _.
+-spec shell(connection_ref(), channel_id()) -> _.
%% Description: Will request that the user's default shell (typically
%% defined in /etc/passwd in UNIX systems) be started at the other
@@ -102,7 +102,7 @@ shell(ConnectionHandler, ChannelId) ->
ssh_connection_handler:request(ConnectionHandler, self(), ChannelId,
"shell", false, <<>>, 0).
%%--------------------------------------------------------------------
--spec subsystem(pid(), channel_id(), string(), timeout()) ->
+-spec subsystem(connection_ref(), channel_id(), string(), timeout()) ->
success | failure | {error, timeout | closed}.
%%
%% Description: Executes a predefined subsystem.
@@ -112,11 +112,11 @@ subsystem(ConnectionHandler, ChannelId, SubSystem, TimeOut) ->
ChannelId, "subsystem",
true, [?string(SubSystem)], TimeOut).
%%--------------------------------------------------------------------
--spec send(pid(), channel_id(), iodata()) ->
+-spec send(connection_ref(), channel_id(), iodata()) ->
ok | {error, closed}.
--spec send(pid(), channel_id(), integer()| iodata(), timeout() | iodata()) ->
+-spec send(connection_ref(), channel_id(), integer()| iodata(), timeout() | iodata()) ->
ok | {error, timeout} | {error, closed}.
--spec send(pid(), channel_id(), integer(), iodata(), timeout()) ->
+-spec send(connection_ref(), channel_id(), integer(), iodata(), timeout()) ->
ok | {error, timeout} | {error, closed}.
%%
%%
@@ -134,7 +134,7 @@ send(ConnectionHandler, ChannelId, Type, Data, TimeOut) ->
ssh_connection_handler:send(ConnectionHandler, ChannelId,
Type, Data, TimeOut).
%%--------------------------------------------------------------------
--spec send_eof(pid(), channel_id()) -> ok | {error, closed}.
+-spec send_eof(connection_ref(), channel_id()) -> ok | {error, closed}.
%%
%%
%% Description: Sends eof on the channel <ChannelId>.
@@ -143,7 +143,7 @@ send_eof(ConnectionHandler, Channel) ->
ssh_connection_handler:send_eof(ConnectionHandler, Channel).
%%--------------------------------------------------------------------
--spec adjust_window(pid(), channel_id(), integer()) -> ok | {error, closed}.
+-spec adjust_window(connection_ref(), channel_id(), integer()) -> ok | {error, closed}.
%%
%%
%% Description: Adjusts the ssh flowcontrol window.
@@ -152,7 +152,7 @@ adjust_window(ConnectionHandler, Channel, Bytes) ->
ssh_connection_handler:adjust_window(ConnectionHandler, Channel, Bytes).
%%--------------------------------------------------------------------
--spec setenv(pid(), channel_id(), string(), string(), timeout()) ->
+-spec setenv(connection_ref(), channel_id(), string(), string(), timeout()) ->
success | failure | {error, timeout | closed}.
%%
%%
@@ -165,7 +165,7 @@ setenv(ConnectionHandler, ChannelId, Var, Value, TimeOut) ->
%%--------------------------------------------------------------------
--spec close(pid(), channel_id()) -> ok.
+-spec close(connection_ref(), channel_id()) -> ok.
%%
%%
%% Description: Sends a close message on the channel <ChannelId>.
@@ -174,7 +174,7 @@ close(ConnectionHandler, ChannelId) ->
ssh_connection_handler:close(ConnectionHandler, ChannelId).
%%--------------------------------------------------------------------
--spec reply_request(pid(), boolean(), success | failure, channel_id()) -> ok.
+-spec reply_request(connection_ref(), boolean(), success | failure, channel_id()) -> ok.
%%
%%
%% Description: Send status replies to requests that want such replies.
@@ -185,9 +185,9 @@ reply_request(_,false, _, _) ->
ok.
%%--------------------------------------------------------------------
--spec ptty_alloc(pid(), channel_id(), proplists:proplist()) ->
+-spec ptty_alloc(connection_ref(), channel_id(), proplists:proplist()) ->
		   success | failure | {error, closed}.
--spec ptty_alloc(pid(), channel_id(), proplists:proplist(), timeout()) ->
+-spec ptty_alloc(connection_ref(), channel_id(), proplists:proplist(), timeout()) ->
		   success | failure | {error, timeout} | {error, closed}.
%%
@@ -197,16 +197,16 @@ reply_request(_,false, _, _) ->
ptty_alloc(ConnectionHandler, Channel, Options) ->
ptty_alloc(ConnectionHandler, Channel, Options, infinity).
ptty_alloc(ConnectionHandler, Channel, Options0, TimeOut) ->
- Options = backwards_compatible(Options0, []),
- {Width, PixWidth} = pty_default_dimensions(width, Options),
- {Height, PixHeight} = pty_default_dimensions(height, Options),
+ TermData = backwards_compatible(Options0, []), % FIXME
+ {Width, PixWidth} = pty_default_dimensions(width, TermData),
+ {Height, PixHeight} = pty_default_dimensions(height, TermData),
pty_req(ConnectionHandler, Channel,
- proplists:get_value(term, Options, os:getenv("TERM", ?DEFAULT_TERMINAL)),
- proplists:get_value(width, Options, Width),
- proplists:get_value(height, Options, Height),
- proplists:get_value(pixel_widh, Options, PixWidth),
- proplists:get_value(pixel_height, Options, PixHeight),
- proplists:get_value(pty_opts, Options, []), TimeOut
+ proplists:get_value(term, TermData, os:getenv("TERM", ?DEFAULT_TERMINAL)),
+ proplists:get_value(width, TermData, Width),
+ proplists:get_value(height, TermData, Height),
+ proplists:get_value(pixel_widh, TermData, PixWidth),
+ proplists:get_value(pixel_height, TermData, PixHeight),
+ proplists:get_value(pty_opts, TermData, []), TimeOut
).
%%--------------------------------------------------------------------
%% Not yet officially supported! The following functions are part of the
@@ -417,7 +417,8 @@ handle_msg(#ssh_msg_channel_open{channel_type = "session" = Type,
maximum_packet_size = PacketSz},
#connection{options = SSHopts} = Connection0,
server) ->
- MinAcceptedPackSz = proplists:get_value(minimal_remote_max_packet_size, SSHopts, 0),
+ MinAcceptedPackSz =
+ ?GET_OPT(minimal_remote_max_packet_size, SSHopts),
if
MinAcceptedPackSz =< PacketSz ->
@@ -574,7 +575,6 @@ handle_msg(#ssh_msg_channel_request{recipient_channel = ChannelId,
PixWidth, PixHeight, decode_pty_opts(Modes)},
Channel = ssh_channel:cache_lookup(Cache, ChannelId),
-
handle_cli_msg(Connection, Channel,
{pty, ChannelId, WantReply, PtyRequest});
@@ -691,7 +691,6 @@ handle_cli_msg(#connection{channel_cache = Cache} = Connection,
#channel{user = undefined,
remote_id = RemoteId,
local_id = ChannelId} = Channel0, Reply0) ->
-
case (catch start_cli(Connection, ChannelId)) of
{ok, Pid} ->
erlang:monitor(process, Pid),
@@ -819,7 +818,7 @@ start_channel(Cb, Id, Args, SubSysSup, Exec, Opts) ->
ssh_channel_sup:start_child(ChannelSup, ChildSpec).
assert_limit_num_channels_not_exceeded(ChannelSup, Opts) ->
- MaxNumChannels = proplists:get_value(max_channels, Opts, infinity),
+ MaxNumChannels = ?GET_OPT(max_channels, Opts),
NumChannels = length([x || {_,_,worker,[ssh_channel]} <-
supervisor:which_children(ChannelSup)]),
if
@@ -858,8 +857,8 @@ setup_session(#connection{channel_cache = Cache
check_subsystem("sftp"= SsName, Options) ->
- case proplists:get_value(subsystems, Options, no_subsys) of
- no_subsys ->
+ case ?GET_OPT(subsystems, Options) of
+ no_subsys -> % FIXME: Can 'no_subsys' ever be matched?
{SsName, {Cb, Opts}} = ssh_sftpd:subsystem_spec([]),
{Cb, Opts};
SubSystems ->
@@ -867,7 +866,7 @@ check_subsystem("sftp"= SsName, Options) ->
end;
check_subsystem(SsName, Options) ->
- Subsystems = proplists:get_value(subsystems, Options, []),
+ Subsystems = ?GET_OPT(subsystems, Options),
case proplists:get_value(SsName, Subsystems, {none, []}) of
Fun when is_function(Fun) ->
{Fun, []};
@@ -1022,12 +1021,13 @@ pty_req(ConnectionHandler, Channel, Term, Width, Height,
?uint32(PixWidth),?uint32(PixHeight),
encode_pty_opts(PtyOpts)], TimeOut).
-pty_default_dimensions(Dimension, Options) ->
- case proplists:get_value(Dimension, Options, 0) of
+pty_default_dimensions(Dimension, TermData) ->
+ case proplists:get_value(Dimension, TermData, 0) of
N when is_integer(N), N > 0 ->
{N, 0};
_ ->
- case proplists:get_value(list_to_atom("pixel_" ++ atom_to_list(Dimension)), Options, 0) of
+ PixelDim = list_to_atom("pixel_" ++ atom_to_list(Dimension)),
+ case proplists:get_value(PixelDim, TermData, 0) of
N when is_integer(N), N > 0 ->
{0, N};
_ ->
diff --git a/lib/ssh/src/ssh_connection_handler.erl b/lib/ssh/src/ssh_connection_handler.erl
index 7451c9e6d0..b9c643c77e 100644
--- a/lib/ssh/src/ssh_connection_handler.erl
+++ b/lib/ssh/src/ssh_connection_handler.erl
@@ -76,7 +76,7 @@
%%--------------------------------------------------------------------
-spec start_link(role(),
inet:socket(),
- proplists:proplist()
+ ssh_options:options()
) -> {ok, pid()}.
%% . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
start_link(Role, Socket, Options) ->
@@ -99,12 +99,10 @@ stop(ConnectionHandler)->
%% Internal application API
%%====================================================================
--define(DefaultTransport, {tcp, gen_tcp, tcp_closed} ).
-
%%--------------------------------------------------------------------
-spec start_connection(role(),
inet:socket(),
- proplists:proplist(),
+ ssh_options:options(),
timeout()
) -> {ok, connection_ref()} | {error, term()}.
%% . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
@@ -121,9 +119,8 @@ start_connection(client = Role, Socket, Options, Timeout) ->
end;
start_connection(server = Role, Socket, Options, Timeout) ->
- SSH_Opts = proplists:get_value(ssh_opts, Options, []),
try
- case proplists:get_value(parallel_login, SSH_Opts, false) of
+ case ?GET_OPT(parallel_login, Options) of
true ->
HandshakerPid =
spawn_link(fun() ->
@@ -346,7 +343,7 @@ renegotiate_data(ConnectionHandler) ->
| undefined,
last_size_rekey = 0 :: non_neg_integer(),
event_queue = [] :: list(),
- opts :: proplists:proplist(),
+ opts :: ssh_options:options(),
inet_initial_recbuf_size :: pos_integer()
| undefined
}).
@@ -357,15 +354,14 @@ renegotiate_data(ConnectionHandler) ->
%%--------------------------------------------------------------------
-spec init_connection_handler(role(),
inet:socket(),
- proplists:proplist()
+ ssh_options:options()
) -> no_return().
%% . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . .
init_connection_handler(Role, Socket, Opts) ->
process_flag(trap_exit, true),
S0 = init_process_state(Role, Socket, Opts),
try
- {Protocol, Callback, CloseTag} =
- proplists:get_value(transport, Opts, ?DefaultTransport),
+ {Protocol, Callback, CloseTag} = ?GET_OPT(transport, Opts),
S0#data{ssh_params = init_ssh_record(Role, Socket, Opts),
transport_protocol = Protocol,
transport_cb = Callback,
@@ -393,7 +389,7 @@ init_process_state(Role, Socket, Opts) ->
port_bindings = [],
requests = [],
options = Opts},
- starter = proplists:get_value(user_pid, Opts),
+ starter = ?GET_INTERNAL_OPT(user_pid, Opts),
socket = Socket,
opts = Opts
},
@@ -409,13 +405,18 @@ init_process_state(Role, Socket, Opts) ->
init_connection(server, C = #connection{}, Opts) ->
- Sups = proplists:get_value(supervisors, Opts),
- SystemSup = proplists:get_value(system_sup, Sups),
- SubSystemSup = proplists:get_value(subsystem_sup, Sups),
+ Sups = ?GET_INTERNAL_OPT(supervisors, Opts),
+
+ SystemSup = proplists:get_value(system_sup, Sups),
+ SubSystemSup = proplists:get_value(subsystem_sup, Sups),
ConnectionSup = proplists:get_value(connection_sup, Sups),
- Shell = proplists:get_value(shell, Opts),
- Exec = proplists:get_value(exec, Opts),
- CliSpec = proplists:get_value(ssh_cli, Opts, {ssh_cli, [Shell]}),
+
+ Shell = ?GET_OPT(shell, Opts),
+ Exec = ?GET_OPT(exec, Opts),
+ CliSpec = case ?GET_OPT(ssh_cli, Opts) of
+ undefined -> {ssh_cli, [Shell]};
+ Spec -> Spec
+ end,
C#connection{cli_spec = CliSpec,
exec = Exec,
system_supervisor = SystemSup,
@@ -426,41 +427,38 @@ init_connection(server, C = #connection{}, Opts) ->
init_ssh_record(Role, Socket, Opts) ->
{ok, PeerAddr} = inet:peername(Socket),
- KeyCb = proplists:get_value(key_cb, Opts, ssh_file),
- AuthMethods = proplists:get_value(auth_methods,
- Opts,
- case Role of
- server -> ?SUPPORTED_AUTH_METHODS;
- client -> undefined
- end),
+ KeyCb = ?GET_OPT(key_cb, Opts),
+ AuthMethods =
+ case Role of
+ server -> ?GET_OPT(auth_methods, Opts);
+ client -> undefined
+ end,
S0 = #ssh{role = Role,
key_cb = KeyCb,
opts = Opts,
userauth_supported_methods = AuthMethods,
available_host_keys = supported_host_keys(Role, KeyCb, Opts),
- random_length_padding = proplists:get_value(max_random_length_padding,
- Opts,
- (#ssh{})#ssh.random_length_padding)
+ random_length_padding = ?GET_OPT(max_random_length_padding, Opts)
},
{Vsn, Version} = ssh_transport:versions(Role, Opts),
case Role of
client ->
- PeerName = proplists:get_value(host, Opts),
+ PeerName = ?GET_INTERNAL_OPT(host, Opts),
S0#ssh{c_vsn = Vsn,
c_version = Version,
- io_cb = case proplists:get_value(user_interaction, Opts, true) of
+ io_cb = case ?GET_OPT(user_interaction, Opts) of
true -> ssh_io;
false -> ssh_no_io
end,
- userauth_quiet_mode = proplists:get_value(quiet_mode, Opts, false),
+ userauth_quiet_mode = ?GET_OPT(quiet_mode, Opts),
peer = {PeerName, PeerAddr}
};
server ->
S0#ssh{s_vsn = Vsn,
s_version = Version,
- io_cb = proplists:get_value(io_cb, Opts, ssh_io),
+ io_cb = ?GET_INTERNAL_OPT(io_cb, Opts, ssh_io),
userauth_methods = string:tokens(AuthMethods, ","),
kb_tries_left = 3,
peer = {undefined, PeerAddr}
@@ -609,13 +607,15 @@ handle_event(_, #ssh_msg_kexdh_reply{} = Msg, {key_exchange,client,ReNeg}, D) ->
%%%---- diffie-hellman group exchange
handle_event(_, #ssh_msg_kex_dh_gex_request{} = Msg, {key_exchange,server,ReNeg}, D) ->
- {ok, GexGroup, Ssh} = ssh_transport:handle_kex_dh_gex_request(Msg, D#data.ssh_params),
+ {ok, GexGroup, Ssh1} = ssh_transport:handle_kex_dh_gex_request(Msg, D#data.ssh_params),
send_bytes(GexGroup, D),
+ Ssh = ssh_transport:parallell_gen_key(Ssh1),
{next_state, {key_exchange_dh_gex_init,server,ReNeg}, D#data{ssh_params=Ssh}};
handle_event(_, #ssh_msg_kex_dh_gex_request_old{} = Msg, {key_exchange,server,ReNeg}, D) ->
- {ok, GexGroup, Ssh} = ssh_transport:handle_kex_dh_gex_request(Msg, D#data.ssh_params),
+ {ok, GexGroup, Ssh1} = ssh_transport:handle_kex_dh_gex_request(Msg, D#data.ssh_params),
send_bytes(GexGroup, D),
+ Ssh = ssh_transport:parallell_gen_key(Ssh1),
{next_state, {key_exchange_dh_gex_init,server,ReNeg}, D#data{ssh_params=Ssh}};
handle_event(_, #ssh_msg_kex_dh_gex_group{} = Msg, {key_exchange,client,ReNeg}, D) ->
@@ -847,14 +847,12 @@ handle_event(_, Msg = #ssh_msg_userauth_failure{}, {userauth_keyboard_interactiv
handle_event(_, Msg=#ssh_msg_userauth_failure{}, {userauth_keyboard_interactive_info_response, client},
#data{ssh_params = Ssh0} = D0) ->
Opts = Ssh0#ssh.opts,
- D = case proplists:get_value(password, Opts) of
+ D = case ?GET_OPT(password, Opts) of
undefined ->
D0;
_ ->
D0#data{ssh_params =
- Ssh0#ssh{opts =
- lists:keyreplace(password,1,Opts,
- {password,not_ok})}} % FIXME:intermodule dependency
+ Ssh0#ssh{opts = ?PUT_OPT({password,not_ok}, Opts)}} % FIXME:intermodule dependency
end,
{next_state, {userauth,client}, D, [{next_event, internal, Msg}]};
@@ -952,7 +950,7 @@ handle_event(cast, renegotiate, _, _) ->
handle_event(cast, data_size, {connected,Role}, D) ->
{ok, [{send_oct,Sent0}]} = inet:getstat(D#data.socket, [send_oct]),
Sent = Sent0 - D#data.last_size_rekey,
- MaxSent = proplists:get_value(rekey_limit, D#data.opts, 1024000000),
+ MaxSent = ?GET_OPT(rekey_limit, D#data.opts),
timer:apply_after(?REKEY_DATA_TIMOUT, gen_statem, cast, [self(), data_size]),
case Sent >= MaxSent of
true ->
@@ -1206,7 +1204,7 @@ handle_event(info, {Proto, Sock, NewData}, StateName, D0 = #data{socket = Sock,
catch
_C:_E ->
disconnect(#ssh_msg_disconnect{code = ?SSH_DISCONNECT_PROTOCOL_ERROR,
- description = "Encountered unexpected input"},
+ description = "Bad packet"},
StateName, D)
end;
@@ -1221,13 +1219,12 @@ handle_event(info, {Proto, Sock, NewData}, StateName, D0 = #data{socket = Sock,
{bad_mac, Ssh1} ->
disconnect(#ssh_msg_disconnect{code = ?SSH_DISCONNECT_PROTOCOL_ERROR,
- description = "Bad mac"},
+ description = "Bad packet"},
StateName, D0#data{ssh_params=Ssh1});
- {error, {exceeds_max_size,PacketLen}} ->
+ {error, {exceeds_max_size,_PacketLen}} ->
disconnect(#ssh_msg_disconnect{code = ?SSH_DISCONNECT_PROTOCOL_ERROR,
- description = "Bad packet length "
- ++ integer_to_list(PacketLen)},
+ description = "Bad packet"},
StateName, D0)
catch
_C:_E ->
@@ -1293,11 +1290,12 @@ handle_event(info, UnexpectedMessage, StateName, D = #data{ssh_params = Ssh}) ->
"Unexpected message '~p' received in state '~p'\n"
"Role: ~p\n"
"Peer: ~p\n"
- "Local Address: ~p\n", [UnexpectedMessage,
- StateName,
- Ssh#ssh.role,
- Ssh#ssh.peer,
- proplists:get_value(address, Ssh#ssh.opts)])),
+ "Local Address: ~p\n",
+ [UnexpectedMessage,
+ StateName,
+ Ssh#ssh.role,
+ Ssh#ssh.peer,
+ ?GET_INTERNAL_OPT(address, Ssh#ssh.opts)])),
error_logger:info_report(Msg),
keep_state_and_data;
@@ -1311,11 +1309,12 @@ handle_event(info, UnexpectedMessage, StateName, D = #data{ssh_params = Ssh}) ->
"Message: ~p\n"
"Role: ~p\n"
"Peer: ~p\n"
- "Local Address: ~p\n", [Other,
- UnexpectedMessage,
- Ssh#ssh.role,
- element(2,Ssh#ssh.peer),
- proplists:get_value(address, Ssh#ssh.opts)]
+ "Local Address: ~p\n",
+ [Other,
+ UnexpectedMessage,
+ Ssh#ssh.role,
+ element(2,Ssh#ssh.peer),
+ ?GET_INTERNAL_OPT(address, Ssh#ssh.opts)]
)),
error_logger:error_report(Msg),
keep_state_and_data
@@ -1437,11 +1436,11 @@ code_change(_OldVsn, StateName, State, _Extra) ->
%%--------------------------------------------------------------------
%% Starting
-start_the_connection_child(UserPid, Role, Socket, Options) ->
- Sups = proplists:get_value(supervisors, Options),
+start_the_connection_child(UserPid, Role, Socket, Options0) ->
+ Sups = ?GET_INTERNAL_OPT(supervisors, Options0),
ConnectionSup = proplists:get_value(connection_sup, Sups),
- Opts = [{supervisors, Sups}, {user_pid, UserPid} | proplists:get_value(ssh_opts, Options, [])],
- {ok, Pid} = ssh_connection_sup:start_child(ConnectionSup, [Role, Socket, Opts]),
+ Options = ?PUT_INTERNAL_OPT({user_pid,UserPid}, Options0),
+ {ok, Pid} = ssh_connection_sup:start_child(ConnectionSup, [Role, Socket, Options]),
ok = socket_control(Socket, Pid, Options),
Pid.
@@ -1480,35 +1479,41 @@ renegotiation(_) -> false.
%%--------------------------------------------------------------------
supported_host_keys(client, _, Options) ->
try
- case proplists:get_value(public_key,
- proplists:get_value(preferred_algorithms,Options,[])
- ) of
- undefined ->
- ssh_transport:default_algorithms(public_key);
- L ->
- L -- (L--ssh_transport:default_algorithms(public_key))
- end
+ find_sup_hkeys(Options)
of
[] ->
- {stop, {shutdown, "No public key algs"}};
+ error({shutdown, "No public key algs"});
Algs ->
[atom_to_list(A) || A<-Algs]
catch
exit:Reason ->
- {stop, {shutdown, Reason}}
+ error({shutdown, Reason})
end;
supported_host_keys(server, KeyCb, Options) ->
- [atom_to_list(A) || A <- proplists:get_value(public_key,
- proplists:get_value(preferred_algorithms,Options,[]),
- ssh_transport:default_algorithms(public_key)
- ),
+ [atom_to_list(A) || A <- find_sup_hkeys(Options),
available_host_key(KeyCb, A, Options)
].
-%% Alg :: atom()
-available_host_key(KeyCb, Alg, Opts) ->
- element(1, catch KeyCb:host_key(Alg, Opts)) == ok.
+find_sup_hkeys(Options) ->
+ case proplists:get_value(public_key,
+ ?GET_OPT(preferred_algorithms,Options)
+ )
+ of
+ undefined ->
+ ssh_transport:default_algorithms(public_key);
+ L ->
+ NonSupported = L--ssh_transport:supported_algorithms(public_key),
+ L -- NonSupported
+ end.
+
+
+
+%% Alg :: atom()
+available_host_key({KeyCb,KeyCbOpts}, Alg, Opts) ->
+ UserOpts = ?GET_OPT(user_options, Opts),
+ element(1,
+ catch KeyCb:host_key(Alg, [{key_cb_private,KeyCbOpts}|UserOpts])) == ok.
send_msg(Msg, State=#data{ssh_params=Ssh0}) when is_tuple(Msg) ->
{Bytes, Ssh} = ssh_transport:ssh_packet(Msg, Ssh0),
@@ -1764,47 +1769,24 @@ get_repl(X, Acc) ->
exit({get_repl,X,Acc}).
%%%----------------------------------------------------------------
-disconnect_fun({disconnect,Msg}, D) ->
- disconnect_fun(Msg, D);
-disconnect_fun(Reason, #data{opts=Opts}) ->
- case proplists:get_value(disconnectfun, Opts) of
- undefined ->
- ok;
- Fun ->
- catch Fun(Reason)
- end.
-
-unexpected_fun(UnexpectedMessage, #data{opts = Opts,
- ssh_params = #ssh{peer = {_,Peer} }
- } ) ->
- case proplists:get_value(unexpectedfun, Opts) of
- undefined ->
- report;
- Fun ->
- catch Fun(UnexpectedMessage, Peer)
- end.
+-define(CALL_FUN(Key,D), catch (?GET_OPT(Key, D#data.opts)) ).
+
+disconnect_fun({disconnect,Msg}, D) -> ?CALL_FUN(disconnectfun,D)(Msg);
+disconnect_fun(Reason, D) -> ?CALL_FUN(disconnectfun,D)(Reason).
+unexpected_fun(UnexpectedMessage, #data{ssh_params = #ssh{peer = {_,Peer} }} = D) ->
+ ?CALL_FUN(unexpectedfun,D)(UnexpectedMessage, Peer).
debug_fun(#ssh_msg_debug{always_display = Display,
message = DbgMsg,
language = Lang},
- #data{opts = Opts}) ->
- case proplists:get_value(ssh_msg_debug_fun, Opts) of
- undefined ->
- ok;
- Fun ->
- catch Fun(self(), Display, DbgMsg, Lang)
- end.
+ D) ->
+ ?CALL_FUN(ssh_msg_debug_fun,D)(self(), Display, DbgMsg, Lang).
-connected_fun(User, Method, #data{ssh_params = #ssh{peer = {_,Peer}},
- opts = Opts}) ->
- case proplists:get_value(connectfun, Opts) of
- undefined ->
- ok;
- Fun ->
- catch Fun(User, Peer, Method)
- end.
+connected_fun(User, Method, #data{ssh_params = #ssh{peer = {_,Peer}}} = D) ->
+ ?CALL_FUN(connectfun,D)(User, Peer, Method).
+
retry_fun(_, undefined, _) ->
ok;
@@ -1818,7 +1800,7 @@ retry_fun(User, Reason, #data{ssh_params = #ssh{opts = Opts,
_ ->
{infofun, Reason}
end,
- Fun = proplists:get_value(Tag, Opts, fun(_,_)-> ok end),
+ Fun = ?GET_OPT(Tag, Opts),
try erlang:fun_info(Fun, arity)
of
{arity, 2} -> %% Backwards compatible
@@ -1837,7 +1819,7 @@ retry_fun(User, Reason, #data{ssh_params = #ssh{opts = Opts,
%%% channels open for a while.
cache_init_idle_timer(D) ->
- case proplists:get_value(idle_time, D#data.opts, infinity) of
+ case ?GET_OPT(idle_time, D#data.opts) of
infinity ->
D#data{idle_timer_value = infinity,
idle_timer_ref = infinity % A flag used later...
@@ -1900,9 +1882,8 @@ start_channel_request_timer(Channel, From, Time) ->
%%% Connection start and initalization helpers
socket_control(Socket, Pid, Options) ->
- {_, TransportCallback, _} = % For example {_,gen_tcp,_}
- proplists:get_value(transport, Options, ?DefaultTransport),
- case TransportCallback:controlling_process(Socket, Pid) of
+ {_, Callback, _} = ?GET_OPT(transport, Options),
+ case Callback:controlling_process(Socket, Pid) of
ok ->
gen_statem:cast(Pid, socket_control);
{error, Reason} ->
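
Note: the ?GET_OPT, ?GET_INTERNAL_OPT, ?PUT_OPT and ?PUT_INTERNAL_OPT macros used throughout the hunks above are defined in ssh.hrl, which is not part of this diff. The following is a minimal sketch of how they are assumed to expand onto the new ssh_options API (introduced in ssh_options.erl further down); the real definitions may differ in detail.

-module(opt_macro_sketch).
-export([demo/1]).

%% Assumed expansions; the real macros live in ssh.hrl.
-define(GET_OPT(Key, Opts),
        ssh_options:get_value(user_options, Key, Opts, ?MODULE, ?LINE)).
-define(GET_INTERNAL_OPT(Key, Opts),
        ssh_options:get_value(internal_options, Key, Opts, ?MODULE, ?LINE)).
-define(GET_INTERNAL_OPT(Key, Opts, Def),
        ssh_options:get_value(internal_options, Key, Opts, Def, ?MODULE, ?LINE)).
-define(PUT_OPT(KeyVal, Opts),
        ssh_options:put_value(user_options, KeyVal, Opts, ?MODULE, ?LINE)).
-define(PUT_INTERNAL_OPT(KeyVal, Opts),
        ssh_options:put_value(internal_options, KeyVal, Opts, ?MODULE, ?LINE)).

%% The three patterns seen in the hunks above: read a declared user option
%% (its default comes from ssh_options:default/1), read an internal option
%% set by the ssh application itself, and store a changed user option.
demo(Opts0) ->
    Parallel = ?GET_OPT(parallel_login, Opts0),
    UserPid  = ?GET_INTERNAL_OPT(user_pid, Opts0),
    Opts     = ?PUT_OPT({password, not_ok}, Opts0),
    {Parallel, UserPid, Opts}.
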
diff --git a/lib/ssh/src/ssh_dbg.erl b/lib/ssh/src/ssh_dbg.erl
index dff2bae9f2..0345bbdea7 100644
--- a/lib/ssh/src/ssh_dbg.erl
+++ b/lib/ssh/src/ssh_dbg.erl
@@ -50,50 +50,61 @@ messages(Write, MangleArg) when is_function(Write,2),
is_function(MangleArg,1) ->
catch dbg:start(),
setup_tracer(Write, MangleArg),
- dbg:p(new,c),
+ dbg:p(new,[c,timestamp]),
dbg_ssh_messages().
dbg_ssh_messages() ->
dbg:tp(ssh_message,encode,1, x),
dbg:tp(ssh_message,decode,1, x),
- dbg:tpl(ssh_transport,select_algorithm,3, x).
-
+ dbg:tpl(ssh_transport,select_algorithm,3, x),
+ dbg:tp(ssh_transport,hello_version_msg,1, x),
+ dbg:tp(ssh_transport,handle_hello_version,1, x).
+
%%%----------------------------------------------------------------
stop() ->
dbg:stop().
%%%================================================================
-msg_formater({trace,Pid,call,{ssh_message,encode,[Msg]}}, D) ->
- fmt("~nSEND ~p ~s~n", [Pid,wr_record(shrink_bin(Msg))], D);
-msg_formater({trace,_Pid,return_from,{ssh_message,encode,1},_Res}, D) ->
+msg_formater({trace_ts,Pid,call,{ssh_message,encode,[Msg]},TS}, D) ->
+ fmt("~n~s SEND ~p ~s~n", [ts(TS),Pid,wr_record(shrink_bin(Msg))], D);
+msg_formater({trace_ts,_Pid,return_from,{ssh_message,encode,1},_Res,_TS}, D) ->
D;
-msg_formater({trace,_Pid,call,{ssh_message,decode,_}}, D) ->
+msg_formater({trace_ts,_Pid,call,{ssh_message,decode,_},_TS}, D) ->
D;
-msg_formater({trace,Pid,return_from,{ssh_message,decode,1},Msg}, D) ->
- fmt("~n~p RECV ~s~n", [Pid,wr_record(shrink_bin(Msg))], D);
+msg_formater({trace_ts,Pid,return_from,{ssh_message,decode,1},Msg,TS}, D) ->
+ fmt("~n~s ~p RECV ~s~n", [ts(TS),Pid,wr_record(shrink_bin(Msg))], D);
-msg_formater({trace,_Pid,call,{ssh_transport,select_algorithm,_}}, D) ->
+msg_formater({trace_ts,_Pid,call,{ssh_transport,select_algorithm,_},_TS}, D) ->
+ D;
+msg_formater({trace_ts,Pid,return_from,{ssh_transport,select_algorithm,3},{ok,Alg},TS}, D) ->
+ fmt("~n~s ~p ALGORITHMS~n~s~n", [ts(TS),Pid, wr_record(Alg)], D);
+
+msg_formater({trace_ts,_Pid,call,{ssh_transport,hello_version_msg,_},_TS}, D) ->
D;
-msg_formater({trace,Pid,return_from,{ssh_transport,select_algorithm,3},{ok,Alg}}, D) ->
- fmt("~n~p ALGORITHMS~n~s~n", [Pid, wr_record(Alg)], D);
+msg_formater({trace_ts,Pid,return_from,{ssh_transport,hello_version_msg,1},Hello,TS}, D) ->
+ fmt("~n~s ~p TCP SEND HELLO~n ~p~n", [ts(TS),Pid,lists:flatten(Hello)], D);
+msg_formater({trace_ts,Pid,call,{ssh_transport,handle_hello_version,[Hello]},TS}, D) ->
+ fmt("~n~s ~p RECV HELLO~n ~p~n", [ts(TS),Pid,lists:flatten(Hello)], D);
+msg_formater({trace_ts,_Pid,return_from,{ssh_transport,handle_hello_version,1},_,_TS}, D) ->
+ D;
-msg_formater({trace,Pid,send,{tcp,Sock,Bytes},Pid}, D) ->
- fmt("~n~p TCP SEND on ~p~n ~p~n", [Pid,Sock, shrink_bin(Bytes)], D);
+msg_formater({trace_ts,Pid,send,{tcp,Sock,Bytes},Pid,TS}, D) ->
+ fmt("~n~s ~p TCP SEND on ~p~n ~p~n", [ts(TS),Pid,Sock, shrink_bin(Bytes)], D);
-msg_formater({trace,Pid,send,{tcp,Sock,Bytes},Dest}, D) ->
- fmt("~n~p TCP SEND from ~p TO ~p~n ~p~n", [Pid,Sock,Dest, shrink_bin(Bytes)], D);
+msg_formater({trace_ts,Pid,send,{tcp,Sock,Bytes},Dest,TS}, D) ->
+ fmt("~n~s ~p TCP SEND from ~p TO ~p~n ~p~n", [ts(TS),Pid,Sock,Dest, shrink_bin(Bytes)], D);
-msg_formater({trace,Pid,send,ErlangMsg,Dest}, D) ->
- fmt("~n~p ERL MSG SEND TO ~p~n ~p~n", [Pid,Dest, shrink_bin(ErlangMsg)], D);
+msg_formater({trace_ts,Pid,send,ErlangMsg,Dest,TS}, D) ->
+ fmt("~n~s ~p ERL MSG SEND TO ~p~n ~p~n", [ts(TS),Pid,Dest, shrink_bin(ErlangMsg)], D);
-msg_formater({trace,Pid,'receive',{tcp,Sock,Bytes}}, D) ->
- fmt("~n~p TCP RECEIVE on ~p~n ~p~n", [Pid,Sock,shrink_bin(Bytes)], D);
+msg_formater({trace_ts,Pid,'receive',{tcp,Sock,Bytes},TS}, D) ->
+ fmt("~n~s ~p TCP RECEIVE on ~p~n ~p~n", [ts(TS),Pid,Sock,shrink_bin(Bytes)], D);
-msg_formater({trace,Pid,'receive',ErlangMsg}, D) ->
- fmt("~n~p ERL MSG RECEIVE~n ~p~n", [Pid,shrink_bin(ErlangMsg)], D);
+msg_formater({trace_ts,Pid,'receive',ErlangMsg,TS}, D) ->
+ fmt("~n~s ~p ERL MSG RECEIVE~n ~p~n", [ts(TS),Pid,shrink_bin(ErlangMsg)], D);
msg_formater(M, D) ->
@@ -106,6 +117,11 @@ msg_formater(M, D) ->
fmt(Fmt, Args, D=#data{writer=Write,acc=Acc}) ->
D#data{acc = Write(io_lib:format(Fmt, Args), Acc)}.
+ts({_,_,Usec}=Now) ->
+ {_Date,{HH,MM,SS}} = calendar:now_to_local_time(Now),
+ io_lib:format("~.2.0w:~.2.0w:~.2.0w.~.6.0w",[HH,MM,SS,Usec]);
+ts(_) ->
+ "-".
%%%----------------------------------------------------------------
setup_tracer(Write, MangleArg) ->
Handler = fun(Arg, D) ->
@@ -116,11 +132,11 @@ setup_tracer(Write, MangleArg) ->
ok.
%%%----------------------------------------------------------------
-shrink_bin(B) when is_binary(B), size(B)>100 -> {'*** SHRINKED BIN',
+shrink_bin(B) when is_binary(B), size(B)>256 -> {'*** SHRINKED BIN',
size(B),
- element(1,split_binary(B,20)),
+ element(1,split_binary(B,64)),
'...',
- element(2,split_binary(B,size(B)-20))
+ element(2,split_binary(B,size(B)-64))
};
shrink_bin(L) when is_list(L) -> lists:map(fun shrink_bin/1, L);
shrink_bin(T) when is_tuple(T) -> list_to_tuple(shrink_bin(tuple_to_list(T)));
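
Note: a minimal sketch of how the extended tracing above could be started, assuming only what the hunks show about ssh_dbg:messages/2 (a Write fun of arity 2 called as Write(FormattedChars, Acc) by fmt/3, and a MangleArg fun of arity 1).

-module(ssh_dbg_trace_sketch).
-export([start_trace/0]).

start_trace() ->
    %% Writer just prints each formatted event and keeps the accumulator.
    Write  = fun(Chars, Acc) -> io:put_chars(Chars), Acc end,
    %% No mangling of the raw trace terms.
    Mangle = fun(Arg) -> Arg end,
    ssh_dbg:messages(Write, Mangle).
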
diff --git a/lib/ssh/src/ssh_file.erl b/lib/ssh/src/ssh_file.erl
index 216f65f33a..898b4cc5c4 100644
--- a/lib/ssh/src/ssh_file.erl
+++ b/lib/ssh/src/ssh_file.erl
@@ -192,8 +192,8 @@ lookup_user_key(Key, User, Opts) ->
ssh_dir({remoteuser, User}, Opts) ->
case proplists:get_value(user_dir_fun, Opts) of
undefined ->
- case proplists:get_value(user_dir, Opts) of
- undefined ->
+ case proplists:get_value(user_dir, Opts, false) of
+ false ->
default_user_dir();
Dir ->
Dir
diff --git a/lib/ssh/src/ssh_io.erl b/lib/ssh/src/ssh_io.erl
index 1d8f370884..6828fd4760 100644
--- a/lib/ssh/src/ssh_io.erl
+++ b/lib/ssh/src/ssh_io.erl
@@ -27,17 +27,17 @@
-export([yes_no/2, read_password/2, read_line/2, format/2]).
-include("ssh.hrl").
-read_line(Prompt, Ssh) ->
+read_line(Prompt, Opts) ->
format("~s", [listify(Prompt)]),
- proplists:get_value(user_pid, Ssh) ! {self(), question},
+ ?GET_INTERNAL_OPT(user_pid, Opts) ! {self(), question},
receive
Answer when is_list(Answer) ->
Answer
end.
-yes_no(Prompt, Ssh) ->
+yes_no(Prompt, Opts) ->
format("~s [y/n]?", [Prompt]),
- proplists:get_value(user_pid, Ssh#ssh.opts) ! {self(), question},
+ ?GET_INTERNAL_OPT(user_pid, Opts) ! {self(), question},
receive
%% I can't see that the atoms y and n are ever received, but it must
%% be investigated before removing
@@ -52,15 +52,13 @@ yes_no(Prompt, Ssh) ->
"N" -> no;
_ ->
format("please answer y or n\n",[]),
- yes_no(Prompt, Ssh)
+ yes_no(Prompt, Opts)
end
end.
-
-read_password(Prompt, #ssh{opts=Opts}) -> read_password(Prompt, Opts);
-read_password(Prompt, Opts) when is_list(Opts) ->
+read_password(Prompt, Opts) ->
format("~s", [listify(Prompt)]),
- proplists:get_value(user_pid, Opts) ! {self(), user_password},
+ ?GET_INTERNAL_OPT(user_pid, Opts) ! {self(), user_password},
receive
Answer when is_list(Answer) ->
case trim(Answer) of
diff --git a/lib/ssh/src/ssh_options.erl b/lib/ssh/src/ssh_options.erl
new file mode 100644
index 0000000000..395be6b220
--- /dev/null
+++ b/lib/ssh/src/ssh_options.erl
@@ -0,0 +1,895 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2004-2017. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+
+-module(ssh_options).
+
+-include("ssh.hrl").
+-include_lib("kernel/include/file.hrl").
+
+-export([default/1,
+ get_value/5, get_value/6,
+ put_value/5,
+ handle_options/2
+ ]).
+
+-export_type([options/0
+ ]).
+
+%%%================================================================
+%%% Types
+
+-type options() :: #{socket_options := socket_options(),
+ internal_options := internal_options(),
+ option_key() => any()
+ }.
+
+-type socket_options() :: proplists:proplist().
+-type internal_options() :: #{option_key() => any()}.
+
+-type option_key() :: atom().
+
+-type option_in() :: proplists:property() | proplists:proplist() .
+
+-type option_class() :: internal_options | socket_options | user_options .
+
+-type option_declaration() :: #{class := user_options,
+                               chk := fun((any()) -> boolean() | {true,any()}),
+ default => any()
+ }.
+
+-type option_declarations() :: #{ {option_key(),def} := option_declaration() }.
+
+-type error() :: {error,{eoptions,any()}} .
+
+%%%================================================================
+%%%
+%%% Get an option
+%%%
+
+-spec get_value(option_class(), option_key(), options(),
+ atom(), non_neg_integer()) -> any() | no_return().
+
+get_value(Class, Key, Opts, _CallerMod, _CallerLine) when is_map(Opts) ->
+ case Class of
+ internal_options -> maps:get(Key, maps:get(internal_options,Opts));
+ socket_options -> proplists:get_value(Key, maps:get(socket_options,Opts));
+ user_options -> maps:get(Key, Opts)
+ end;
+get_value(Class, Key, Opts, _CallerMod, _CallerLine) ->
+ io:format("*** Bad Opts GET OPT ~p ~p:~p Key=~p,~n Opts=~p~n",[Class,_CallerMod,_CallerLine,Key,Opts]),
+ error({bad_options,Class, Key, Opts, _CallerMod, _CallerLine}).
+
+
+-spec get_value(option_class(), option_key(), options(), any(),
+ atom(), non_neg_integer()) -> any() | no_return().
+
+get_value(socket_options, Key, Opts, Def, _CallerMod, _CallerLine) when is_map(Opts) ->
+ proplists:get_value(Key, maps:get(socket_options,Opts), Def);
+get_value(Class, Key, Opts, Def, CallerMod, CallerLine) when is_map(Opts) ->
+ try get_value(Class, Key, Opts, CallerMod, CallerLine)
+ catch
+ error:{badkey,Key} -> Def
+ end;
+get_value(Class, Key, Opts, _Def, _CallerMod, _CallerLine) ->
+ io:format("*** Bad Opts GET OPT ~p ~p:~p Key=~p,~n Opts=~p~n",[Class,_CallerMod,_CallerLine,Key,Opts]),
+ error({bad_options,Class, Key, Opts, _CallerMod, _CallerLine}).
+
+
+%%%================================================================
+%%%
+%%% Put an option
+%%%
+
+-spec put_value(option_class(), option_in(), options(),
+ atom(), non_neg_integer()) -> options().
+
+put_value(user_options, KeyVal, Opts, _CallerMod, _CallerLine) when is_map(Opts) ->
+ put_user_value(KeyVal, Opts);
+
+put_value(internal_options, KeyVal, Opts, _CallerMod, _CallerLine) when is_map(Opts) ->
+ InternalOpts = maps:get(internal_options,Opts),
+ Opts#{internal_options := put_internal_value(KeyVal, InternalOpts)};
+
+put_value(socket_options, KeyVal, Opts, _CallerMod, _CallerLine) when is_map(Opts) ->
+ SocketOpts = maps:get(socket_options,Opts),
+ Opts#{socket_options := put_socket_value(KeyVal, SocketOpts)}.
+
+
+%%%----------------
+put_user_value(L, Opts) when is_list(L) ->
+ lists:foldl(fun put_user_value/2, Opts, L);
+put_user_value({Key,Value}, Opts) ->
+ Opts#{Key := Value}.
+
+%%%----------------
+put_internal_value(L, IntOpts) when is_list(L) ->
+ lists:foldl(fun put_internal_value/2, IntOpts, L);
+put_internal_value({Key,Value}, IntOpts) ->
+ IntOpts#{Key => Value}.
+
+%%%----------------
+put_socket_value(L, SockOpts) when is_list(L) ->
+ L ++ SockOpts;
+put_socket_value({Key,Value}, SockOpts) ->
+ [{Key,Value} | SockOpts];
+put_socket_value(A, SockOpts) when is_atom(A) ->
+ [A | SockOpts].
+
+%%%================================================================
+%%%
+%%% Initialize the options
+%%%
+
+-spec handle_options(role(), proplists:proplist()) -> options() | error() .
+
+-spec handle_options(role(), proplists:proplist(), options()) -> options() | error() .
+
+handle_options(Role, PropList0) ->
+ handle_options(Role, PropList0, #{socket_options => [],
+ internal_options => #{},
+ user_options => []
+ }).
+
+handle_options(Role, PropList0, Opts0) when is_map(Opts0),
+ is_list(PropList0) ->
+ PropList1 = proplists:unfold(PropList0),
+ try
+ OptionDefinitions = default(Role),
+ InitialMap =
+ maps:fold(
+ fun({K,def}, #{default:=V}, M) -> M#{K=>V};
+ (_,_,M) -> M
+ end,
+ Opts0#{user_options =>
+ maps:get(user_options,Opts0) ++ PropList1
+ },
+ OptionDefinitions),
+ %% Enter the user's values into the map; unknown keys are
+ %% treated as socket options
+ lists:foldl(fun(KV, Vals) ->
+ save(KV, OptionDefinitions, Vals)
+ end, InitialMap, PropList1)
+ catch
+ error:{eoptions, KV, undefined} ->
+ {error, {eoptions,KV}};
+
+ error:{eoptions, KV, Txt} when is_list(Txt) ->
+ {error, {eoptions,{KV,lists:flatten(Txt)}}};
+
+ error:{eoptions, KV, Extra} ->
+ {error, {eoptions,{KV,Extra}}}
+ end.
+
+
+check_fun(Key, Defs) ->
+ #{chk := Fun} = maps:get({Key,def}, Defs),
+ Fun.
+
+%%%================================================================
+%%%
+%%% Check and save one option
+%%%
+
+
+%%% First some prohibited inet options:
+save({K,V}, _, _) when K == reuseaddr ;
+ K == active
+ ->
+ forbidden_option(K, V);
+
+%%% then compatibility conversions:
+save({allow_user_interaction,V}, Opts, Vals) ->
+ save({user_interaction,V}, Opts, Vals);
+
+save({public_key_alg,V}, Defs, Vals) -> % To remove in OTP-20
+ New = case V of
+ 'ssh-rsa' -> ['ssh-rsa', 'ssh-dss'];
+ ssh_rsa -> ['ssh-rsa', 'ssh-dss'];
+ 'ssh-dss' -> ['ssh-dss', 'ssh-rsa'];
+ ssh_dsa -> ['ssh-dss', 'ssh-rsa'];
+ _ -> error({eoptions, {public_key_alg,V},
+ "Unknown algorithm, try pref_public_key_algs instead"})
+ end,
+ save({pref_public_key_algs,New}, Defs, Vals);
+
+%% Special case for socket options 'inet' and 'inet6'
+save(Inet, Defs, OptMap) when Inet==inet ; Inet==inet6 ->
+ save({inet,Inet}, Defs, OptMap);
+
+%% Two clauses to prepare for a proplists:unfold
+save({Inet,true}, Defs, OptMap) when Inet==inet ; Inet==inet6 -> save({inet,Inet}, Defs, OptMap);
+save({Inet,false}, _Defs, OptMap) when Inet==inet ; Inet==inet6 -> OptMap;
+
+%% and finally the 'real stuff':
+save({Key,Value}, Defs, OptMap) when is_map(OptMap) ->
+ try (check_fun(Key,Defs))(Value)
+ of
+ true ->
+ OptMap#{Key := Value};
+ {true, ModifiedValue} ->
+ OptMap#{Key := ModifiedValue};
+ false ->
+ error({eoptions, {Key,Value}, "Bad value"})
+ catch
+ %% An unknown Key (= not in the definition map) is
+ %% regarded as an inet option:
+ error:{badkey,{inet,def}} ->
+ %% atomic (= non-tuple) options 'inet' and 'inet6':
+ OptMap#{socket_options := [Value | maps:get(socket_options,OptMap)]};
+ error:{badkey,{Key,def}} ->
+ OptMap#{socket_options := [{Key,Value} | maps:get(socket_options,OptMap)]};
+
+ %% But a Key that is known but the value does not validate
+ %% by the check fun will give an error exception:
+ error:{check,{BadValue,Extra}} ->
+ error({eoptions, {Key,BadValue}, Extra})
+ end.
+
+%%%================================================================
+%%%
+%%% Default options
+%%%
+
+-spec default(role() | common) -> option_declarations() .
+
+default(server) ->
+ (default(common))
+ #{
+ {subsystems, def} =>
+ #{default => [ssh_sftpd:subsystem_spec([])],
+ chk => fun(L) ->
+ is_list(L) andalso
+ lists:all(fun({Name,{CB,Args}}) ->
+ check_string(Name) andalso
+ is_atom(CB) andalso
+ is_list(Args);
+ (_) ->
+ false
+ end, L)
+ end,
+ class => user_options
+ },
+
+ {shell, def} =>
+ #{default => {shell, start, []},
+ chk => fun({M,F,A}) -> is_atom(M) andalso is_atom(F) andalso is_list(A);
+ (V) -> check_function1(V) orelse check_function2(V)
+ end,
+ class => user_options
+ },
+
+ {exec, def} => % FIXME: need some archeology....
+ #{default => undefined,
+ chk => fun({M,F,_}) -> is_atom(M) andalso is_atom(F);
+ (V) -> is_function(V)
+ end,
+ class => user_options
+ },
+
+ {ssh_cli, def} =>
+ #{default => undefined,
+ chk => fun({Cb, As}) -> is_atom(Cb) andalso is_list(As);
+ (V) -> V == no_cli
+ end,
+ class => user_options
+ },
+
+ {system_dir, def} =>
+ #{default => "/etc/ssh",
+ chk => fun(V) -> check_string(V) andalso check_dir(V) end,
+ class => user_options
+ },
+
+ {auth_methods, def} =>
+ #{default => ?SUPPORTED_AUTH_METHODS,
+ chk => fun check_string/1,
+ class => user_options
+ },
+
+ {auth_method_kb_interactive_data, def} =>
+ #{default => undefined, % Default value can be constructed when User is known
+ chk => fun({S1,S2,S3,B}) ->
+ check_string(S1) andalso
+ check_string(S2) andalso
+ check_string(S3) andalso
+ is_boolean(B);
+ (F) ->
+ check_function3(F)
+ end,
+ class => user_options
+ },
+
+ {user_passwords, def} =>
+ #{default => [],
+ chk => fun(V) ->
+ is_list(V) andalso
+ lists:all(fun({S1,S2}) ->
+ check_string(S1) andalso
+ check_string(S2)
+ end, V)
+ end,
+ class => user_options
+ },
+
+ {password, def} =>
+ #{default => undefined,
+ chk => fun check_string/1,
+ class => user_options
+ },
+
+ {dh_gex_groups, def} =>
+ #{default => undefined,
+ chk => fun check_dh_gex_groups/1,
+ class => user_options
+ },
+
+ {dh_gex_limits, def} =>
+ #{default => {0, infinity},
+ chk => fun({I1,I2}) ->
+ check_pos_integer(I1) andalso
+ check_pos_integer(I2) andalso
+ I1 < I2;
+ (_) ->
+ false
+ end,
+ class => user_options
+ },
+
+ {pwdfun, def} =>
+ #{default => undefined,
+ chk => fun(V) -> check_function4(V) orelse check_function2(V) end,
+ class => user_options
+ },
+
+ {negotiation_timeout, def} =>
+ #{default => 2*60*1000,
+ chk => fun check_timeout/1,
+ class => user_options
+ },
+
+ {max_sessions, def} =>
+ #{default => infinity,
+ chk => fun check_pos_integer/1,
+ class => user_options
+ },
+
+ {max_channels, def} =>
+ #{default => infinity,
+ chk => fun check_pos_integer/1,
+ class => user_options
+ },
+
+ {parallel_login, def} =>
+ #{default => false,
+ chk => fun erlang:is_boolean/1,
+ class => user_options
+ },
+
+ {minimal_remote_max_packet_size, def} =>
+ #{default => 0,
+ chk => fun check_pos_integer/1,
+ class => user_options
+ },
+
+ {failfun, def} =>
+ #{default => fun(_,_,_) -> void end,
+ chk => fun(V) -> check_function3(V) orelse
+ check_function2(V) % Backwards compatibility
+ end,
+ class => user_options
+ },
+
+ {connectfun, def} =>
+ #{default => fun(_,_,_) -> void end,
+ chk => fun check_function3/1,
+ class => user_options
+ },
+
+%%%%% Undocumented
+ {infofun, def} =>
+ #{default => fun(_,_,_) -> void end,
+ chk => fun(V) -> check_function3(V) orelse
+ check_function2(V) % Backwards compatibility
+ end,
+ class => user_options
+ }
+ };
+
+default(client) ->
+ (default(common))
+ #{
+ {dsa_pass_phrase, def} =>
+ #{default => undefined,
+ chk => fun check_string/1,
+ class => user_options
+ },
+
+ {rsa_pass_phrase, def} =>
+ #{default => undefined,
+ chk => fun check_string/1,
+ class => user_options
+ },
+
+ {silently_accept_hosts, def} =>
+ #{default => false,
+ chk => fun check_silently_accept_hosts/1,
+ class => user_options
+ },
+
+ {user_interaction, def} =>
+ #{default => true,
+ chk => fun erlang:is_boolean/1,
+ class => user_options
+ },
+
+ {pref_public_key_algs, def} =>
+ #{default =>
+ %% Get dynamically supported keys in the order of the ?SUPPORTED_USER_KEYS
+ [A || A <- ?SUPPORTED_USER_KEYS,
+ lists:member(A, ssh_transport:supported_algorithms(public_key))],
+ chk =>
+ fun check_pref_public_key_algs/1,
+ class =>
+ ssh
+ },
+
+ {dh_gex_limits, def} =>
+ #{default => {1024, 6144, 8192}, % FIXME: Is this true nowadays?
+ chk => fun({Min,I,Max}) ->
+ lists:all(fun check_pos_integer/1,
+ [Min,I,Max]);
+ (_) -> false
+ end,
+ class => user_options
+ },
+
+ {connect_timeout, def} =>
+ #{default => infinity,
+ chk => fun check_timeout/1,
+ class => user_options
+ },
+
+ {user, def} =>
+ #{default =>
+ begin
+ Env = case os:type() of
+ {win32, _} -> "USERNAME";
+ {unix, _} -> "LOGNAME"
+ end,
+ case os:getenv(Env) of
+ false ->
+ case os:getenv("USER") of
+ false -> undefined;
+ User -> User
+ end;
+ User ->
+ User
+ end
+ end,
+ chk => fun check_string/1,
+ class => user_options
+ },
+
+ {password, def} =>
+ #{default => undefined,
+ chk => fun check_string/1,
+ class => user_options
+ },
+
+ {quiet_mode, def} =>
+ #{default => false,
+ chk => fun erlang:is_boolean/1,
+ class => user_options
+ },
+
+ {idle_time, def} =>
+ #{default => infinity,
+ chk => fun check_timeout/1,
+ class => user_options
+ },
+
+%%%%% Undocumented
+ {keyboard_interact_fun, def} =>
+ #{default => undefined,
+ chk => fun check_function3/1,
+ class => user_options
+ }
+ };
+
+default(common) ->
+ #{
+ {user_dir, def} =>
+ #{default => false, % FIXME: TBD ~/.ssh at time of call when user is known
+ chk => fun(V) -> check_string(V) andalso check_dir(V) end,
+ class => user_options
+ },
+
+ {preferred_algorithms, def} =>
+ #{default => ssh:default_algorithms(),
+ chk => fun check_preferred_algorithms/1,
+ class => user_options
+ },
+
+ {id_string, def} =>
+ #{default => undefined, % FIXME: see ssh_transport:ssh_vsn/0
+ chk => fun(random) ->
+ {true, {random,2,5}}; % 2 - 5 random characters
+ ({random,I1,I2}) ->
+ %% Undocumented
+ check_pos_integer(I1) andalso
+ check_pos_integer(I2) andalso
+ I1=<I2;
+ (V) ->
+ check_string(V)
+ end,
+ class => user_options
+ },
+
+ {key_cb, def} =>
+ #{default => {ssh_file, []},
+ chk => fun({Mod,Opts}) -> is_atom(Mod) andalso is_list(Opts);
+ (Mod) when is_atom(Mod) -> {true, {Mod,[]}};
+ (_) -> false
+ end,
+ class => user_options
+ },
+
+ {profile, def} =>
+ #{default => ?DEFAULT_PROFILE,
+ chk => fun erlang:is_atom/1,
+ class => user_options
+ },
+
+ %% This is a "SocketOption"...
+ %% {fd, def} =>
+ %% #{default => undefined,
+ %% chk => fun erlang:is_integer/1,
+ %% class => user_options
+ %% },
+
+ {disconnectfun, def} =>
+ #{default => fun(_) -> void end,
+ chk => fun check_function1/1,
+ class => user_options
+ },
+
+ {unexpectedfun, def} =>
+ #{default => fun(_,_) -> report end,
+ chk => fun check_function2/1,
+ class => user_options
+ },
+
+ {ssh_msg_debug_fun, def} =>
+ #{default => fun(_,_,_,_) -> void end,
+ chk => fun check_function4/1,
+ class => user_options
+ },
+
+ {rekey_limit, def} => % FIXME: Why not common?
+ #{default => 1024000000,
+ chk => fun check_non_neg_integer/1,
+ class => user_options
+ },
+
+%%%%% Undocumented
+ {transport, def} =>
+ #{default => ?DEFAULT_TRANSPORT,
+ chk => fun({A,B,C}) ->
+ is_atom(A) andalso is_atom(B) andalso is_atom(C)
+ end,
+ class => user_options
+ },
+
+ {vsn, def} =>
+ #{default => {2,0},
+ chk => fun({Maj,Min}) -> check_non_neg_integer(Maj) andalso check_non_neg_integer(Min);
+ (_) -> false
+ end,
+ class => user_options
+ },
+
+ {tstflg, def} =>
+ #{default => [],
+ chk => fun erlang:is_list/1,
+ class => user_options
+ },
+
+ {user_dir_fun, def} =>
+ #{default => undefined,
+ chk => fun check_function1/1,
+ class => user_options
+ },
+
+ {max_random_length_padding, def} =>
+ #{default => ?MAX_RND_PADDING_LEN,
+ chk => fun check_non_neg_integer/1,
+ class => user_options
+ }
+ }.
+
+
+%%%================================================================
+%%%================================================================
+%%%================================================================
+
+%%%
+%%% check_*/1 -> true | false | error({check,Spec})
+%%% See error_in_check/2,3
+%%%
+
+%%% error_in_check(BadValue) -> error_in_check(BadValue, undefined).
+
+error_in_check(BadValue, Extra) -> error({check,{BadValue,Extra}}).
+
+
+%%%----------------------------------------------------------------
+check_timeout(infinity) -> true;
+check_timeout(I) -> check_pos_integer(I).
+
+%%%----------------------------------------------------------------
+check_pos_integer(I) -> is_integer(I) andalso I>0.
+
+%%%----------------------------------------------------------------
+check_non_neg_integer(I) -> is_integer(I) andalso I>=0.
+
+%%%----------------------------------------------------------------
+check_function1(F) -> is_function(F,1).
+check_function2(F) -> is_function(F,2).
+check_function3(F) -> is_function(F,3).
+check_function4(F) -> is_function(F,4).
+
+%%%----------------------------------------------------------------
+check_pref_public_key_algs(V) ->
+    %% Get the dynamically supported keys, that is, those
+ %% that are stored
+ PKs = ssh_transport:supported_algorithms(public_key),
+ CHK = fun(A, Ack) ->
+ case lists:member(A, PKs) of
+ true ->
+ [A|Ack];
+ false ->
+ %% Check with the documented options, that is,
+                        %% the ones we can handle
+ case lists:member(A,?SUPPORTED_USER_KEYS) of
+ false ->
+ %% An algorithm ssh never can handle
+ error_in_check(A, "Not supported public key");
+ true ->
+ %% An algorithm ssh can handle, but not in
+ %% this very call
+ Ack
+ end
+ end
+ end,
+ case lists:foldr(
+ fun(ssh_dsa, Ack) -> CHK('ssh-dss', Ack); % compatibility
+ (ssh_rsa, Ack) -> CHK('ssh-rsa', Ack); % compatibility
+ (X, Ack) -> CHK(X, Ack)
+ end, [], V)
+ of
+ V -> true;
+ [] -> false;
+ V1 -> {true,V1}
+ end.
+
+
+%%%----------------------------------------------------------------
+%% Check that it is a directory and is readable
+check_dir(Dir) ->
+ case file:read_file_info(Dir) of
+ {ok, #file_info{type = directory,
+ access = Access}} ->
+ case Access of
+ read -> true;
+ read_write -> true;
+ _ -> error_in_check(Dir, eacces)
+ end;
+
+ {ok, #file_info{}}->
+ error_in_check(Dir, enotdir);
+
+ {error, Error} ->
+ error_in_check(Dir, Error)
+ end.
+
+%%%----------------------------------------------------------------
+check_string(S) -> is_list(S). % FIXME: stub
+
+%%%----------------------------------------------------------------
+check_dh_gex_groups({file,File}) when is_list(File) ->
+ case file:consult(File) of
+ {ok, GroupDefs} ->
+ check_dh_gex_groups(GroupDefs);
+ {error, Error} ->
+ error_in_check({file,File},Error)
+ end;
+
+check_dh_gex_groups({ssh_moduli_file,File}) when is_list(File) ->
+ case file:open(File,[read]) of
+ {ok,D} ->
+ try
+ read_moduli_file(D, 1, [])
+ of
+ {ok,Moduli} ->
+ check_dh_gex_groups(Moduli);
+ {error,Error} ->
+ error_in_check({ssh_moduli_file,File}, Error)
+ catch
+ _:_ ->
+ error_in_check({ssh_moduli_file,File}, "Bad format in file "++File)
+ after
+ file:close(D)
+ end;
+
+ {error, Error} ->
+ error_in_check({ssh_moduli_file,File}, Error)
+ end;
+
+check_dh_gex_groups(L0) when is_list(L0), is_tuple(hd(L0)) ->
+ {true,
+ collect_per_size(
+ lists:foldl(
+ fun({N,G,P}, Acc) when is_integer(N),N>0,
+ is_integer(G),G>0,
+ is_integer(P),P>0 ->
+ [{N,{G,P}} | Acc];
+ ({N,{G,P}}, Acc) when is_integer(N),N>0,
+ is_integer(G),G>0,
+ is_integer(P),P>0 ->
+ [{N,{G,P}} | Acc];
+ ({N,GPs}, Acc) when is_list(GPs) ->
+ lists:foldr(fun({Gi,Pi}, Acci) when is_integer(Gi),Gi>0,
+ is_integer(Pi),Pi>0 ->
+ [{N,{Gi,Pi}} | Acci]
+ end, Acc, GPs)
+ end, [], L0))};
+
+check_dh_gex_groups(_) ->
+ false.
+
+
+
+collect_per_size(L) ->
+ lists:foldr(
+ fun({Sz,GP}, [{Sz,GPs}|Acc]) -> [{Sz,[GP|GPs]}|Acc];
+ ({Sz,GP}, Acc) -> [{Sz,[GP]}|Acc]
+ end, [], lists:sort(L)).
+
+read_moduli_file(D, I, Acc) ->
+ case io:get_line(D,"") of
+ {error,Error} ->
+ {error,Error};
+ eof ->
+ {ok, Acc};
+ "#" ++ _ -> read_moduli_file(D, I+1, Acc);
+ <<"#",_/binary>> -> read_moduli_file(D, I+1, Acc);
+ Data ->
+ Line = if is_binary(Data) -> binary_to_list(Data);
+ is_list(Data) -> Data
+ end,
+ try
+ [_Time,_Class,_Tests,_Tries,Size,G,P] = string:tokens(Line," \r\n"),
+ M = {list_to_integer(Size),
+ {list_to_integer(G), list_to_integer(P,16)}
+ },
+ read_moduli_file(D, I+1, [M|Acc])
+ catch
+ _:_ ->
+ read_moduli_file(D, I+1, Acc)
+ end
+ end.
+
+%%%----------------------------------------------------------------
+-define(SHAs, [md5, sha, sha224, sha256, sha384, sha512]).
+
+check_silently_accept_hosts(B) when is_boolean(B) -> true;
+check_silently_accept_hosts(F) when is_function(F,2) -> true;
+check_silently_accept_hosts({S,F}) when is_atom(S),
+ is_function(F,2) ->
+ lists:member(S, ?SHAs) andalso
+ lists:member(S, proplists:get_value(hashs,crypto:supports()));
+check_silently_accept_hosts({L,F}) when is_list(L),
+ is_function(F,2) ->
+ lists:all(fun(S) ->
+ lists:member(S, ?SHAs) andalso
+ lists:member(S, proplists:get_value(hashs,crypto:supports()))
+ end, L);
+check_silently_accept_hosts(_) -> false.
+
+%%%----------------------------------------------------------------
+check_preferred_algorithms(Algs) ->
+ try alg_duplicates(Algs, [], [])
+ of
+ [] ->
+ {true,
+ [try ssh_transport:supported_algorithms(Key)
+ of
+ DefAlgs -> handle_pref_alg(Key,Vals,DefAlgs)
+ catch
+ _:_ -> error_in_check(Key,"Bad preferred_algorithms key")
+ end || {Key,Vals} <- Algs]
+ };
+
+ Dups ->
+ error_in_check(Dups, "Duplicates")
+ catch
+ _:_ ->
+ false
+ end.
+
+alg_duplicates([{K,V}|KVs], Ks, Dups0) ->
+ Dups =
+ case lists:member(K,Ks) of
+ true -> [K|Dups0];
+ false -> Dups0
+ end,
+ case V--lists:usort(V) of
+ [] -> alg_duplicates(KVs, [K|Ks], Dups);
+ Ds -> alg_duplicates(KVs, [K|Ks], Dups++Ds)
+ end;
+alg_duplicates([], _Ks, Dups) ->
+ Dups.
+
+handle_pref_alg(Key,
+ Vs=[{client2server,C2Ss=[_|_]},{server2client,S2Cs=[_|_]}],
+ [{client2server,Sup_C2Ss},{server2client,Sup_S2Cs}]
+ ) ->
+ chk_alg_vs(Key, C2Ss, Sup_C2Ss),
+ chk_alg_vs(Key, S2Cs, Sup_S2Cs),
+ {Key, Vs};
+
+handle_pref_alg(Key,
+ Vs=[{server2client,[_|_]},{client2server,[_|_]}],
+ Sup=[{client2server,_},{server2client,_}]
+ ) ->
+ handle_pref_alg(Key, lists:reverse(Vs), Sup);
+
+handle_pref_alg(Key,
+ Vs=[V|_],
+ Sup=[{client2server,_},{server2client,_}]
+ ) when is_atom(V) ->
+ handle_pref_alg(Key, [{client2server,Vs},{server2client,Vs}], Sup);
+
+handle_pref_alg(Key,
+ Vs=[V|_],
+ Sup=[S|_]
+ ) when is_atom(V), is_atom(S) ->
+ chk_alg_vs(Key, Vs, Sup),
+ {Key, Vs};
+
+handle_pref_alg(Key, Vs, _) ->
+ error_in_check({Key,Vs}, "Badly formed list").
+
+chk_alg_vs(OptKey, Values, SupportedValues) ->
+ case (Values -- SupportedValues) of
+ [] -> Values;
+ Bad -> error_in_check({OptKey,Bad}, "Unsupported value(s) found")
+ end.
+
+%%%----------------------------------------------------------------
+forbidden_option(K,V) ->
+ Txt = io_lib:format("The option '~s' is used internally. The "
+ "user is not allowed to specify this option.",
+ [K]),
+ error({eoptions, {K,V}, Txt}).
+
+%%%----------------------------------------------------------------
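
Note: a sketch of how the new ssh_options:handle_options/2 is expected to behave for a small client option list, based on the option declarations and the save/3 clauses above. The module name and option values are for illustration only, and the exact ordering inside socket_options is not asserted.

-module(ssh_options_sketch).
-export([demo/0]).

demo() ->
    case ssh_options:handle_options(client, [{user, "alice"},
                                             {silently_accept_hosts, true},
                                             inet6,               % bare atom -> socket option
                                             {nodelay, true}]) of % unknown key -> socket option
        {error, {eoptions, Reason}} ->
            {bad_option, Reason};
        Opts when is_map(Opts) ->
            #{user             := "alice",    % set by the caller
              connect_timeout  := infinity,   % filled in from its declared default
              socket_options   := SockOpts,   % proplist holding inet6 and {nodelay,true}
              internal_options := _Internal} = Opts,
            SockOpts
    end.
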
diff --git a/lib/ssh/src/ssh_sftp.erl b/lib/ssh/src/ssh_sftp.erl
index b937f0412d..140856c8e3 100644
--- a/lib/ssh/src/ssh_sftp.erl
+++ b/lib/ssh/src/ssh_sftp.erl
@@ -100,18 +100,14 @@ start_channel(Socket) when is_port(Socket) ->
start_channel(Host) when is_list(Host) ->
start_channel(Host, []).
-start_channel(Socket, Options) when is_port(Socket) ->
- Timeout =
- %% A mixture of ssh:connect and ssh_sftp:start_channel:
- case proplists:get_value(connect_timeout, Options, undefined) of
- undefined ->
- proplists:get_value(timeout, Options, infinity);
- TO ->
- TO
- end,
- case ssh:connect(Socket, Options, Timeout) of
+start_channel(Socket, UserOptions) when is_port(Socket) ->
+ {SshOpts, _ChanOpts, SftpOpts} = handle_options(UserOptions),
+ Timeout = % A mixture of ssh:connect and ssh_sftp:start_channel:
+ proplists:get_value(connect_timeout, SshOpts,
+ proplists:get_value(timeout, SftpOpts, infinity)),
+ case ssh:connect(Socket, SshOpts, Timeout) of
{ok,Cm} ->
- case start_channel(Cm, Options) of
+ case start_channel(Cm, UserOptions) of
{ok, Pid} ->
{ok, Pid, Cm};
Error ->
@@ -120,9 +116,9 @@ start_channel(Socket, Options) when is_port(Socket) ->
Error ->
Error
end;
-start_channel(Cm, Opts) when is_pid(Cm) ->
- Timeout = proplists:get_value(timeout, Opts, infinity),
- {_, ChanOpts, SftpOpts} = handle_options(Opts, [], [], []),
+start_channel(Cm, UserOptions) when is_pid(Cm) ->
+ Timeout = proplists:get_value(timeout, UserOptions, infinity),
+ {_SshOpts, ChanOpts, SftpOpts} = handle_options(UserOptions),
case ssh_xfer:attach(Cm, [], ChanOpts) of
{ok, ChannelId, Cm} ->
case ssh_channel:start(Cm, ChannelId,
@@ -143,15 +139,17 @@ start_channel(Cm, Opts) when is_pid(Cm) ->
Error
end;
-start_channel(Host, Opts) ->
- start_channel(Host, 22, Opts).
-start_channel(Host, Port, Opts) ->
- {SshOpts, ChanOpts, SftpOpts} = handle_options(Opts, [], [], []),
- Timeout = proplists:get_value(timeout, SftpOpts, infinity),
+start_channel(Host, UserOptions) ->
+ start_channel(Host, 22, UserOptions).
+
+start_channel(Host, Port, UserOptions) ->
+ {SshOpts, ChanOpts, SftpOpts} = handle_options(UserOptions),
+ Timeout = % A mixture of ssh:connect and ssh_sftp:start_channel:
+ proplists:get_value(connect_timeout, SshOpts,
+ proplists:get_value(timeout, SftpOpts, infinity)),
case ssh_xfer:connect(Host, Port, SshOpts, ChanOpts, Timeout) of
{ok, ChannelId, Cm} ->
- case ssh_channel:start(Cm, ChannelId, ?MODULE, [Cm,
- ChannelId, SftpOpts]) of
+ case ssh_channel:start(Cm, ChannelId, ?MODULE, [Cm,ChannelId,SftpOpts]) of
{ok, Pid} ->
case wait_for_version_negotiation(Pid, Timeout) of
ok ->
@@ -294,7 +292,7 @@ read(Pid, Handle, Len) ->
read(Pid, Handle, Len, FileOpTimeout) ->
call(Pid, {read,false,Handle, Len}, FileOpTimeout).
-%% TODO this ought to be a cast! Is so in all practial meaning
+%% TODO this ought to be a cast! Is so in all practical meaning
%% even if it is obscure!
apread(Pid, Handle, Offset, Len) ->
call(Pid, {pread,true,Handle, Offset, Len}, infinity).
@@ -313,12 +311,12 @@ write(Pid, Handle, Data) ->
write(Pid, Handle, Data, FileOpTimeout) ->
call(Pid, {write,false,Handle,Data}, FileOpTimeout).
-%% TODO this ought to be a cast! Is so in all practial meaning
+%% TODO this ought to be a cast! Is so in all practical meaning
%% even if it is obscure!
apwrite(Pid, Handle, Offset, Data) ->
call(Pid, {pwrite,true,Handle,Offset,Data}, infinity).
-%% TODO this ought to be a cast! Is so in all practial meaning
+%% TODO this ought to be a cast! Is so in all practical meaning
%% even if it is obscure!
awrite(Pid, Handle, Data) ->
call(Pid, {write,true,Handle,Data}, infinity).
@@ -865,6 +863,9 @@ terminate(_Reason, State) ->
%%====================================================================
%% Internal functions
%%====================================================================
+handle_options(UserOptions) ->
+ handle_options(UserOptions, [], [], []).
+
handle_options([], Sftp, Chan, Ssh) ->
{Ssh, Chan, Sftp};
handle_options([{timeout, _} = Opt | Rest], Sftp, Chan, Ssh) ->
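
Note: a sketch of the timeout precedence used by the reworked start_channel functions above; the module and helper names are made up for illustration.

-module(sftp_timeout_sketch).
-export([pick_timeout/2]).

%% The ssh-level connect_timeout wins, then the sftp-level timeout,
%% then infinity.
pick_timeout(SshOpts, SftpOpts) ->
    proplists:get_value(connect_timeout, SshOpts,
                        proplists:get_value(timeout, SftpOpts, infinity)).

%% pick_timeout([{connect_timeout,5000}], [{timeout,30000}]) -> 5000
%% pick_timeout([],                       [{timeout,30000}]) -> 30000
%% pick_timeout([],                       [])                -> infinity
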
diff --git a/lib/ssh/src/ssh_sftpd.erl b/lib/ssh/src/ssh_sftpd.erl
index b739955836..9352046795 100644
--- a/lib/ssh/src/ssh_sftpd.erl
+++ b/lib/ssh/src/ssh_sftpd.erl
@@ -664,29 +664,25 @@ open(Vsn, ReqId, Data, State) when Vsn >= 4 ->
do_open(ReqId, State, Path, Flags).
do_open(ReqId, State0, Path, Flags) ->
- #state{file_handler = FileMod, file_state = FS0, root = Root, xf = #ssh_xfer{vsn = Vsn}} = State0,
- XF = State0#state.xf,
- F = [binary | Flags],
- {IsDir, _FS1} = FileMod:is_dir(Path, FS0),
+ #state{file_handler = FileMod, file_state = FS0, xf = #ssh_xfer{vsn = Vsn}} = State0,
+ AbsPath = relate_file_name(Path, State0),
+ {IsDir, _FS1} = FileMod:is_dir(AbsPath, FS0),
case IsDir of
true when Vsn > 5 ->
ssh_xfer:xf_send_status(State0#state.xf, ReqId,
- ?SSH_FX_FILE_IS_A_DIRECTORY, "File is a directory");
+ ?SSH_FX_FILE_IS_A_DIRECTORY, "File is a directory"),
+ State0;
true ->
ssh_xfer:xf_send_status(State0#state.xf, ReqId,
- ?SSH_FX_FAILURE, "File is a directory");
+ ?SSH_FX_FAILURE, "File is a directory"),
+ State0;
false ->
- AbsPath = case Root of
- "" ->
- Path;
- _ ->
- relate_file_name(Path, State0)
- end,
- {Res, FS1} = FileMod:open(AbsPath, F, FS0),
+ OpenFlags = [binary | Flags],
+ {Res, FS1} = FileMod:open(AbsPath, OpenFlags, FS0),
State1 = State0#state{file_state = FS1},
case Res of
{ok, IoDevice} ->
- add_handle(State1, XF, ReqId, file, {Path,IoDevice});
+ add_handle(State1, State0#state.xf, ReqId, file, {Path,IoDevice});
{error, Error} ->
ssh_xfer:xf_send_status(State1#state.xf, ReqId,
ssh_xfer:encode_erlang_status(Error)),
@@ -742,6 +738,10 @@ resolve_symlinks_2([], State, _LinkCnt, AccPath) ->
{{ok, AccPath}, State}.
+%% The File argument is always in a user-visible file system, i.e. it
+%% is under Root and is relative to CWD, or to Root if it starts with "/".
+%% The result of the function is always an absolute path in a
+%% "backend" file system.
relate_file_name(File, State) ->
relate_file_name(File, State, _Canonicalize=true).
@@ -749,19 +749,20 @@ relate_file_name(File, State, Canonicalize) when is_binary(File) ->
relate_file_name(unicode:characters_to_list(File), State, Canonicalize);
relate_file_name(File, #state{cwd = CWD, root = ""}, Canonicalize) ->
relate_filename_to_path(File, CWD, Canonicalize);
-relate_file_name(File, #state{root = Root}, Canonicalize) ->
- case is_within_root(Root, File) of
- true ->
- File;
- false ->
- RelFile = make_relative_filename(File),
- NewFile = relate_filename_to_path(RelFile, Root, Canonicalize),
- case is_within_root(Root, NewFile) of
- true ->
- NewFile;
- false ->
- Root
- end
+relate_file_name(File, #state{cwd = CWD, root = Root}, Canonicalize) ->
+ CWD1 = case is_within_root(Root, CWD) of
+ true -> CWD;
+ false -> Root
+ end,
+ AbsFile = case make_relative_filename(File) of
+ File ->
+ relate_filename_to_path(File, CWD1, Canonicalize);
+ RelFile ->
+ relate_filename_to_path(RelFile, Root, Canonicalize)
+ end,
+ case is_within_root(Root, AbsFile) of
+ true -> AbsFile;
+ false -> Root
end.
is_within_root(Root, File) ->
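
Note: a simplified sketch of the confinement rule implemented by the new relate_file_name/3 above. It uses plain string prefix checks instead of is_within_root/2 and does not collapse ".." segments (the real code canonicalizes the path), so it is illustrative only.

-module(sftpd_root_sketch).
-export([confine/3]).

%% Relative names resolve under CWD (falling back to Root if CWD has escaped
%% it), absolute names resolve under Root, and any result outside Root is
%% clamped back to Root.
confine(File, CWD, Root) ->
    Base = case lists:prefix(Root, CWD) of
               true  -> CWD;
               false -> Root
           end,
    Abs = case File of
              "/" ++ Rel -> filename:join(Root, Rel);    % absolute: place under Root
              _          -> filename:absname(File, Base) % relative: place under Base
          end,
    case lists:prefix(Root, Abs) of
        true  -> Abs;
        false -> Root
    end.
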
diff --git a/lib/ssh/src/ssh_sftpd_file_api.erl b/lib/ssh/src/ssh_sftpd_file_api.erl
index 78f452df67..e444e52ac0 100644
--- a/lib/ssh/src/ssh_sftpd_file_api.erl
+++ b/lib/ssh/src/ssh_sftpd_file_api.erl
@@ -36,7 +36,7 @@
-callback list_dir(file:name(), State::term()) ->
{{ok, Filenames::term()}, State::term()} | {{error, Reason::term()}, State::term()}.
-callback make_dir(Dir::term(), State::term()) ->
- {{ok, State::term()},State::term()} | {{error, Reason::term()}, State::term()}.
+ {ok, State::term()} | {{error, Reason::term()}, State::term()}.
-callback make_symlink(Path2::term(), Path::term(), State::term()) ->
{ok, State::term()} | {{error, Reason::term()}, State::term()}.
-callback open(Path::term(), Flags::term(), State::term()) ->
diff --git a/lib/ssh/src/ssh_subsystem_sup.erl b/lib/ssh/src/ssh_subsystem_sup.erl
index 637f5f398f..cf82db458f 100644
--- a/lib/ssh/src/ssh_subsystem_sup.erl
+++ b/lib/ssh/src/ssh_subsystem_sup.erl
@@ -26,6 +26,8 @@
-behaviour(supervisor).
+-include("ssh.hrl").
+
-export([start_link/1,
connection_supervisor/1,
channel_supervisor/1
@@ -37,8 +39,8 @@
%%%=========================================================================
%%% API
%%%=========================================================================
-start_link(Opts) ->
- supervisor:start_link(?MODULE, [Opts]).
+start_link(Options) ->
+ supervisor:start_link(?MODULE, [Options]).
connection_supervisor(SupPid) ->
Children = supervisor:which_children(SupPid),
@@ -53,42 +55,42 @@ channel_supervisor(SupPid) ->
%%%=========================================================================
-spec init( [term()] ) -> {ok,{supervisor:sup_flags(),[supervisor:child_spec()]}} | ignore .
-init([Opts]) ->
+init([Options]) ->
RestartStrategy = one_for_all,
MaxR = 0,
MaxT = 3600,
- Children = child_specs(Opts),
+ Children = child_specs(Options),
{ok, {{RestartStrategy, MaxR, MaxT}, Children}}.
%%%=========================================================================
%%% Internal functions
%%%=========================================================================
-child_specs(Opts) ->
- case proplists:get_value(role, Opts) of
+child_specs(Options) ->
+ case ?GET_INTERNAL_OPT(role, Options) of
client ->
[];
server ->
- [ssh_channel_child_spec(Opts), ssh_connectinon_child_spec(Opts)]
+ [ssh_channel_child_spec(Options), ssh_connectinon_child_spec(Options)]
end.
-ssh_connectinon_child_spec(Opts) ->
- Address = proplists:get_value(address, Opts),
- Port = proplists:get_value(port, Opts),
- Role = proplists:get_value(role, Opts),
+ssh_connectinon_child_spec(Options) ->
+ Address = ?GET_INTERNAL_OPT(address, Options),
+ Port = ?GET_INTERNAL_OPT(port, Options),
+ Role = ?GET_INTERNAL_OPT(role, Options),
Name = id(Role, ssh_connection_sup, Address, Port),
- StartFunc = {ssh_connection_sup, start_link, [Opts]},
+ StartFunc = {ssh_connection_sup, start_link, [Options]},
Restart = temporary,
Shutdown = 5000,
Modules = [ssh_connection_sup],
Type = supervisor,
{Name, StartFunc, Restart, Shutdown, Type, Modules}.
-ssh_channel_child_spec(Opts) ->
- Address = proplists:get_value(address, Opts),
- Port = proplists:get_value(port, Opts),
- Role = proplists:get_value(role, Opts),
+ssh_channel_child_spec(Options) ->
+ Address = ?GET_INTERNAL_OPT(address, Options),
+ Port = ?GET_INTERNAL_OPT(port, Options),
+ Role = ?GET_INTERNAL_OPT(role, Options),
Name = id(Role, ssh_channel_sup, Address, Port),
- StartFunc = {ssh_channel_sup, start_link, [Opts]},
+ StartFunc = {ssh_channel_sup, start_link, [Options]},
Restart = temporary,
Shutdown = infinity,
Modules = [ssh_channel_sup],
diff --git a/lib/ssh/src/ssh_system_sup.erl b/lib/ssh/src/ssh_system_sup.erl
index e97ac7b01a..b0bbd3aae5 100644
--- a/lib/ssh/src/ssh_system_sup.erl
+++ b/lib/ssh/src/ssh_system_sup.erl
@@ -45,12 +45,12 @@
%%%=========================================================================
%%% Internal API
%%%=========================================================================
-start_link(ServerOpts) ->
- Address = proplists:get_value(address, ServerOpts),
- Port = proplists:get_value(port, ServerOpts),
- Profile = proplists:get_value(profile, proplists:get_value(ssh_opts, ServerOpts), ?DEFAULT_PROFILE),
+start_link(Options) ->
+ Address = ?GET_INTERNAL_OPT(address, Options),
+ Port = ?GET_INTERNAL_OPT(port, Options),
+ Profile = ?GET_OPT(profile, Options),
Name = make_name(Address, Port, Profile),
- supervisor:start_link({local, Name}, ?MODULE, [ServerOpts]).
+ supervisor:start_link({local, Name}, ?MODULE, [Options]).
stop_listener(SysSup) ->
stop_acceptor(SysSup).
@@ -127,12 +127,12 @@ restart_acceptor(Address, Port, Profile) ->
%%%=========================================================================
-spec init( [term()] ) -> {ok,{supervisor:sup_flags(),[supervisor:child_spec()]}} | ignore .
-init([ServerOpts]) ->
+init([Options]) ->
RestartStrategy = one_for_one,
MaxR = 0,
MaxT = 3600,
- Children = case proplists:get_value(asocket,ServerOpts) of
- undefined -> child_specs(ServerOpts);
+ Children = case ?GET_INTERNAL_OPT(asocket,Options,undefined) of
+ undefined -> child_specs(Options);
_ -> []
end,
{ok, {{RestartStrategy, MaxR, MaxT}, Children}}.
@@ -140,24 +140,24 @@ init([ServerOpts]) ->
%%%=========================================================================
%%% Internal functions
%%%=========================================================================
-child_specs(ServerOpts) ->
- [ssh_acceptor_child_spec(ServerOpts)].
+child_specs(Options) ->
+ [ssh_acceptor_child_spec(Options)].
-ssh_acceptor_child_spec(ServerOpts) ->
- Address = proplists:get_value(address, ServerOpts),
- Port = proplists:get_value(port, ServerOpts),
- Profile = proplists:get_value(profile, proplists:get_value(ssh_opts, ServerOpts), ?DEFAULT_PROFILE),
+ssh_acceptor_child_spec(Options) ->
+ Address = ?GET_INTERNAL_OPT(address, Options),
+ Port = ?GET_INTERNAL_OPT(port, Options),
+ Profile = ?GET_OPT(profile, Options),
Name = id(ssh_acceptor_sup, Address, Port, Profile),
- StartFunc = {ssh_acceptor_sup, start_link, [ServerOpts]},
+ StartFunc = {ssh_acceptor_sup, start_link, [Options]},
Restart = transient,
Shutdown = infinity,
Modules = [ssh_acceptor_sup],
Type = supervisor,
{Name, StartFunc, Restart, Shutdown, Type, Modules}.
-ssh_subsystem_child_spec(ServerOpts) ->
+ssh_subsystem_child_spec(Options) ->
Name = make_ref(),
- StartFunc = {ssh_subsystem_sup, start_link, [ServerOpts]},
+ StartFunc = {ssh_subsystem_sup, start_link, [Options]},
Restart = temporary,
Shutdown = infinity,
Modules = [ssh_subsystem_sup],
diff --git a/lib/ssh/src/ssh_transport.erl b/lib/ssh/src/ssh_transport.erl
index 21ba34506a..02c995399a 100644
--- a/lib/ssh/src/ssh_transport.erl
+++ b/lib/ssh/src/ssh_transport.erl
@@ -44,6 +44,7 @@
handle_kexdh_reply/2,
handle_kex_ecdh_init/2,
handle_kex_ecdh_reply/2,
+ parallell_gen_key/1,
extract_public_key/1,
ssh_packet/2, pack/2,
sha/1, sign/3, verify/4]).
@@ -78,6 +79,10 @@ default_algorithms() -> [{K,default_algorithms(K)} || K <- algo_classes()].
algo_classes() -> [kex, public_key, cipher, mac, compression].
+default_algorithms(kex) ->
+ supported_algorithms(kex, [
+ 'diffie-hellman-group1-sha1' % Gone in OpenSSH 7.3.p1
+ ]);
default_algorithms(cipher) ->
supported_algorithms(cipher, same(['AEAD_AES_128_GCM',
@@ -94,34 +99,39 @@ supported_algorithms() -> [{K,supported_algorithms(K)} || K <- algo_classes()].
supported_algorithms(kex) ->
select_crypto_supported(
[
- {'ecdh-sha2-nistp256', [{public_keys,ecdh}, {ec_curve,secp256r1}, {hashs,sha256}]},
{'ecdh-sha2-nistp384', [{public_keys,ecdh}, {ec_curve,secp384r1}, {hashs,sha384}]},
- {'diffie-hellman-group14-sha1', [{public_keys,dh}, {hashs,sha}]},
+ {'ecdh-sha2-nistp521', [{public_keys,ecdh}, {ec_curve,secp521r1}, {hashs,sha512}]},
+ {'ecdh-sha2-nistp256', [{public_keys,ecdh}, {ec_curve,secp256r1}, {hashs,sha256}]},
{'diffie-hellman-group-exchange-sha256', [{public_keys,dh}, {hashs,sha256}]},
+ {'diffie-hellman-group16-sha512', [{public_keys,dh}, {hashs,sha512}]}, % In OpenSSH 7.3.p1
+ {'diffie-hellman-group18-sha512', [{public_keys,dh}, {hashs,sha512}]}, % In OpenSSH 7.3.p1
+ {'diffie-hellman-group14-sha256', [{public_keys,dh}, {hashs,sha256}]}, % In OpenSSH 7.3.p1
+ {'diffie-hellman-group14-sha1', [{public_keys,dh}, {hashs,sha}]},
{'diffie-hellman-group-exchange-sha1', [{public_keys,dh}, {hashs,sha}]},
- {'ecdh-sha2-nistp521', [{public_keys,ecdh}, {ec_curve,secp521r1}, {hashs,sha512}]},
{'diffie-hellman-group1-sha1', [{public_keys,dh}, {hashs,sha}]}
]);
supported_algorithms(public_key) ->
select_crypto_supported(
- [{'ecdsa-sha2-nistp256', [{public_keys,ecdsa}, {hashs,sha256}, {ec_curve,secp256r1}]},
+ [
{'ecdsa-sha2-nistp384', [{public_keys,ecdsa}, {hashs,sha384}, {ec_curve,secp384r1}]},
{'ecdsa-sha2-nistp521', [{public_keys,ecdsa}, {hashs,sha512}, {ec_curve,secp521r1}]},
+ {'ecdsa-sha2-nistp256', [{public_keys,ecdsa}, {hashs,sha256}, {ec_curve,secp256r1}]},
{'ssh-rsa', [{public_keys,rsa}, {hashs,sha} ]},
- {'ssh-dss', [{public_keys,dss}, {hashs,sha} ]}
+ {'ssh-dss', [{public_keys,dss}, {hashs,sha} ]} % Gone in OpenSSH 7.3.p1
]);
supported_algorithms(cipher) ->
same(
select_crypto_supported(
- [{'aes256-ctr', [{ciphers,{aes_ctr,256}}]},
- {'aes192-ctr', [{ciphers,{aes_ctr,192}}]},
- {'aes128-ctr', [{ciphers,{aes_ctr,128}}]},
- {'aes128-cbc', [{ciphers,aes_cbc128}]},
+ [
+ {'[email protected]', [{ciphers,{aes_gcm,256}}]},
+ {'aes256-ctr', [{ciphers,{aes_ctr,256}}]},
+ {'aes192-ctr', [{ciphers,{aes_ctr,192}}]},
{'[email protected]', [{ciphers,{aes_gcm,128}}]},
- {'[email protected]', [{ciphers,{aes_gcm,256}}]},
- {'AEAD_AES_128_GCM', [{ciphers,{aes_gcm,128}}]},
+ {'aes128-ctr', [{ciphers,{aes_ctr,128}}]},
{'AEAD_AES_256_GCM', [{ciphers,{aes_gcm,256}}]},
+ {'AEAD_AES_128_GCM', [{ciphers,{aes_gcm,128}}]},
+ {'aes128-cbc', [{ciphers,aes_cbc128}]},
{'3des-cbc', [{ciphers,des3_cbc}]}
]
));
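Both the reordering above and the new blacklist-style default_algorithms(kex) clause lean on two helpers that are not in this hunk: supported_algorithms(Class, BlackList), which drops the listed names from the supported set, and select_crypto_supported/1, which keeps only algorithms whose requirements the local crypto library reports. The sketch below restates that filtering idea in simplified form; it ignores details such as ec_curve lookups, so it is an approximation, not the module's code.

%% Simplified sketch of capability filtering against crypto:supports/0.
select_supported(Specs) ->
    Sup = crypto:supports(),     % [{hashs,[...]}, {ciphers,[...]}, ...]
    [Alg || {Alg, Needs} <- Specs, has_all(Needs, Sup)].

has_all(Needs, Sup) ->
    lists:all(fun({Class, Feature}) ->
                      lists:member(base(Feature),
                                   proplists:get_value(Class, Sup, []))
              end, Needs).

base({Name, _KeySize}) -> Name;  % e.g. {aes_gcm,256} -> aes_gcm
base(Name)             -> Name.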
@@ -143,14 +153,14 @@ supported_algorithms(compression) ->
%%%----------------------------------------------------------------------------
versions(client, Options)->
- Vsn = proplists:get_value(vsn, Options, ?DEFAULT_CLIENT_VERSION),
+ Vsn = ?GET_INTERNAL_OPT(vsn, Options, ?DEFAULT_CLIENT_VERSION),
{Vsn, format_version(Vsn, software_version(Options))};
versions(server, Options) ->
- Vsn = proplists:get_value(vsn, Options, ?DEFAULT_SERVER_VERSION),
+ Vsn = ?GET_INTERNAL_OPT(vsn, Options, ?DEFAULT_SERVER_VERSION),
{Vsn, format_version(Vsn, software_version(Options))}.
software_version(Options) ->
- case proplists:get_value(id_string, Options) of
+ case ?GET_OPT(id_string, Options) of
undefined ->
"Erlang"++ssh_vsn();
{random,Nlo,Nup} ->
@@ -161,7 +171,7 @@ software_version(Options) ->
ssh_vsn() ->
try {ok,L} = application:get_all_key(ssh),
- proplists:get_value(vsn,L,"")
+ proplists:get_value(vsn, L, "")
of
"" -> "";
VSN when is_list(VSN) -> "/" ++ VSN;
@@ -222,13 +232,7 @@ key_exchange_init_msg(Ssh0) ->
kex_init(#ssh{role = Role, opts = Opts, available_host_keys = HostKeyAlgs}) ->
Random = ssh_bits:random(16),
- PrefAlgs =
- case proplists:get_value(preferred_algorithms,Opts) of
- undefined ->
- default_algorithms();
- Algs0 ->
- Algs0
- end,
+ PrefAlgs = ?GET_OPT(preferred_algorithms, Opts),
kexinit_message(Role, Random, PrefAlgs, HostKeyAlgs).
key_init(client, Ssh, Value) ->
@@ -274,11 +278,12 @@ handle_kexinit_msg(#ssh_msg_kexinit{} = CounterPart, #ssh_msg_kexinit{} = Own,
true ->
key_exchange_first_msg(Algoritms#alg.kex,
Ssh0#ssh{algorithms = Algoritms});
- _ ->
+ {false,Alg} ->
%% TODO: Correct code?
ssh_connection_handler:disconnect(
#ssh_msg_disconnect{code = ?SSH_DISCONNECT_KEY_EXCHANGE_FAILED,
- description = "Selection of key exchange algorithm failed"
+ description = "Selection of key exchange algorithm failed: "
+ ++ Alg
})
end;
@@ -288,45 +293,60 @@ handle_kexinit_msg(#ssh_msg_kexinit{} = CounterPart, #ssh_msg_kexinit{} = Own,
case verify_algorithm(Algoritms) of
true ->
{ok, Ssh#ssh{algorithms = Algoritms}};
- _ ->
+ {false,Alg} ->
ssh_connection_handler:disconnect(
#ssh_msg_disconnect{code = ?SSH_DISCONNECT_KEY_EXCHANGE_FAILED,
- description = "Selection of key exchange algorithm failed"
+ description = "Selection of key exchange algorithm failed: "
+ ++ Alg
})
end.
-%% TODO: diffie-hellman-group14-sha1 should also be supported.
-%% Maybe check more things ...
-
-verify_algorithm(#alg{kex = undefined}) -> false;
-verify_algorithm(#alg{hkey = undefined}) -> false;
-verify_algorithm(#alg{send_mac = undefined}) -> false;
-verify_algorithm(#alg{recv_mac = undefined}) -> false;
-verify_algorithm(#alg{encrypt = undefined}) -> false;
-verify_algorithm(#alg{decrypt = undefined}) -> false;
-verify_algorithm(#alg{compress = undefined}) -> false;
-verify_algorithm(#alg{decompress = undefined}) -> false;
-verify_algorithm(#alg{kex = Kex}) -> lists:member(Kex, supported_algorithms(kex)).
+verify_algorithm(#alg{kex = undefined}) -> {false, "kex"};
+verify_algorithm(#alg{hkey = undefined}) -> {false, "hkey"};
+verify_algorithm(#alg{send_mac = undefined}) -> {false, "send_mac"};
+verify_algorithm(#alg{recv_mac = undefined}) -> {false, "recv_mac"};
+verify_algorithm(#alg{encrypt = undefined}) -> {false, "encrypt"};
+verify_algorithm(#alg{decrypt = undefined}) -> {false, "decrypt"};
+verify_algorithm(#alg{compress = undefined}) -> {false, "compress"};
+verify_algorithm(#alg{decompress = undefined}) -> {false, "decompress"};
+verify_algorithm(#alg{kex = Kex}) ->
+ case lists:member(Kex, supported_algorithms(kex)) of
+ true -> true;
+ false -> {false, "kex"}
+ end.
%%%----------------------------------------------------------------
%%%
%%% Key exchange initialization
%%%
key_exchange_first_msg(Kex, Ssh0) when Kex == 'diffie-hellman-group1-sha1' ;
- Kex == 'diffie-hellman-group14-sha1' ->
+ Kex == 'diffie-hellman-group14-sha1' ;
+ Kex == 'diffie-hellman-group14-sha256' ;
+ Kex == 'diffie-hellman-group16-sha512' ;
+ Kex == 'diffie-hellman-group18-sha512'
+ ->
{G, P} = dh_group(Kex),
- {Public, Private} = generate_key(dh, [P,G]),
+ Sz = dh_bits(Ssh0#ssh.algorithms),
+ {Public, Private} = generate_key(dh, [P,G,2*Sz]),
{SshPacket, Ssh1} = ssh_packet(#ssh_msg_kexdh_init{e = Public}, Ssh0),
{ok, SshPacket,
Ssh1#ssh{keyex_key = {{Private, Public}, {G, P}}}};
key_exchange_first_msg(Kex, Ssh0=#ssh{opts=Opts}) when Kex == 'diffie-hellman-group-exchange-sha1' ;
Kex == 'diffie-hellman-group-exchange-sha256' ->
- {Min,NBits,Max} =
- proplists:get_value(dh_gex_limits, Opts, {?DEFAULT_DH_GROUP_MIN,
- ?DEFAULT_DH_GROUP_NBITS,
- ?DEFAULT_DH_GROUP_MAX}),
+ {Min,NBits0,Max} = ?GET_OPT(dh_gex_limits, Opts),
+ DhBits = dh_bits(Ssh0#ssh.algorithms),
+ NBits1 =
+ %% NIST Special Publication 800-57 Part 1 Revision 4: Recommendation for Key Management
+ if
+ DhBits =< 112 -> 2048;
+ DhBits =< 128 -> 3072;
+ DhBits =< 192 -> 7680;
+ true -> 8192
+ end,
+ NBits = min(max(max(NBits0,NBits1),Min), Max),
+
{SshPacket, Ssh1} =
ssh_packet(#ssh_msg_kex_dh_gex_request{min = Min,
n = NBits,
@@ -348,14 +368,18 @@ key_exchange_first_msg(Kex, Ssh0) when Kex == 'ecdh-sha2-nistp256' ;
%%%
%%% diffie-hellman-group1-sha1
%%% diffie-hellman-group14-sha1
+%%% diffie-hellman-group14-sha256
+%%% diffie-hellman-group16-sha512
+%%% diffie-hellman-group18-sha512
%%%
handle_kexdh_init(#ssh_msg_kexdh_init{e = E},
- Ssh0 = #ssh{algorithms = #alg{kex=Kex}}) ->
+ Ssh0 = #ssh{algorithms = #alg{kex=Kex} = Algs}) ->
%% server
{G, P} = dh_group(Kex),
if
1=<E, E=<(P-1) ->
- {Public, Private} = generate_key(dh, [P,G]),
+ Sz = dh_bits(Algs),
+ {Public, Private} = generate_key(dh, [P,G,2*Sz]),
K = compute_key(dh, E, Private, [P,G]),
MyPrivHostKey = get_host_key(Ssh0),
MyPubHostKey = extract_public_key(MyPrivHostKey),
@@ -367,7 +391,7 @@ handle_kexdh_init(#ssh_msg_kexdh_init{e = E},
h_sig = H_SIG
}, Ssh0),
{ok, SshPacket, Ssh1#ssh{keyex_key = {{Private, Public}, {G, P}},
- shared_secret = K,
+ shared_secret = ssh_bits:mpint(K),
exchanged_hash = H,
session_id = sid(Ssh1, H)}};
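From here on the shared secret is stored already encoded as an SSH mpint, which is the form it must have in every hash computation; the rewritten hash/3 further down can then pass it to crypto:hash/2 directly instead of calling ssh_bits:mpint/1 each time. For reference, this is roughly the RFC 4251 mpint encoding for non-negative integers (negative values, which ssh_bits:mpint/1 also handles, are left out of this sketch):

%% Sketch of mpint encoding for K >= 0: 32-bit length prefix, big-endian
%% magnitude, and a leading zero byte when the top bit would be set.
mpint_sketch(0) ->
    <<0:32>>;
mpint_sketch(K) when is_integer(K), K > 0 ->
    Bin0 = binary:encode_unsigned(K),
    Bin  = case Bin0 of
               <<1:1, _/bitstring>> -> <<0, Bin0/binary>>;
               _                    -> Bin0
           end,
    <<(byte_size(Bin)):32, Bin/binary>>.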
@@ -393,7 +417,7 @@ handle_kexdh_reply(#ssh_msg_kexdh_reply{public_host_key = PeerPubHostKey,
case verify_host_key(Ssh0, PeerPubHostKey, H, H_SIG) of
ok ->
{SshPacket, Ssh} = ssh_packet(#ssh_msg_newkeys{}, Ssh0),
- {ok, SshPacket, Ssh#ssh{shared_secret = K,
+ {ok, SshPacket, Ssh#ssh{shared_secret = ssh_bits:mpint(K),
exchanged_hash = H,
session_id = sid(Ssh, H)}};
Error ->
@@ -425,14 +449,13 @@ handle_kex_dh_gex_request(#ssh_msg_kex_dh_gex_request{min = Min0,
%% server
{Min, Max} = adjust_gex_min_max(Min0, Max0, Opts),
case public_key:dh_gex_group(Min, NBits, Max,
- proplists:get_value(dh_gex_groups,Opts)) of
- {ok, {_Sz, {G,P}}} ->
- {Public, Private} = generate_key(dh, [P,G]),
+ ?GET_OPT(dh_gex_groups,Opts)) of
+ {ok, {_, {G,P}}} ->
{SshPacket, Ssh} =
ssh_packet(#ssh_msg_kex_dh_gex_group{p = P, g = G}, Ssh0),
{ok, SshPacket,
- Ssh#ssh{keyex_key = {{Private, Public}, {G, P}},
- keyex_info = {Min, Max, NBits}
+ Ssh#ssh{keyex_key = {x, {G, P}},
+ keyex_info = {Min0, Max0, NBits}
}};
{error,_} ->
ssh_connection_handler:disconnect(
@@ -449,7 +472,7 @@ handle_kex_dh_gex_request(#ssh_msg_kex_dh_gex_request_old{n = NBits},
%% This message was in the draft-00 of rfc4419
%% (https://tools.ietf.org/html/draft-ietf-secsh-dh-group-exchange-00)
%% In later drafts and the rfc is "is used for backward compatibility".
- %% Unfortunatly the rfc does not specify how to treat the parameter n
+ %% Unfortunately the rfc does not specify how to treat the parameter n
%% if there is no group of that modulus length :(
%% The draft-00 however specifies that n is the "... number of bits
%% the subgroup should have at least".
@@ -460,13 +483,12 @@ handle_kex_dh_gex_request(#ssh_msg_kex_dh_gex_request_old{n = NBits},
Max0 = 8192,
{Min, Max} = adjust_gex_min_max(Min0, Max0, Opts),
case public_key:dh_gex_group(Min, NBits, Max,
- proplists:get_value(dh_gex_groups,Opts)) of
- {ok, {_Sz, {G,P}}} ->
- {Public, Private} = generate_key(dh, [P,G]),
+ ?GET_OPT(dh_gex_groups,Opts)) of
+ {ok, {_, {G,P}}} ->
{SshPacket, Ssh} =
ssh_packet(#ssh_msg_kex_dh_gex_group{p = P, g = G}, Ssh0),
{ok, SshPacket,
- Ssh#ssh{keyex_key = {{Private, Public}, {G, P}},
+ Ssh#ssh{keyex_key = {x, {G, P}},
keyex_info = {-1, -1, NBits} % flag for kex_h hash calc
}};
{error,_} ->
@@ -486,28 +508,25 @@ handle_kex_dh_gex_request(_, _) ->
adjust_gex_min_max(Min0, Max0, Opts) ->
- case proplists:get_value(dh_gex_limits, Opts) of
- undefined ->
- {Min0, Max0};
- {Min1, Max1} ->
- Min2 = max(Min0, Min1),
- Max2 = min(Max0, Max1),
- if
- Min2 =< Max2 ->
- {Min2, Max2};
- Max2 < Min2 ->
- ssh_connection_handler:disconnect(
- #ssh_msg_disconnect{
- code = ?SSH_DISCONNECT_PROTOCOL_ERROR,
- description = "No possible diffie-hellman-group-exchange group possible"
- })
- end
+ {Min1, Max1} = ?GET_OPT(dh_gex_limits, Opts),
+ Min2 = max(Min0, Min1),
+ Max2 = min(Max0, Max1),
+ if
+ Min2 =< Max2 ->
+ {Min2, Max2};
+ Max2 < Min2 ->
+ ssh_connection_handler:disconnect(
+ #ssh_msg_disconnect{
+ code = ?SSH_DISCONNECT_PROTOCOL_ERROR,
+ description = "No possible diffie-hellman-group-exchange group possible"
+ })
end.
handle_kex_dh_gex_group(#ssh_msg_kex_dh_gex_group{p = P, g = G}, Ssh0) ->
%% client
- {Public, Private} = generate_key(dh, [P,G]),
+ Sz = dh_bits(Ssh0#ssh.algorithms),
+ {Public, Private} = generate_key(dh, [P,G,2*Sz]),
{SshPacket, Ssh1} =
ssh_packet(#ssh_msg_kex_dh_gex_init{e = Public}, Ssh0), % Pub = G^Priv mod P (def)
@@ -532,7 +551,7 @@ handle_kex_dh_gex_init(#ssh_msg_kex_dh_gex_init{e = E},
ssh_packet(#ssh_msg_kex_dh_gex_reply{public_host_key = MyPubHostKey,
f = Public,
h_sig = H_SIG}, Ssh0),
- {ok, SshPacket, Ssh#ssh{shared_secret = K,
+ {ok, SshPacket, Ssh#ssh{shared_secret = ssh_bits:mpint(K),
exchanged_hash = H,
session_id = sid(Ssh, H)
}};
@@ -568,7 +587,7 @@ handle_kex_dh_gex_reply(#ssh_msg_kex_dh_gex_reply{public_host_key = PeerPubHostK
case verify_host_key(Ssh0, PeerPubHostKey, H, H_SIG) of
ok ->
{SshPacket, Ssh} = ssh_packet(#ssh_msg_newkeys{}, Ssh0),
- {ok, SshPacket, Ssh#ssh{shared_secret = K,
+ {ok, SshPacket, Ssh#ssh{shared_secret = ssh_bits:mpint(K),
exchanged_hash = H,
session_id = sid(Ssh, H)}};
_Error ->
@@ -618,7 +637,7 @@ handle_kex_ecdh_init(#ssh_msg_kex_ecdh_init{q_c = PeerPublic},
h_sig = H_SIG},
Ssh0),
{ok, SshPacket, Ssh1#ssh{keyex_key = {{MyPublic,MyPrivate},Curve},
- shared_secret = K,
+ shared_secret = ssh_bits:mpint(K),
exchanged_hash = H,
session_id = sid(Ssh1, H)}}
catch
@@ -644,7 +663,7 @@ handle_kex_ecdh_reply(#ssh_msg_kex_ecdh_reply{public_host_key = PeerPubHostKey,
case verify_host_key(Ssh0, PeerPubHostKey, H, H_SIG) of
ok ->
{SshPacket, Ssh} = ssh_packet(#ssh_msg_newkeys{}, Ssh0),
- {ok, SshPacket, Ssh#ssh{shared_secret = K,
+ {ok, SshPacket, Ssh#ssh{shared_secret = ssh_bits:mpint(K),
exchanged_hash = H,
session_id = sid(Ssh, H)}};
Error ->
@@ -687,9 +706,9 @@ sid(#ssh{session_id = Id}, _) ->
%% The host key should be read from storage
%%
get_host_key(SSH) ->
- #ssh{key_cb = Mod, opts = Opts, algorithms = ALG} = SSH,
-
- case Mod:host_key(ALG#alg.hkey, Opts) of
+ #ssh{key_cb = {KeyCb,KeyCbOpts}, opts = Opts, algorithms = ALG} = SSH,
+ UserOpts = ?GET_OPT(user_options, Opts),
+ case KeyCb:host_key(ALG#alg.hkey, [{key_cb_private,KeyCbOpts}|UserOpts]) of
{ok, #'RSAPrivateKey'{} = Key} -> Key;
{ok, #'DSAPrivateKey'{} = Key} -> Key;
{ok, #'ECPrivateKey'{} = Key} -> Key;
@@ -735,7 +754,7 @@ public_algo({#'ECPoint'{},{namedCurve,OID}}) ->
accepted_host(Ssh, PeerName, Public, Opts) ->
- case proplists:get_value(silently_accept_hosts, Opts, false) of
+ case ?GET_OPT(silently_accept_hosts, Opts) of
F when is_function(F,2) ->
true == (catch F(PeerName, public_key:ssh_hostkey_fingerprint(Public)));
{DigestAlg,F} when is_function(F,2) ->
@@ -746,16 +765,16 @@ accepted_host(Ssh, PeerName, Public, Opts) ->
yes == yes_no(Ssh, "New host " ++ PeerName ++ " accept")
end.
-known_host_key(#ssh{opts = Opts, key_cb = Mod, peer = Peer} = Ssh,
+known_host_key(#ssh{opts = Opts, key_cb = {KeyCb,KeyCbOpts}, peer = {PeerName,_}} = Ssh,
Public, Alg) ->
- PeerName = peer_name(Peer),
- case Mod:is_host_key(Public, PeerName, Alg, Opts) of
+ UserOpts = ?GET_OPT(user_options, Opts),
+ case KeyCb:is_host_key(Public, PeerName, Alg, [{key_cb_private,KeyCbOpts}|UserOpts]) of
true ->
ok;
false ->
case accepted_host(Ssh, PeerName, Public, Opts) of
true ->
- Mod:add_host_key(PeerName, Public, Opts);
+ KeyCb:add_host_key(PeerName, Public, [{key_cb_private,KeyCbOpts}|UserOpts]);
false ->
{error, rejected}
end
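The key callback module is now called with the daemon's or client's own user options, extended with a {key_cb_private, Data} entry carrying whatever was configured in {key_cb, {Module, Data}}, instead of the internal option set. A minimal client-side callback could look like the sketch below (module name and key transport are invented; compare the updated ssh_key_cb_options.erl test module later in this diff):

-module(my_key_cb).
-behaviour(ssh_client_key_api).
-export([add_host_key/3, is_host_key/4, user_key/2]).

add_host_key(_Host, _PublicKey, _Opts) -> ok.
is_host_key(_Key, _Host, _Algorithm, _Opts) -> true.

%% The PEM binary arrives via {key_cb, {my_key_cb, [{priv_key, KeyBin}]}}
%% and is visible here under key_cb_private.
user_key('ssh-rsa', Opts) ->
    Private = proplists:get_value(key_cb_private, Opts, []),
    KeyBin  = proplists:get_value(priv_key, Private),
    [Entry] = public_key:pem_decode(KeyBin),
    {ok, public_key:pem_entry_decode(Entry)}.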
@@ -1117,6 +1136,51 @@ verify(PlainText, Hash, Sig, Key) ->
%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%% Unit: bytes
+
+-record(cipher_data, {
+ key_bytes,
+ iv_bytes,
+ block_bytes
+ }).
+
+%%% Start of a more parameterized crypto handling.
+cipher('AEAD_AES_128_GCM') ->
+ #cipher_data{key_bytes = 16,
+ iv_bytes = 12,
+ block_bytes = 16};
+
+cipher('AEAD_AES_256_GCM') ->
+ #cipher_data{key_bytes = 32,
+ iv_bytes = 12,
+ block_bytes = 16};
+
+cipher('3des-cbc') ->
+ #cipher_data{key_bytes = 24,
+ iv_bytes = 8,
+ block_bytes = 8};
+
+cipher('aes128-cbc') ->
+ #cipher_data{key_bytes = 16,
+ iv_bytes = 16,
+ block_bytes = 16};
+
+cipher('aes128-ctr') ->
+ #cipher_data{key_bytes = 16,
+ iv_bytes = 16,
+ block_bytes = 16};
+
+cipher('aes192-ctr') ->
+ #cipher_data{key_bytes = 24,
+ iv_bytes = 16,
+ block_bytes = 16};
+
+cipher('aes256-ctr') ->
+ #cipher_data{key_bytes = 32,
+ iv_bytes = 16,
+ block_bytes = 16}.
+
+
encrypt_init(#ssh{encrypt = none} = Ssh) ->
{ok, Ssh};
encrypt_init(#ssh{encrypt = 'AEAD_AES_128_GCM', role = client} = Ssh) ->
@@ -1497,11 +1561,11 @@ send_mac_init(SSH) ->
common ->
case SSH#ssh.role of
client ->
- KeySize = mac_key_size(SSH#ssh.send_mac),
+ KeySize = 8*mac_key_bytes(SSH#ssh.send_mac),
Key = hash(SSH, "E", KeySize),
{ok, SSH#ssh { send_mac_key = Key }};
server ->
- KeySize = mac_key_size(SSH#ssh.send_mac),
+ KeySize = 8*mac_key_bytes(SSH#ssh.send_mac),
Key = hash(SSH, "F", KeySize),
{ok, SSH#ssh { send_mac_key = Key }}
end;
@@ -1520,10 +1584,10 @@ recv_mac_init(SSH) ->
common ->
case SSH#ssh.role of
client ->
- Key = hash(SSH, "F", mac_key_size(SSH#ssh.recv_mac)),
+ Key = hash(SSH, "F", 8*mac_key_bytes(SSH#ssh.recv_mac)),
{ok, SSH#ssh { recv_mac_key = Key }};
server ->
- Key = hash(SSH, "E", mac_key_size(SSH#ssh.recv_mac)),
+ Key = hash(SSH, "E", 8*mac_key_bytes(SSH#ssh.recv_mac)),
{ok, SSH#ssh { recv_mac_key = Key }}
end;
aead ->
@@ -1549,48 +1613,27 @@ mac('hmac-sha2-256', Key, SeqNum, Data) ->
mac('hmac-sha2-512', Key, SeqNum, Data) ->
crypto:hmac(sha512, Key, [<<?UINT32(SeqNum)>>, Data]).
-%% return N hash bytes (HASH)
-hash(SSH, Char, Bits) ->
- HASH =
- case SSH#ssh.kex of
- 'diffie-hellman-group1-sha1' ->
- fun(Data) -> crypto:hash(sha, Data) end;
- 'diffie-hellman-group14-sha1' ->
- fun(Data) -> crypto:hash(sha, Data) end;
-
- 'diffie-hellman-group-exchange-sha1' ->
- fun(Data) -> crypto:hash(sha, Data) end;
- 'diffie-hellman-group-exchange-sha256' ->
- fun(Data) -> crypto:hash(sha256, Data) end;
-
- 'ecdh-sha2-nistp256' ->
- fun(Data) -> crypto:hash(sha256,Data) end;
- 'ecdh-sha2-nistp384' ->
- fun(Data) -> crypto:hash(sha384,Data) end;
- 'ecdh-sha2-nistp521' ->
- fun(Data) -> crypto:hash(sha512,Data) end;
- _ ->
- exit({bad_algorithm,SSH#ssh.kex})
- end,
- hash(SSH, Char, Bits, HASH).
-hash(_SSH, _Char, 0, _HASH) ->
+%%%----------------------------------------------------------------
+%% return N hash bytes (HASH)
+hash(_SSH, _Char, 0) ->
<<>>;
-hash(SSH, Char, N, HASH) ->
- K = ssh_bits:mpint(SSH#ssh.shared_secret),
+hash(SSH, Char, N) ->
+ HashAlg = sha(SSH#ssh.kex),
+ K = SSH#ssh.shared_secret,
H = SSH#ssh.exchanged_hash,
- SessionID = SSH#ssh.session_id,
- K1 = HASH([K, H, Char, SessionID]),
+ K1 = crypto:hash(HashAlg, [K, H, Char, SSH#ssh.session_id]),
Sz = N div 8,
- <<Key:Sz/binary, _/binary>> = hash(K, H, K1, N-128, HASH),
+ <<Key:Sz/binary, _/binary>> = hash(K, H, K1, N-128, HashAlg),
Key.
-hash(_K, _H, Ki, N, _HASH) when N =< 0 ->
+hash(_K, _H, Ki, N, _HashAlg) when N =< 0 ->
Ki;
-hash(K, H, Ki, N, HASH) ->
- Kj = HASH([K, H, Ki]),
- hash(K, H, <<Ki/binary, Kj/binary>>, N-128, HASH).
+hash(K, H, Ki, N, HashAlg) ->
+ Kj = crypto:hash(HashAlg, [K, H, Ki]),
+ hash(K, H, <<Ki/binary, Kj/binary>>, N-128, HashAlg).
+%%%----------------------------------------------------------------
kex_h(SSH, Key, E, F, K) ->
KeyBin = public_key:ssh_encode(Key, ssh2_pubkey),
L = <<?Estring(SSH#ssh.c_version), ?Estring(SSH#ssh.s_version),
@@ -1633,20 +1676,28 @@ sha(secp384r1) -> sha384;
sha(secp521r1) -> sha512;
sha('diffie-hellman-group1-sha1') -> sha;
sha('diffie-hellman-group14-sha1') -> sha;
+sha('diffie-hellman-group14-sha256') -> sha256;
+sha('diffie-hellman-group16-sha512') -> sha512;
+sha('diffie-hellman-group18-sha512') -> sha512;
sha('diffie-hellman-group-exchange-sha1') -> sha;
sha('diffie-hellman-group-exchange-sha256') -> sha256;
sha(?'secp256r1') -> sha(secp256r1);
sha(?'secp384r1') -> sha(secp384r1);
-sha(?'secp521r1') -> sha(secp521r1).
-
-
-mac_key_size('hmac-sha1') -> 20*8;
-mac_key_size('hmac-sha1-96') -> 20*8;
-mac_key_size('hmac-md5') -> 16*8;
-mac_key_size('hmac-md5-96') -> 16*8;
-mac_key_size('hmac-sha2-256')-> 32*8;
-mac_key_size('hmac-sha2-512')-> 512;
-mac_key_size(none) -> 0.
+sha(?'secp521r1') -> sha(secp521r1);
+sha('ecdh-sha2-nistp256') -> sha(secp256r1);
+sha('ecdh-sha2-nistp384') -> sha(secp384r1);
+sha('ecdh-sha2-nistp521') -> sha(secp521r1).
+
+
+mac_key_bytes('hmac-sha1') -> 20;
+mac_key_bytes('hmac-sha1-96') -> 20;
+mac_key_bytes('hmac-md5') -> 16;
+mac_key_bytes('hmac-md5-96') -> 16;
+mac_key_bytes('hmac-sha2-256')-> 32;
+mac_key_bytes('hmac-sha2-512')-> 64;
+mac_key_bytes('AEAD_AES_128_GCM') -> 0;
+mac_key_bytes('AEAD_AES_256_GCM') -> 0;
+mac_key_bytes(none) -> 0.
mac_digest_size('hmac-sha1') -> 20;
mac_digest_size('hmac-sha1-96') -> 12;
@@ -1658,9 +1709,6 @@ mac_digest_size('AEAD_AES_128_GCM') -> 16;
mac_digest_size('AEAD_AES_256_GCM') -> 16;
mac_digest_size(none) -> 0.
-peer_name({Host, _}) ->
- Host.
-
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%
%% Diffie-Hellman utils
@@ -1668,9 +1716,19 @@ peer_name({Host, _}) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
dh_group('diffie-hellman-group1-sha1') -> ?dh_group1;
-dh_group('diffie-hellman-group14-sha1') -> ?dh_group14.
+dh_group('diffie-hellman-group14-sha1') -> ?dh_group14;
+dh_group('diffie-hellman-group14-sha256') -> ?dh_group14;
+dh_group('diffie-hellman-group16-sha512') -> ?dh_group16;
+dh_group('diffie-hellman-group18-sha512') -> ?dh_group18.
%%%----------------------------------------------------------------
+parallell_gen_key(Ssh = #ssh{keyex_key = {x, {G, P}},
+ algorithms = Algs}) ->
+ Sz = dh_bits(Algs),
+ {Public, Private} = generate_key(dh, [P,G,2*Sz]),
+ Ssh#ssh{keyex_key = {{Private, Public}, {G, P}}}.
+
+
generate_key(Algorithm, Args) ->
{Public,Private} = crypto:generate_key(Algorithm, Args),
{crypto:bytes_to_integer(Public), crypto:bytes_to_integer(Private)}.
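parallell_gen_key/1 exists because the handle_kex_dh_gex_request clauses above now answer with the group first and leave keyex_key as the placeholder {x, {G,P}}; the expensive exponent generation can then happen while waiting for the peer's SSH_MSG_KEX_DH_GEX_INIT. The caller lives outside this module and is not part of this hunk; the sketch below only shows the calling pattern, assuming the #ssh record is in scope.

%% Hypothetical caller of the deferred generation (illustration only).
complete_keyex(#ssh{keyex_key = {x, _GP}} = Ssh) ->
    ssh_transport:parallell_gen_key(Ssh);   % fills in {{Priv,Pub},{G,P}}
complete_keyex(Ssh) ->
    Ssh.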
@@ -1681,6 +1739,15 @@ compute_key(Algorithm, OthersPublic, MyPrivate, Args) ->
crypto:bytes_to_integer(Shared).
+dh_bits(#alg{encrypt = Encrypt,
+ send_mac = SendMac}) ->
+ C = cipher(Encrypt),
+ 8 * lists:max([C#cipher_data.key_bytes,
+ C#cipher_data.block_bytes,
+ C#cipher_data.iv_bytes,
+ mac_key_bytes(SendMac)
+ ]).
+
ecdh_curve('ecdh-sha2-nistp256') -> secp256r1;
ecdh_curve('ecdh-sha2-nistp384') -> secp384r1;
ecdh_curve('ecdh-sha2-nistp521') -> secp521r1.
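dh_bits/1 above turns the strongest negotiated symmetric material into a bit count, which sets the DH private-exponent length (generate_key(dh, [P,G,2*Sz])) and, for group exchange, indexes the NIST SP 800-57 modulus table earlier in this diff. A self-contained restatement with one worked example:

%% Restatement for illustration; the real functions are above.
dh_bits_example(KeyBytes, BlockBytes, IvBytes, MacKeyBytes) ->
    8 * lists:max([KeyBytes, BlockBytes, IvBytes, MacKeyBytes]).

gex_modulus_bits(DhBits) when DhBits =< 112 -> 2048;
gex_modulus_bits(DhBits) when DhBits =< 128 -> 3072;
gex_modulus_bits(DhBits) when DhBits =< 192 -> 7680;
gex_modulus_bits(_)                         -> 8192.

%% aes256-ctr (key 32, block 16, IV 16) with hmac-sha2-256 (MAC key 32):
%%   dh_bits_example(32,16,16,32) -> 256
%%   => 2*256 = 512-bit private exponent,
%%   => gex_modulus_bits(256) -> 8192, then clamped by {Min,NBits0,Max}.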
@@ -1743,10 +1810,6 @@ len_supported(Name, Len) ->
same(Algs) -> [{client2server,Algs}, {server2client,Algs}].
-
-%% default_algorithms(kex) -> % Example of how to disable an algorithm
-%% supported_algorithms(kex, ['ecdh-sha2-nistp521']);
-
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%
%% Other utils
diff --git a/lib/ssh/src/ssh_transport.hrl b/lib/ssh/src/ssh_transport.hrl
index f91cb1dd63..19b3f5c437 100644
--- a/lib/ssh/src/ssh_transport.hrl
+++ b/lib/ssh/src/ssh_transport.hrl
@@ -112,7 +112,7 @@
%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% diffie-hellman-group1-sha1 | diffie-hellman-group14-sha1
+%% diffie-hellman-group*-sha*
-define(SSH_MSG_KEXDH_INIT, 30).
-define(SSH_MSG_KEXDH_REPLY, 31).
@@ -238,4 +238,15 @@
-define(dh_group14,
{2, 16#FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AACAA68FFFFFFFFFFFFFFFF}).
+%%% rfc 3526, ch5
+%%% Size 4096-bit
+-define(dh_group16,
+ {2, 16#FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6BF12FFA06D98A0864D87602733EC86A64521F2B18177B200CBBE117577A615D6C770988C0BAD946E208E24FA074E5AB3143DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D788719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA993B4EA988D8FDDC186FFB7DC90A6C08F4DF435C934063199FFFFFFFFFFFFFFFF}).
+
+%%% rfc 3526, ch7
+%%% Size 8192-bit
+-define(dh_group18,
+ {2, 16#FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6BF12FFA06D98A0864D87602733EC86A64521F2B18177B200CBBE117577A615D6C770988C0BAD946E208E24FA074E5AB3143DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D788719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA993B4EA988D8FDDC186FFB7DC90A6C08F4DF435C93402849236C3FAB4D27C7026C1D4DCB2602646DEC9751E763DBA37BDF8FF9406AD9E530EE5DB382F413001AEB06A53ED9027D831179727B0865A8918DA3EDBEBCF9B14ED44CE6CBACED4BB1BDB7F1447E6CC254B332051512BD7AF426FB8F401378CD2BF5983CA01C64B92ECF032EA15D1721D03F482D7CE6E74FEF6D55E702F46980C82B5A84031900B1C9E59E7C97FBEC7E8F323A97A7E36CC88BE0F1D45B7FF585AC54BD407B22B4154AACC8F6D7EBF48E1D814CC5ED20F8037E0A79715EEF29BE32806A1D58BB7C5DA76F550AA3D8A1FBFF0EB19CCB1A313D55CDA56C9EC2EF29632387FE8D76E3C0468043E8F663F4860EE12BF2D5B0B7474D6E694F91E6DBE115974A3926F12FEE5E438777CB6A932DF8CD8BEC4D073B931BA3BC832B68D9DD300741FA7BF8AFC47ED2576F6936BA424663AAB639C5AE4F5683423B4742BF1C978238F16CBE39D652DE3FDB8BEFC848AD922222E04A4037C0713EB57A81A23F0C73473FC646CEA306B4BCBC8862F8385DDFA9D4B7FA2C087E879683303ED5BDD3A062B3CF5B3A278A66D2A13F83F44F82DDF310EE074AB6A364597E899A0255DC164F31CC50846851DF9AB48195DED7EA1B1D510BD7EE74D73FAF36BC31ECFA268359046F4EB879F924009438B481C6CD7889A002ED5EE382BC9190DA6FC026E479558E4475677E9AA9E3050E2765694DFC81F56E880B96E7160C980DD98EDD3DFFFFFFFFFFFFFFFFF}).
+
+
-endif. % -ifdef(ssh_transport).
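The two new RFC 3526 constants are long enough that a transcription slip would be easy to miss, so a quick bit-length check is worthwhile; run it inside a module that includes this header (sketch):

%% Significant bits of a positive integer, counted via its base-2 digits.
bits(P) when is_integer(P), P > 0 ->
    byte_size(integer_to_binary(P, 2)).

%% Expected: bits(element(2, ?dh_group14)) =:= 2048,
%%           bits(element(2, ?dh_group16)) =:= 4096,
%%           bits(element(2, ?dh_group18)) =:= 8192.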
diff --git a/lib/ssh/src/sshd_sup.erl b/lib/ssh/src/sshd_sup.erl
index 04d2df30f7..14f1937abd 100644
--- a/lib/ssh/src/sshd_sup.erl
+++ b/lib/ssh/src/sshd_sup.erl
@@ -41,13 +41,13 @@
start_link(Servers) ->
supervisor:start_link({local, ?MODULE}, ?MODULE, [Servers]).
-start_child(ServerOpts) ->
- Address = proplists:get_value(address, ServerOpts),
- Port = proplists:get_value(port, ServerOpts),
- Profile = proplists:get_value(profile, proplists:get_value(ssh_opts, ServerOpts), ?DEFAULT_PROFILE),
+start_child(Options) ->
+ Address = ?GET_INTERNAL_OPT(address, Options),
+ Port = ?GET_INTERNAL_OPT(port, Options),
+ Profile = ?GET_OPT(profile, Options),
case ssh_system_sup:system_supervisor(Address, Port, Profile) of
undefined ->
- Spec = child_spec(Address, Port, ServerOpts),
+ Spec = child_spec(Address, Port, Options),
case supervisor:start_child(?MODULE, Spec) of
{error, already_present} ->
Name = id(Address, Port, Profile),
@@ -58,7 +58,7 @@ start_child(ServerOpts) ->
end;
Pid ->
AccPid = ssh_system_sup:acceptor_supervisor(Pid),
- ssh_acceptor_sup:start_child(AccPid, ServerOpts)
+ ssh_acceptor_sup:start_child(AccPid, Options)
end.
stop_child(Name) ->
@@ -82,8 +82,8 @@ init([Servers]) ->
MaxR = 10,
MaxT = 3600,
Fun = fun(ServerOpts) ->
- Address = proplists:get_value(address, ServerOpts),
- Port = proplists:get_value(port, ServerOpts),
+ Address = ?GET_INTERNAL_OPT(address, ServerOpts),
+ Port = ?GET_INTERNAL_OPT(port, ServerOpts),
child_spec(Address, Port, ServerOpts)
end,
Children = lists:map(Fun, Servers),
@@ -92,10 +92,10 @@ init([Servers]) ->
%%%=========================================================================
%%% Internal functions
%%%=========================================================================
-child_spec(Address, Port, ServerOpts) ->
- Profile = proplists:get_value(profile, proplists:get_value(ssh_opts, ServerOpts), ?DEFAULT_PROFILE),
+child_spec(Address, Port, Options) ->
+ Profile = ?GET_OPT(profile, Options),
Name = id(Address, Port,Profile),
- StartFunc = {ssh_system_sup, start_link, [ServerOpts]},
+ StartFunc = {ssh_system_sup, start_link, [Options]},
Restart = temporary,
Shutdown = infinity,
Modules = [ssh_system_sup],
diff --git a/lib/ssh/test/property_test/ssh_eqc_encode_decode.erl b/lib/ssh/test/property_test/ssh_eqc_encode_decode.erl
index 0f8a838f97..8ca29b9399 100644
--- a/lib/ssh/test/property_test/ssh_eqc_encode_decode.erl
+++ b/lib/ssh/test/property_test/ssh_eqc_encode_decode.erl
@@ -184,10 +184,7 @@ gen_byte(N) when N>0 -> [gen_byte() || _ <- lists:seq(1,N)].
gen_char() -> choose($a,$z).
-gen_mpint() -> ?LET(Size, choose(1,20),
- ?LET(Str, vector(Size, gen_byte()),
- gen_string( strip_0s(Str) )
- )).
+gen_mpint() -> ?LET(I, largeint(), ssh_bits:mpint(I)).
strip_0s([0|T]) -> strip_0s(T);
strip_0s(X) -> X.
diff --git a/lib/ssh/test/ssh_algorithms_SUITE.erl b/lib/ssh/test/ssh_algorithms_SUITE.erl
index 14605ee44f..6f75d83c4a 100644
--- a/lib/ssh/test/ssh_algorithms_SUITE.erl
+++ b/lib/ssh/test/ssh_algorithms_SUITE.erl
@@ -58,9 +58,11 @@ groups() ->
|| {Tag,Algs} <- ErlAlgos,
lists:member(Tag,tags())
],
+
+ TypeSSH = ssh_test_lib:ssh_type(),
AlgoTcSet =
- [{Alg, [parallel], specific_test_cases(Tag,Alg,SshcAlgos,SshdAlgos)}
+ [{Alg, [parallel], specific_test_cases(Tag,Alg,SshcAlgos,SshdAlgos,TypeSSH)}
|| {Tag,Algs} <- ErlAlgos ++ DoubleAlgos,
Alg <- Algs],
@@ -198,8 +200,9 @@ try_exec_simple_group(Group, Config) ->
%%--------------------------------------------------------------------
%% Testing all default groups
-simple_exec_groups() -> [{timetrap,{minutes,8}}].
-
+simple_exec_groups() ->
+ [{timetrap,{seconds,120}}].
+
simple_exec_groups(Config) ->
Sizes = interpolate( public_key:dh_gex_group_sizes() ),
lists:foreach(
@@ -315,18 +318,13 @@ concat(A1, A2) -> list_to_atom(lists:concat([A1," + ",A2])).
split(Alg) -> ssh_test_lib:to_atoms(string:tokens(atom_to_list(Alg), " + ")).
-specific_test_cases(Tag, Alg, SshcAlgos, SshdAlgos) ->
+specific_test_cases(Tag, Alg, SshcAlgos, SshdAlgos, TypeSSH) ->
[simple_exec, simple_sftp] ++
case supports(Tag, Alg, SshcAlgos) of
- true ->
- case ssh_test_lib:ssh_type() of
- openSSH ->
- [sshc_simple_exec_os_cmd];
- _ ->
- []
- end;
- false ->
- []
+ true when TypeSSH == openSSH ->
+ [sshc_simple_exec_os_cmd];
+ _ ->
+ []
end ++
case supports(Tag, Alg, SshdAlgos) of
true ->
diff --git a/lib/ssh/test/ssh_basic_SUITE.erl b/lib/ssh/test/ssh_basic_SUITE.erl
index 0a0ab5cdf7..cdf6cf9ae1 100644
--- a/lib/ssh/test/ssh_basic_SUITE.erl
+++ b/lib/ssh/test/ssh_basic_SUITE.erl
@@ -152,15 +152,27 @@ end_per_suite(_Config) ->
%%--------------------------------------------------------------------
init_per_group(dsa_key, Config) ->
- DataDir = proplists:get_value(data_dir, Config),
- PrivDir = proplists:get_value(priv_dir, Config),
- ssh_test_lib:setup_dsa(DataDir, PrivDir),
- Config;
+ case lists:member('ssh-dss',
+ ssh_transport:default_algorithms(public_key)) of
+ true ->
+ DataDir = proplists:get_value(data_dir, Config),
+ PrivDir = proplists:get_value(priv_dir, Config),
+ ssh_test_lib:setup_dsa(DataDir, PrivDir),
+ Config;
+ false ->
+ {skip, unsupported_pub_key}
+ end;
init_per_group(rsa_key, Config) ->
- DataDir = proplists:get_value(data_dir, Config),
- PrivDir = proplists:get_value(priv_dir, Config),
- ssh_test_lib:setup_rsa(DataDir, PrivDir),
- Config;
+ case lists:member('ssh-rsa',
+ ssh_transport:default_algorithms(public_key)) of
+ true ->
+ DataDir = proplists:get_value(data_dir, Config),
+ PrivDir = proplists:get_value(priv_dir, Config),
+ ssh_test_lib:setup_rsa(DataDir, PrivDir),
+ Config;
+ false ->
+ {skip, unsupported_pub_key}
+ end;
init_per_group(ecdsa_sha2_nistp256_key, Config) ->
case lists:member('ecdsa-sha2-nistp256',
ssh_transport:default_algorithms(public_key)) of
@@ -195,15 +207,27 @@ init_per_group(ecdsa_sha2_nistp521_key, Config) ->
{skip, unsupported_pub_key}
end;
init_per_group(rsa_pass_key, Config) ->
- DataDir = proplists:get_value(data_dir, Config),
- PrivDir = proplists:get_value(priv_dir, Config),
- ssh_test_lib:setup_rsa_pass_pharse(DataDir, PrivDir, "Password"),
- [{pass_phrase, {rsa_pass_phrase, "Password"}}| Config];
+ case lists:member('ssh-rsa',
+ ssh_transport:default_algorithms(public_key)) of
+ true ->
+ DataDir = proplists:get_value(data_dir, Config),
+ PrivDir = proplists:get_value(priv_dir, Config),
+ ssh_test_lib:setup_rsa_pass_pharse(DataDir, PrivDir, "Password"),
+ [{pass_phrase, {rsa_pass_phrase, "Password"}}| Config];
+ false ->
+ {skip, unsupported_pub_key}
+ end;
init_per_group(dsa_pass_key, Config) ->
- DataDir = proplists:get_value(data_dir, Config),
- PrivDir = proplists:get_value(priv_dir, Config),
- ssh_test_lib:setup_dsa_pass_pharse(DataDir, PrivDir, "Password"),
- [{pass_phrase, {dsa_pass_phrase, "Password"}}| Config];
+ case lists:member('ssh-dss',
+ ssh_transport:default_algorithms(public_key)) of
+ true ->
+ DataDir = proplists:get_value(data_dir, Config),
+ PrivDir = proplists:get_value(priv_dir, Config),
+ ssh_test_lib:setup_dsa_pass_pharse(DataDir, PrivDir, "Password"),
+ [{pass_phrase, {dsa_pass_phrase, "Password"}}| Config];
+ false ->
+ {skip, unsupported_pub_key}
+ end;
init_per_group(host_user_key_differs, Config) ->
Data = proplists:get_value(data_dir, Config),
Sys = filename:join(proplists:get_value(priv_dir, Config), system_rsa),
@@ -220,10 +244,16 @@ init_per_group(host_user_key_differs, Config) ->
ssh_test_lib:setup_rsa_known_host(Sys, Usr),
Config;
init_per_group(key_cb, Config) ->
- DataDir = proplists:get_value(data_dir, Config),
- PrivDir = proplists:get_value(priv_dir, Config),
- ssh_test_lib:setup_dsa(DataDir, PrivDir),
- Config;
+ case lists:member('ssh-rsa',
+ ssh_transport:default_algorithms(public_key)) of
+ true ->
+ DataDir = proplists:get_value(data_dir, Config),
+ PrivDir = proplists:get_value(priv_dir, Config),
+ ssh_test_lib:setup_rsa(DataDir, PrivDir),
+ Config;
+ false ->
+ {skip, unsupported_pub_key}
+ end;
init_per_group(internal_error, Config) ->
DataDir = proplists:get_value(data_dir, Config),
PrivDir = proplists:get_value(priv_dir, Config),
@@ -293,7 +323,7 @@ end_per_group(rsa_pass_key, Config) ->
Config;
end_per_group(key_cb, Config) ->
PrivDir = proplists:get_value(priv_dir, Config),
- ssh_test_lib:clean_dsa(PrivDir),
+ ssh_test_lib:clean_rsa(PrivDir),
Config;
end_per_group(internal_error, Config) ->
PrivDir = proplists:get_value(priv_dir, Config),
@@ -750,7 +780,7 @@ key_callback_options(Config) when is_list(Config) ->
{user_dir, UserDir},
{failfun, fun ssh_test_lib:failfun/2}]),
- {ok, PrivKey} = file:read_file(filename:join(UserDir, "id_dsa")),
+ {ok, PrivKey} = file:read_file(filename:join(UserDir, "id_rsa")),
ConnectOpts = [{silently_accept_hosts, true},
{user_dir, NoPubKeyDir},
@@ -1206,7 +1236,7 @@ check_error("Invalid state") ->
ok;
check_error("Connection closed") ->
ok;
-check_error("Selection of key exchange algorithm failed") ->
+check_error("Selection of key exchange algorithm failed"++_) ->
ok;
check_error(Error) ->
ct:fail(Error).
diff --git a/lib/ssh/test/ssh_benchmark_SUITE.erl b/lib/ssh/test/ssh_benchmark_SUITE.erl
index c2bfc48449..fc90750455 100644
--- a/lib/ssh/test/ssh_benchmark_SUITE.erl
+++ b/lib/ssh/test/ssh_benchmark_SUITE.erl
@@ -30,7 +30,7 @@
suite() -> [{ct_hooks,[{ts_install_cth,[{nodenames,2}]}]},
- {timetrap,{minutes,3}}
+ {timetrap,{minutes,6}}
].
%%suite() -> [{ct_hooks,[ts_install_cth]}].
@@ -70,9 +70,12 @@ init_per_group(opensshc_erld, Config) ->
ssh_test_lib:setup_dsa(DataDir, UserDir),
ssh_test_lib:setup_rsa(DataDir, UserDir),
ssh_test_lib:setup_ecdsa("256", DataDir, UserDir),
+ AlgsD = ssh:default_algorithms(),
+ AlgsC = ssh_test_lib:default_algorithms(sshc),
Common = ssh_test_lib:intersect_bi_dir(
- ssh_test_lib:intersection(ssh:default_algorithms(),
- ssh_test_lib:default_algorithms(sshc))),
+ ssh_test_lib:intersection(AlgsD, AlgsC)),
+ ct:pal("~p~n~nErld:~n~p~n~nOpenSSHc:~n~p~n~nCommon:~n~p",
+ [inet:gethostname(), AlgsD, AlgsC, Common]),
[{c_kexs, ssh_test_lib:sshc(kex)},
{c_ciphers, ssh_test_lib:sshc(cipher)},
{common_algs, Common}
@@ -136,7 +139,6 @@ openssh_client_shell(Config, Options) ->
{ok, TracerPid} = erlang_trace(),
{ServerPid, _Host, Port} =
ssh_test_lib:daemon([{system_dir, SystemDir},
- {public_key_alg, ssh_dsa},
{failfun, fun ssh_test_lib:failfun/2} |
Options]),
ct:sleep(500),
@@ -212,7 +214,6 @@ openssh_client_sftp(Config, Options) ->
{ok, TracerPid} = erlang_trace(),
{ServerPid, _Host, Port} =
ssh_test_lib:daemon([{system_dir, SystemDir},
- {public_key_alg, ssh_dsa},
{subsystems,[ssh_sftpd:subsystem_spec([%{cwd, SftpSrcDir},
{root, SftpSrcDir}])]},
{failfun, fun ssh_test_lib:failfun/2}
@@ -427,13 +428,20 @@ function_algs_times_sizes(EncDecs, L) ->
|| {Alg,Size,Time} <- lists:foldl(fun increment/2, [], Raw)].
function_ats_result({ssh_transport,encrypt,2}, #call{args=[S,Data]}) ->
- {{encrypt,S#ssh.encrypt}, size(Data)};
+ {{encrypt,S#ssh.encrypt}, binsize(Data)};
function_ats_result({ssh_transport,decrypt,2}, #call{args=[S,Data]}) ->
- {{decrypt,S#ssh.decrypt}, size(Data)};
+ {{decrypt,S#ssh.decrypt}, binsize(Data)};
function_ats_result({ssh_message,encode,1}, #call{result=Data}) ->
{encode, size(Data)};
function_ats_result({ssh_message,decode,1}, #call{args=[Data]}) ->
{decode, size(Data)}.
+
+binsize(B) when is_binary(B) -> size(B);
+binsize({B1,B2}) when is_binary(B1), is_binary(B2) -> size(B1) + size(B2);
+binsize({B1,B2,_}) when is_binary(B1), is_binary(B2) -> size(B1) + size(B2).
+
+
+
increment({Alg,Sz,T}, [{Alg,SumSz,SumT}|Acc]) ->
diff --git a/lib/ssh/test/ssh_key_cb.erl b/lib/ssh/test/ssh_key_cb.erl
index 388ec2ecc1..12ff79efcd 100644
--- a/lib/ssh/test/ssh_key_cb.erl
+++ b/lib/ssh/test/ssh_key_cb.erl
@@ -33,9 +33,9 @@ add_host_key(_, _, _) ->
is_host_key(_, _, _, _) ->
true.
-user_key('ssh-dss', Opts) ->
+user_key('ssh-rsa', Opts) ->
UserDir = proplists:get_value(user_dir, Opts),
- KeyFile = filename:join(filename:dirname(UserDir), "id_dsa"),
+ KeyFile = filename:join(filename:dirname(UserDir), "id_rsa"),
{ok, KeyBin} = file:read_file(KeyFile),
[Entry] = public_key:pem_decode(KeyBin),
Key = public_key:pem_entry_decode(Entry),
diff --git a/lib/ssh/test/ssh_key_cb_options.erl b/lib/ssh/test/ssh_key_cb_options.erl
index afccb34f0f..946a1254d0 100644
--- a/lib/ssh/test/ssh_key_cb_options.erl
+++ b/lib/ssh/test/ssh_key_cb_options.erl
@@ -33,7 +33,7 @@ add_host_key(_, _, _) ->
is_host_key(_, _, _, _) ->
true.
-user_key('ssh-dss', Opts) ->
+user_key('ssh-rsa', Opts) ->
KeyCbOpts = proplists:get_value(key_cb_private, Opts),
KeyBin = proplists:get_value(priv_key, KeyCbOpts),
[Entry] = public_key:pem_decode(KeyBin),
diff --git a/lib/ssh/test/ssh_options_SUITE.erl b/lib/ssh/test/ssh_options_SUITE.erl
index 86f5cb1746..758c20e2b8 100644
--- a/lib/ssh/test/ssh_options_SUITE.erl
+++ b/lib/ssh/test/ssh_options_SUITE.erl
@@ -67,7 +67,8 @@
hostkey_fingerprint_check_sha/1,
hostkey_fingerprint_check_sha256/1,
hostkey_fingerprint_check_sha384/1,
- hostkey_fingerprint_check_sha512/1
+ hostkey_fingerprint_check_sha512/1,
+ hostkey_fingerprint_check_list/1
]).
%%% Common test callbacks
@@ -112,6 +113,7 @@ all() ->
hostkey_fingerprint_check_sha256,
hostkey_fingerprint_check_sha384,
hostkey_fingerprint_check_sha512,
+ hostkey_fingerprint_check_list,
id_string_no_opt_client,
id_string_own_string_client,
id_string_random_client,
@@ -148,6 +150,7 @@ init_per_group(hardening_tests, Config) ->
DataDir = proplists:get_value(data_dir, Config),
PrivDir = proplists:get_value(priv_dir, Config),
ssh_test_lib:setup_dsa(DataDir, PrivDir),
+ ssh_test_lib:setup_rsa(DataDir, PrivDir),
Config;
init_per_group(dir_options, Config) ->
PrivDir = proplists:get_value(priv_dir, Config),
@@ -812,6 +815,8 @@ hostkey_fingerprint_check_sha384(Config) ->
hostkey_fingerprint_check_sha512(Config) ->
do_hostkey_fingerprint_check(Config, sha512).
+hostkey_fingerprint_check_list(Config) ->
+ do_hostkey_fingerprint_check(Config, [sha,md5,sha256]).
%%%----
do_hostkey_fingerprint_check(Config, HashAlg) ->
@@ -824,9 +829,10 @@ do_hostkey_fingerprint_check(Config, HashAlg) ->
supported_hash(old) -> true;
supported_hash(HashAlg) ->
- proplists:get_value(HashAlg,
- proplists:get_value(hashs, crypto:supports(), []),
- false).
+ Hs = if is_atom(HashAlg) -> [HashAlg];
+ is_list(HashAlg) -> HashAlg
+ end,
+ [] == (Hs -- proplists:get_value(hashs, crypto:supports(), [])).
really_do_hostkey_fingerprint_check(Config, HashAlg) ->
@@ -840,7 +846,7 @@ really_do_hostkey_fingerprint_check(Config, HashAlg) ->
%% All host key fingerprints. Trust that public_key has checked the ssh_hostkey_fingerprint
%% function since that function is used by the ssh client...
- FPs = [case HashAlg of
+ FPs0 = [case HashAlg of
old -> public_key:ssh_hostkey_fingerprint(Key);
_ -> public_key:ssh_hostkey_fingerprint(HashAlg, Key)
end
@@ -856,6 +862,9 @@ really_do_hostkey_fingerprint_check(Config, HashAlg) ->
_:_ -> []
end
end],
+ FPs = if is_atom(HashAlg) -> FPs0;
+ is_list(HashAlg) -> lists:concat(FPs0)
+ end,
ct:log("Fingerprints(~p) = ~p",[HashAlg,FPs]),
%% Start daemon with the public keys that we got fingerprints from
@@ -866,8 +875,12 @@ really_do_hostkey_fingerprint_check(Config, HashAlg) ->
FP_check_fun = fun(PeerName, FP) ->
ct:pal("PeerName = ~p, FP = ~p",[PeerName,FP]),
HostCheck = (Host == PeerName),
- FPCheck = lists:member(FP, FPs),
- ct:log("check ~p == ~p (~p) and ~n~p in ~p (~p)~n",
+ FPCheck =
+ if is_atom(HashAlg) -> lists:member(FP, FPs);
+ is_list(HashAlg) -> lists:all(fun(FP1) -> lists:member(FP1,FPs) end,
+ FP)
+ end,
+ ct:log("check ~p == ~p (~p) and ~n~p~n in ~p (~p)~n",
[PeerName,Host,HostCheck,FP,FPs,FPCheck]),
HostCheck and FPCheck
end,
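hostkey_fingerprint_check_list exercises giving a list of digest algorithms instead of a single one; the accept-fun then receives one fingerprint per requested algorithm. Client usage could look roughly like the sketch below; the option shape is inferred from this test rather than quoted from the documentation, and host and fingerprint values are placeholders.

%% Hypothetical: accept the host only if every presented fingerprint is known.
connect_with_fp_check(Host, Port, KnownFPs) ->
    Accept = fun(_PeerName, FPs) ->
                     lists:all(fun(FP) -> lists:member(FP, KnownFPs) end, FPs)
             end,
    ssh:connect(Host, Port,
                [{silently_accept_hosts, {[sha, md5, sha256], Accept}},
                 {user_interaction, false}]).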
diff --git a/lib/ssh/test/ssh_protocol_SUITE.erl b/lib/ssh/test/ssh_protocol_SUITE.erl
index 93d0bc2eb0..2c4fa8be88 100644
--- a/lib/ssh/test/ssh_protocol_SUITE.erl
+++ b/lib/ssh/test/ssh_protocol_SUITE.erl
@@ -34,6 +34,12 @@
-define(NEWLINE, <<"\r\n">>).
-define(REKEY_DATA_TMO, 65000).
+%%-define(DEFAULT_KEX, 'diffie-hellman-group1-sha1').
+-define(DEFAULT_KEX, 'diffie-hellman-group14-sha256').
+
+-define(CIPHERS, ['aes256-ctr','aes192-ctr','aes128-ctr','aes128-cbc','3des-cbc']).
+-define(DEFAULT_CIPHERS, [{client2server,?CIPHERS}, {server2client,?CIPHERS}]).
+
-define(v(Key, Config), proplists:get_value(Key, Config)).
-define(v(Key, Config, Default), proplists:get_value(Key, Config, Default)).
@@ -97,7 +103,9 @@ end_per_suite(Config) ->
init_per_testcase(no_common_alg_server_disconnects, Config) ->
- start_std_daemon(Config, [{preferred_algorithms,[{public_key,['ssh-rsa']}]}]);
+ start_std_daemon(Config, [{preferred_algorithms,[{public_key,['ssh-rsa']},
+ {cipher,?DEFAULT_CIPHERS}
+ ]}]);
init_per_testcase(TC, Config) when TC == gex_client_init_option_groups ;
TC == gex_client_init_option_groups_moduli_file ;
@@ -107,7 +115,10 @@ init_per_testcase(TC, Config) when TC == gex_client_init_option_groups ;
TC == gex_client_old_request_noexact ->
Opts = case TC of
gex_client_init_option_groups ->
- [{dh_gex_groups, [{2345, 3, 41}]}];
+ [{dh_gex_groups,
+ [{1023, 5,
+ 16#D9277DAA27DB131C03B108D41A76B4DA8ACEECCCAE73D2E48CEDAAA70B09EF9F04FB020DCF36C51B8E485B26FABE0337E24232BE4F4E693548310244937433FB1A5758195DC73B84ADEF8237472C46747D79DC0A2CF8A57CE8DBD8F466A20F8551E7B1B824B2E4987A8816D9BC0741C2798F3EBAD3ADEBCC78FCE6A770E2EC9F
+ }]}];
gex_client_init_option_groups_file ->
DataDir = proplists:get_value(data_dir, Config),
F = filename:join(DataDir, "dh_group_test"),
@@ -119,16 +130,19 @@ init_per_testcase(TC, Config) when TC == gex_client_init_option_groups ;
_ when TC == gex_server_gex_limit ;
TC == gex_client_old_request_exact ;
TC == gex_client_old_request_noexact ->
- [{dh_gex_groups, [{ 500, 3, 17},
- {1000, 7, 91},
- {3000, 5, 61}]},
- {dh_gex_limits,{500,1500}}
+ [{dh_gex_groups,
+ [{1023, 2, 16#D9277DAA27DB131C03B108D41A76B4DA8ACEECCCAE73D2E48CEDAAA70B09EF9F04FB020DCF36C51B8E485B26FABE0337E24232BE4F4E693548310244937433FB1A5758195DC73B84ADEF8237472C46747D79DC0A2CF8A57CE8DBD8F466A20F8551E7B1B824B2E4987A8816D9BC0741C2798F3EBAD3ADEBCC78FCE6A771225323},
+ {1535, 5, 16#D1391174233D315398FE2830AC6B2B66BCCD01B0A634899F339B7879F1DB85712E9DC4E4B1C6C8355570C1D2DCB53493DF18175A9C53D1128B592B4C72D97136F5542FEB981CBFE8012FDD30361F288A42BD5EBB08BAB0A5640E1AC48763B2ABD1945FEE36B2D55E1D50A1C86CED9DD141C4E7BE2D32D9B562A0F8E2E927020E91F58B57EB9ACDDA106A59302D7E92AD5F6E851A45FA1CFE86029A0F727F65A8F475F33572E2FDAB6073F0C21B8B54C3823DB2EF068927E5D747498F96E1E827},
+ {3071, 2, 16#DFAA35D35531E0F524F0099877A482D2AC8D589F374394A262A8E81A8A4FB2F65FADBAB395E05D147B29D486DFAA41F41597A256DA82A8B6F76401AED53D0253F956CEC610D417E42E3B287F7938FC24D8821B40BFA218A956EB7401BED6C96C68C7FD64F8170A8A76B953DD2F05420118F6B144D8FE48060A2BCB85056B478EDEF96DBC70427053ECD2958C074169E9550DD877779A3CF17C5AC850598C7586BEEA9DCFE9DD2A5FB62DF5F33EA7BC00CDA31B9D2DD721F979EA85B6E63F0C4E30BDDCD3A335522F9004C4ED50B15DC537F55324DD4FA119FB3F101467C6D7E1699DE4B3E3C478A8679B8EB3FA5C9B826B44530FD3BE9AD3063B240B0C853EBDDBD68DD940332D98F148D5D9E1DC977D60A0D23D0CA1198637FEAE4E7FAAC173AF2B84313A666CFB4EE6972811921D0AD867CE57F3BBC8D6CB057E3B66757BB46C9F72662624D44E14528327E3A7100E81A12C43C4E236118318CD90C8AA185BBB0C764826DAEAEE8DD245C5B451B4944E6122CC522D1C335C2EEF9429825A2B}
+ ]},
+ {dh_gex_limits, {1023,2000}}
];
_ ->
[]
end,
start_std_daemon(Config,
- [{preferred_algorithms, ssh:default_algorithms()}
+ [{preferred_algorithms,[{cipher,?DEFAULT_CIPHERS}
+ ]}
| Opts]);
init_per_testcase(_TestCase, Config) ->
check_std_daemon_works(Config, ?LINE).
@@ -237,7 +251,10 @@ lib_works_as_server(Config) ->
%% and finally connect to it with a regular Erlang SSH client:
{ok,_} = std_connect(HostPort, Config,
- [{preferred_algorithms,[{kex,['diffie-hellman-group1-sha1']}]}]
+ [{preferred_algorithms,[{kex,[?DEFAULT_KEX]},
+ {cipher,?DEFAULT_CIPHERS}
+ ]}
+ ]
).
%%--------------------------------------------------------------------
@@ -277,7 +294,9 @@ no_common_alg_server_disconnects(Config) ->
[{silently_accept_hosts, true},
{user_dir, user_dir(Config)},
{user_interaction, false},
- {preferred_algorithms,[{public_key,['ssh-dss']}]}
+ {preferred_algorithms,[{public_key,['ssh-dss']},
+ {cipher,?DEFAULT_CIPHERS}
+ ]}
]},
receive_hello,
{send, hello},
@@ -311,7 +330,7 @@ no_common_alg_client_disconnects(Config) ->
{match, #ssh_msg_kexinit{_='_'}, receive_msg},
{send, #ssh_msg_kexinit{ % with unsupported "SOME-UNSUPPORTED"
cookie = <<80,158,95,51,174,35,73,130,246,141,200,49,180,190,82,234>>,
- kex_algorithms = ["diffie-hellman-group1-sha1"],
+ kex_algorithms = [atom_to_list(?DEFAULT_KEX)],
server_host_key_algorithms = ["SOME-UNSUPPORTED"], % SIC!
encryption_algorithms_client_to_server = ["aes128-ctr"],
encryption_algorithms_server_to_client = ["aes128-ctr"],
@@ -332,7 +351,9 @@ no_common_alg_client_disconnects(Config) ->
%% and finally connect to it with a regular Erlang SSH client
%% which of course does not support SOME-UNSUPPORTED as pub key algo:
- Result = std_connect(HostPort, Config, [{preferred_algorithms,[{public_key,['ssh-dss']}]}]),
+ Result = std_connect(HostPort, Config, [{preferred_algorithms,[{public_key,['ssh-dss']},
+ {cipher,?DEFAULT_CIPHERS}
+ ]}]),
ct:log("Result of connect is ~p",[Result]),
receive
@@ -351,20 +372,25 @@ no_common_alg_client_disconnects(Config) ->
%%%--------------------------------------------------------------------
gex_client_init_option_groups(Config) ->
- do_gex_client_init(Config, {2000, 2048, 4000},
- {3,41}).
+ do_gex_client_init(Config, {512, 2048, 4000},
+ {5,16#D9277DAA27DB131C03B108D41A76B4DA8ACEECCCAE73D2E48CEDAAA70B09EF9F04FB020DCF36C51B8E485B26FABE0337E24232BE4F4E693548310244937433FB1A5758195DC73B84ADEF8237472C46747D79DC0A2CF8A57CE8DBD8F466A20F8551E7B1B824B2E4987A8816D9BC0741C2798F3EBAD3ADEBCC78FCE6A770E2EC9F}
+ ).
gex_client_init_option_groups_file(Config) ->
do_gex_client_init(Config, {2000, 2048, 4000},
- {5,61}).
+ {5, 16#DFAA35D35531E0F524F0099877A482D2AC8D589F374394A262A8E81A8A4FB2F65FADBAB395E05D147B29D486DFAA41F41597A256DA82A8B6F76401AED53D0253F956CEC610D417E42E3B287F7938FC24D8821B40BFA218A956EB7401BED6C96C68C7FD64F8170A8A76B953DD2F05420118F6B144D8FE48060A2BCB85056B478EDEF96DBC70427053ECD2958C074169E9550DD877779A3CF17C5AC850598C7586BEEA9DCFE9DD2A5FB62DF5F33EA7BC00CDA31B9D2DD721F979EA85B6E63F0C4E30BDDCD3A335522F9004C4ED50B15DC537F55324DD4FA119FB3F101467C6D7E1699DE4B3E3C478A8679B8EB3FA5C9B826B44530FD3BE9AD3063B240B0C853EBDDBD68DD940332D98F148D5D9E1DC977D60A0D23D0CA1198637FEAE4E7FAAC173AF2B84313A666CFB4EE6972811921D0AD867CE57F3BBC8D6CB057E3B66757BB46C9F72662624D44E14528327E3A7100E81A12C43C4E236118318CD90C8AA185BBB0C764826DAEAEE8DD245C5B451B4944E6122CC522D1C335C2EEF9424273F1F}
+ ).
gex_client_init_option_groups_moduli_file(Config) ->
do_gex_client_init(Config, {2000, 2048, 4000},
- {5,16#B7}).
+ {5, 16#DD2047CBDBB6F8E919BC63DE885B34D0FD6E3DB2887D8B46FE249886ACED6B46DFCD5553168185FD376122171CD8927E60120FA8D01F01D03E58281FEA9A1ABE97631C828E41815F34FDCDF787419FE13A3137649AA93D2584230DF5F24B5C00C88B7D7DE4367693428C730376F218A53E853B0851BAB7C53C15DA7839CBE1285DB63F6FA45C1BB59FE1C5BB918F0F8459D7EF60ACFF5C0FA0F3FCAD1C5F4CE4416D4F4B36B05CDCEBE4FB879E95847EFBC6449CD190248843BC7EDB145FBFC4EDBB1A3C959298F08F3BA2CFBE231BBE204BE6F906209D28BD4820AB3E7BE96C26AE8A809ADD8D1A5A0B008E9570FA4C4697E116B8119892C604293683A9635F}
+ ).
gex_server_gex_limit(Config) ->
do_gex_client_init(Config, {1000, 3000, 4000},
- {7,91}).
+ %% {7,91}).
+ {5, 16#D1391174233D315398FE2830AC6B2B66BCCD01B0A634899F339B7879F1DB85712E9DC4E4B1C6C8355570C1D2DCB53493DF18175A9C53D1128B592B4C72D97136F5542FEB981CBFE8012FDD30361F288A42BD5EBB08BAB0A5640E1AC48763B2ABD1945FEE36B2D55E1D50A1C86CED9DD141C4E7BE2D32D9B562A0F8E2E927020E91F58B57EB9ACDDA106A59302D7E92AD5F6E851A45FA1CFE86029A0F727F65A8F475F33572E2FDAB6073F0C21B8B54C3823DB2EF068927E5D747498F96E1E827}
+ ).
do_gex_client_init(Config, {Min,N,Max}, {G,P}) ->
@@ -376,7 +402,9 @@ do_gex_client_init(Config, {Min,N,Max}, {G,P}) ->
[{silently_accept_hosts, true},
{user_dir, user_dir(Config)},
{user_interaction, false},
- {preferred_algorithms,[{kex,['diffie-hellman-group-exchange-sha1']}]}
+ {preferred_algorithms,[{kex,['diffie-hellman-group-exchange-sha1']},
+ {cipher,?DEFAULT_CIPHERS}
+ ]}
]},
receive_hello,
{send, hello},
@@ -390,8 +418,15 @@ do_gex_client_init(Config, {Min,N,Max}, {G,P}) ->
).
%%%--------------------------------------------------------------------
-gex_client_old_request_exact(Config) -> do_gex_client_init_old(Config, 500, {3,17}).
-gex_client_old_request_noexact(Config) -> do_gex_client_init_old(Config, 800, {7,91}).
+gex_client_old_request_exact(Config) ->
+ do_gex_client_init_old(Config, 1023,
+ {2, 16#D9277DAA27DB131C03B108D41A76B4DA8ACEECCCAE73D2E48CEDAAA70B09EF9F04FB020DCF36C51B8E485B26FABE0337E24232BE4F4E693548310244937433FB1A5758195DC73B84ADEF8237472C46747D79DC0A2CF8A57CE8DBD8F466A20F8551E7B1B824B2E4987A8816D9BC0741C2798F3EBAD3ADEBCC78FCE6A771225323}
+ ).
+
+gex_client_old_request_noexact(Config) ->
+ do_gex_client_init_old(Config, 1400,
+ {5, 16#D1391174233D315398FE2830AC6B2B66BCCD01B0A634899F339B7879F1DB85712E9DC4E4B1C6C8355570C1D2DCB53493DF18175A9C53D1128B592B4C72D97136F5542FEB981CBFE8012FDD30361F288A42BD5EBB08BAB0A5640E1AC48763B2ABD1945FEE36B2D55E1D50A1C86CED9DD141C4E7BE2D32D9B562A0F8E2E927020E91F58B57EB9ACDDA106A59302D7E92AD5F6E851A45FA1CFE86029A0F727F65A8F475F33572E2FDAB6073F0C21B8B54C3823DB2EF068927E5D747498F96E1E827}
+ ).
do_gex_client_init_old(Config, N, {G,P}) ->
{ok,_} =
@@ -402,7 +437,9 @@ do_gex_client_init_old(Config, N, {G,P}) ->
[{silently_accept_hosts, true},
{user_dir, user_dir(Config)},
{user_interaction, false},
- {preferred_algorithms,[{kex,['diffie-hellman-group-exchange-sha1']}]}
+ {preferred_algorithms,[{kex,['diffie-hellman-group-exchange-sha1']},
+ {cipher,?DEFAULT_CIPHERS}
+ ]}
]},
receive_hello,
{send, hello},
@@ -572,7 +609,9 @@ client_handles_keyboard_interactive_0_pwds(Config) ->
%% and finally connect to it with a regular Erlang SSH client:
{ok,_} = std_connect(HostPort, Config,
- [{preferred_algorithms,[{kex,['diffie-hellman-group1-sha1']}]}]
+ [{preferred_algorithms,[{kex,[?DEFAULT_KEX]},
+ {cipher,?DEFAULT_CIPHERS}
+ ]}]
).
@@ -623,6 +662,7 @@ stop_apps(_Config) ->
setup_dirs(Config) ->
DataDir = proplists:get_value(data_dir, Config),
PrivDir = proplists:get_value(priv_dir, Config),
+ ssh_test_lib:setup_dsa(DataDir, PrivDir),
ssh_test_lib:setup_rsa(DataDir, PrivDir),
Config.
@@ -708,7 +748,9 @@ connect_and_kex(Config, InitialState) ->
ssh_trpt_test_lib:exec(
[{connect,
server_host(Config),server_port(Config),
- [{preferred_algorithms,[{kex,['diffie-hellman-group1-sha1']}]},
+ [{preferred_algorithms,[{kex,[?DEFAULT_KEX]},
+ {cipher,?DEFAULT_CIPHERS}
+ ]},
{silently_accept_hosts, true},
{user_dir, user_dir(Config)},
{user_interaction, false}]},
diff --git a/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test b/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test
index 2887bb4b60..87c4b4afc8 100644
--- a/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test
+++ b/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test
@@ -1,3 +1,3 @@
-{2222, 5, 61}.
-{1111, 7, 91}.
+{1023, 5, 16#D9277DAA27DB131C03B108D41A76B4DA8ACEECCCAE73D2E48CEDAAA70B09EF9F04FB020DCF36C51B8E485B26FABE0337E24232BE4F4E693548310244937433FB1A5758195DC73B84ADEF8237472C46747D79DC0A2CF8A57CE8DBD8F466A20F8551E7B1B824B2E4987A8816D9BC0741C2798F3EBAD3ADEBCC78FCE6A770E2EC9F}.
+{3071, 5, 16#DFAA35D35531E0F524F0099877A482D2AC8D589F374394A262A8E81A8A4FB2F65FADBAB395E05D147B29D486DFAA41F41597A256DA82A8B6F76401AED53D0253F956CEC610D417E42E3B287F7938FC24D8821B40BFA218A956EB7401BED6C96C68C7FD64F8170A8A76B953DD2F05420118F6B144D8FE48060A2BCB85056B478EDEF96DBC70427053ECD2958C074169E9550DD877779A3CF17C5AC850598C7586BEEA9DCFE9DD2A5FB62DF5F33EA7BC00CDA31B9D2DD721F979EA85B6E63F0C4E30BDDCD3A335522F9004C4ED50B15DC537F55324DD4FA119FB3F101467C6D7E1699DE4B3E3C478A8679B8EB3FA5C9B826B44530FD3BE9AD3063B240B0C853EBDDBD68DD940332D98F148D5D9E1DC977D60A0D23D0CA1198637FEAE4E7FAAC173AF2B84313A666CFB4EE6972811921D0AD867CE57F3BBC8D6CB057E3B66757BB46C9F72662624D44E14528327E3A7100E81A12C43C4E236118318CD90C8AA185BBB0C764826DAEAEE8DD245C5B451B4944E6122CC522D1C335C2EEF9424273F1F}.
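The replacement entries are full-length groups rather than the old toy values. The file itself is a sequence of Erlang terms of the form {Size, Generator, Prime}, so it can be handed to the daemon via the dh_gex_groups option or read back directly (sketch):

%% Reading the group file as plain Erlang terms.
{ok, Groups} = file:consult("dh_group_test"),
[{Size, G, _P} | _] = Groups,                    % here Size = 1023, G = 5
io:format("first group: size ~p, generator ~p~n", [Size, G]).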
diff --git a/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test.moduli b/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test.moduli
index f6995ba4c9..6d2b4bcb59 100644
--- a/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test.moduli
+++ b/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test.moduli
@@ -1,3 +1,2 @@
-20151021104105 2 6 100 2222 5 B7
-20151021104106 2 6 100 1111 5 4F
-
+20120821044046 2 6 100 1023 2 D9277DAA27DB131C03B108D41A76B4DA8ACEECCCAE73D2E48CEDAAA70B09EF9F04FB020DCF36C51B8E485B26FABE0337E24232BE4F4E693548310244937433FB1A5758195DC73B84ADEF8237472C46747D79DC0A2CF8A57CE8DBD8F466A20F8551E7B1B824B2E4987A8816D9BC0741C2798F3EBAD3ADEBCC78FCE6A7711F2C6B
+20120821050554 2 6 100 2047 5 DD2047CBDBB6F8E919BC63DE885B34D0FD6E3DB2887D8B46FE249886ACED6B46DFCD5553168185FD376122171CD8927E60120FA8D01F01D03E58281FEA9A1ABE97631C828E41815F34FDCDF787419FE13A3137649AA93D2584230DF5F24B5C00C88B7D7DE4367693428C730376F218A53E853B0851BAB7C53C15DA7839CBE1285DB63F6FA45C1BB59FE1C5BB918F0F8459D7EF60ACFF5C0FA0F3FCAD1C5F4CE4416D4F4B36B05CDCEBE4FB879E95847EFBC6449CD190248843BC7EDB145FBFC4EDBB1A3C959298F08F3BA2CFBE231BBE204BE6F906209D28BD4820AB3E7BE96C26AE8A809ADD8D1A5A0B008E9570FA4C4697E116B8119892C604293683A9635F
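dh_group_test.moduli carries the same kind of data in an OpenSSH-moduli-style layout; the columns are assumed here to be timestamp, type, tests, tries, size, generator and the modulus in hex. Turning one line into the {Size, G, P} shape used above is then a one-liner (sketch):

%% Assumed column layout; only the last three fields matter to the daemon.
parse_moduli_line(Line) ->
    [_Time, _Type, _Tests, _Tries, Size, G, Hex] = string:tokens(Line, " \n"),
    {list_to_integer(Size), list_to_integer(G), list_to_integer(Hex, 16)}.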
diff --git a/lib/ssh/test/ssh_sftp_SUITE_data/ssh_host_rsa_key b/lib/ssh/test/ssh_sftp_SUITE_data/ssh_host_rsa_key
new file mode 100644
index 0000000000..79968bdd7d
--- /dev/null
+++ b/lib/ssh/test/ssh_sftp_SUITE_data/ssh_host_rsa_key
@@ -0,0 +1,16 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIICXQIBAAKBgQDCZX+4FBDwZIh9y/Uxee1VJnEXlowpz2yDKwj8semM4q843337
+zbNfxHmladB1lpz2NqyxI175xMIJuDxogyZdsOxGnFAzAnthR4dqL/RWRWzjaxSB
+6IAO9SPYVVlrpZ+1hsjLW79fwXK/yc8VdhRuWTeQiRgYY2ek8+OKbOqz4QIDAQAB
+AoGANmvJzJO5hkLuvyDZHKfAnGTtpifcR1wtSa9DjdKUyn8vhKF0mIimnbnYQEmW
+NUUb3gXCZLi9PvkpRSVRrASDOZwcjoU/Kvww163vBUVb2cOZfFhyn6o2Sk88Tt++
+udH3hdjpf9i7jTtUkUe+QYPsia+wgvvrmn4QrahLAH86+kECQQDx5gFeXTME3cnW
+WMpFz3PPumduzjqgqMMWEccX4FtQkMX/gyGa5UC7OHFyh0N/gSWvPbRHa8A6YgIt
+n8DO+fh5AkEAzbqX4DOn8NY6xJIi42q7l/2jIA0RkB6P7YugW5NblhqBZ0XDnpA5
+sMt+rz+K07u9XZtxgh1xi7mNfwY6lEAMqQJBAJBEauCKmRj35Z6OyeQku59SPsnY
++SJEREVvSNw2lH9SOKQQ4wPsYlTGbvKtNVZgAcen91L5MmYfeckYE/fdIZECQQCt
+64zxsTnM1I8iFxj/gP/OYlJBikrKt8udWmjaghzvLMEw+T2DExJyb9ZNeT53+UMB
+m6O+B/4xzU/djvp+0hbhAkAemIt+rA5kTmYlFndhpvzkSSM8a2EXsO4XIPgGWCTT
+tQKS/tTly0ADMjN/TVy11+9d6zcqadNVuHXHGtR4W0GR
+-----END RSA PRIVATE KEY-----
+
diff --git a/lib/ssh/test/ssh_sftp_SUITE_data/ssh_host_rsa_key.pub b/lib/ssh/test/ssh_sftp_SUITE_data/ssh_host_rsa_key.pub
new file mode 100644
index 0000000000..75d2025c71
--- /dev/null
+++ b/lib/ssh/test/ssh_sftp_SUITE_data/ssh_host_rsa_key.pub
@@ -0,0 +1,5 @@
+---- BEGIN SSH2 PUBLIC KEY ----
+AAAAB3NzaC1yc2EAAAADAQABAAAAgQDCZX+4FBDwZIh9y/Uxee1VJnEXlowpz2yDKwj8
+semM4q843337zbNfxHmladB1lpz2NqyxI175xMIJuDxogyZdsOxGnFAzAnthR4dqL/RW
+RWzjaxSB6IAO9SPYVVlrpZ+1hsjLW79fwXK/yc8VdhRuWTeQiRgYY2ek8+OKbOqz4Q==
+---- END SSH2 PUBLIC KEY ----
diff --git a/lib/ssh/test/ssh_sftpd_SUITE.erl b/lib/ssh/test/ssh_sftpd_SUITE.erl
index 52a26110c4..b167f98ac8 100644
--- a/lib/ssh/test/ssh_sftpd_SUITE.erl
+++ b/lib/ssh/test/ssh_sftpd_SUITE.erl
@@ -65,7 +65,12 @@ all() ->
ver3_open_flags,
relpath,
sshd_read_file,
- ver6_basic].
+ ver6_basic,
+ access_outside_root,
+ root_with_cwd,
+ relative_path,
+ open_file_dir_v5,
+ open_file_dir_v6].
groups() ->
[].
@@ -117,6 +122,31 @@ init_per_testcase(TestCase, Config) ->
ver6_basic ->
SubSystems = [ssh_sftpd:subsystem_spec([{sftpd_vsn, 6}])],
ssh:daemon(0, [{subsystems, SubSystems}|Options]);
+ access_outside_root ->
+ %% Build RootDir/access_outside_root/a/b and set Root and CWD
+ BaseDir = filename:join(PrivDir, access_outside_root),
+ RootDir = filename:join(BaseDir, a),
+ CWD = filename:join(RootDir, b),
+ %% Make the directory chain:
+ ok = filelib:ensure_dir(filename:join(CWD, tmp)),
+ SubSystems = [ssh_sftpd:subsystem_spec([{root, RootDir},
+ {cwd, CWD}])],
+ ssh:daemon(0, [{subsystems, SubSystems}|Options]);
+ root_with_cwd ->
+ RootDir = filename:join(PrivDir, root_with_cwd),
+ CWD = filename:join(RootDir, home),
+ SubSystems = [ssh_sftpd:subsystem_spec([{root, RootDir}, {cwd, CWD}])],
+ ssh:daemon(0, [{subsystems, SubSystems}|Options]);
+ relative_path ->
+ SubSystems = [ssh_sftpd:subsystem_spec([{cwd, PrivDir}])],
+ ssh:daemon(0, [{subsystems, SubSystems}|Options]);
+ open_file_dir_v5 ->
+ SubSystems = [ssh_sftpd:subsystem_spec([{cwd, PrivDir}])],
+ ssh:daemon(0, [{subsystems, SubSystems}|Options]);
+ open_file_dir_v6 ->
+ SubSystems = [ssh_sftpd:subsystem_spec([{cwd, PrivDir},
+ {sftpd_vsn, 6}])],
+ ssh:daemon(0, [{subsystems, SubSystems}|Options]);
_ ->
SubSystems = [ssh_sftpd:subsystem_spec([])],
ssh:daemon(0, [{subsystems, SubSystems}|Options])
@@ -128,8 +158,7 @@ init_per_testcase(TestCase, Config) ->
[{user_dir, ClientUserDir},
{user, ?USER}, {password, ?PASSWD},
{user_interaction, false},
- {silently_accept_hosts, true},
- {pwdfun, fun(_,_) -> true end}]),
+ {silently_accept_hosts, true}]),
{ok, Channel} =
ssh_connection:session_channel(Cm, ?XFER_WINDOW_SIZE,
?XFER_PACKET_SIZE, ?TIMEOUT),
@@ -646,6 +675,133 @@ ver6_basic(Config) when is_list(Config) ->
open_file(PrivDir, Cm, Channel, ReqId,
?ACE4_READ_DATA bor ?ACE4_READ_ATTRIBUTES,
?SSH_FXF_OPEN_EXISTING).
+
+%%--------------------------------------------------------------------
+access_outside_root() ->
+ [{doc, "Try access files outside the tree below RootDir"}].
+access_outside_root(Config) when is_list(Config) ->
+ PrivDir = proplists:get_value(priv_dir, Config),
+ BaseDir = filename:join(PrivDir, access_outside_root),
+ %% A file outside the tree below RootDir (which is BaseDir/a).
+ %% Make the file BaseDir/bad:
+ BadFilePath = filename:join([BaseDir, bad]),
+ ok = file:write_file(BadFilePath, <<>>),
+ {Cm, Channel} = proplists:get_value(sftp, Config),
+ %% Try to access a file parallel to the RootDir:
+ try_access("/../bad", Cm, Channel, 0),
+ %% Try to access the same file via the CWD which is /b relative to the RootDir:
+ try_access("../../bad", Cm, Channel, 1).
+
+
+try_access(Path, Cm, Channel, ReqId) ->
+ Return =
+ open_file(Path, Cm, Channel, ReqId,
+ ?ACE4_READ_DATA bor ?ACE4_READ_ATTRIBUTES,
+ ?SSH_FXF_OPEN_EXISTING),
+ ct:log("Try open ~p -> ~p",[Path,Return]),
+ case Return of
+ {ok, <<?SSH_FXP_HANDLE, ?UINT32(ReqId), _Handle0/binary>>, _} ->
+ ct:fail("Could open a file outside the root tree!");
+ {ok, <<?SSH_FXP_STATUS, ?UINT32(ReqId), ?UINT32(Code), Rest/binary>>, <<>>} ->
+ case Code of
+ ?SSH_FX_FILE_IS_A_DIRECTORY ->
+ ct:pal("Got the expected SSH_FX_FILE_IS_A_DIRECTORY status",[]),
+ ok;
+ ?SSH_FX_FAILURE ->
+ ct:pal("Got the expected SSH_FX_FAILURE status",[]),
+ ok;
+ _ ->
+ case Rest of
+ <<?UINT32(Len), Txt:Len/binary, _/binary>> ->
+ ct:fail("Got unexpected SSH_FX_code: ~p (~p)",[Code,Txt]);
+ _ ->
+ ct:fail("Got unexpected SSH_FX_code: ~p",[Code])
+ end
+ end;
+ _ ->
+ ct:fail("Completly unexpected return: ~p", [Return])
+ end.
+
+%%--------------------------------------------------------------------
+root_with_cwd() ->
+ [{doc, "Check if files are found, if the CWD and Root are specified"}].
+root_with_cwd(Config) when is_list(Config) ->
+ PrivDir = proplists:get_value(priv_dir, Config),
+ RootDir = filename:join(PrivDir, root_with_cwd),
+ CWD = filename:join(RootDir, home),
+ FileName = "root_with_cwd.txt",
+ FilePath = filename:join(CWD, FileName),
+ ok = filelib:ensure_dir(FilePath),
+ ok = file:write_file(FilePath ++ "0", <<>>),
+ ok = file:write_file(FilePath ++ "1", <<>>),
+ ok = file:write_file(FilePath ++ "2", <<>>),
+ {Cm, Channel} = proplists:get_value(sftp, Config),
+ ReqId0 = 0,
+ {ok, <<?SSH_FXP_HANDLE, ?UINT32(ReqId0), _Handle0/binary>>, _} =
+ open_file(FileName ++ "0", Cm, Channel, ReqId0,
+ ?ACE4_READ_DATA bor ?ACE4_READ_ATTRIBUTES,
+ ?SSH_FXF_OPEN_EXISTING),
+ ReqId1 = 1,
+ {ok, <<?SSH_FXP_HANDLE, ?UINT32(ReqId1), _Handle1/binary>>, _} =
+ open_file("./" ++ FileName ++ "1", Cm, Channel, ReqId1,
+ ?ACE4_READ_DATA bor ?ACE4_READ_ATTRIBUTES,
+ ?SSH_FXF_OPEN_EXISTING),
+ ReqId2 = 2,
+ {ok, <<?SSH_FXP_HANDLE, ?UINT32(ReqId2), _Handle2/binary>>, _} =
+ open_file("/home/" ++ FileName ++ "2", Cm, Channel, ReqId2,
+ ?ACE4_READ_DATA bor ?ACE4_READ_ATTRIBUTES,
+ ?SSH_FXF_OPEN_EXISTING).
+
+%%--------------------------------------------------------------------
+relative_path() ->
+ [{doc, "Test paths relative to CWD when opening a file handle."}].
+relative_path(Config) when is_list(Config) ->
+ PrivDir = proplists:get_value(priv_dir, Config),
+ FileName = "test_relative_path.txt",
+ FilePath = filename:join(PrivDir, FileName),
+ ok = filelib:ensure_dir(FilePath),
+ ok = file:write_file(FilePath, <<>>),
+ {Cm, Channel} = proplists:get_value(sftp, Config),
+ ReqId = 0,
+ {ok, <<?SSH_FXP_HANDLE, ?UINT32(ReqId), _Handle/binary>>, _} =
+ open_file(FileName, Cm, Channel, ReqId,
+ ?ACE4_READ_DATA bor ?ACE4_READ_ATTRIBUTES,
+ ?SSH_FXF_OPEN_EXISTING).
+
+%%--------------------------------------------------------------------
+open_file_dir_v5() ->
+ [{doc, "Test if open_file fails when opening existing directory."}].
+open_file_dir_v5(Config) when is_list(Config) ->
+ PrivDir = proplists:get_value(priv_dir, Config),
+ FileName = "open_file_dir_v5",
+ FilePath = filename:join(PrivDir, FileName),
+ ok = filelib:ensure_dir(FilePath),
+ ok = file:make_dir(FilePath),
+ {Cm, Channel} = proplists:get_value(sftp, Config),
+ ReqId = 0,
+ {ok, <<?SSH_FXP_STATUS, ?UINT32(ReqId),
+ ?UINT32(?SSH_FX_FAILURE), _/binary>>, _} =
+ open_file(FileName, Cm, Channel, ReqId,
+ ?ACE4_READ_DATA bor ?ACE4_READ_ATTRIBUTES,
+ ?SSH_FXF_OPEN_EXISTING).
+
+%%--------------------------------------------------------------------
+open_file_dir_v6() ->
+ [{doc, "Test if open_file fails when opening existing directory."}].
+open_file_dir_v6(Config) when is_list(Config) ->
+ PrivDir = proplists:get_value(priv_dir, Config),
+ FileName = "open_file_dir_v6",
+ FilePath = filename:join(PrivDir, FileName),
+ ok = filelib:ensure_dir(FilePath),
+ ok = file:make_dir(FilePath),
+ {Cm, Channel} = proplists:get_value(sftp, Config),
+ ReqId = 0,
+ {ok, <<?SSH_FXP_STATUS, ?UINT32(ReqId),
+ ?UINT32(?SSH_FX_FILE_IS_A_DIRECTORY), _/binary>>, _} =
+ open_file(FileName, Cm, Channel, ReqId,
+ ?ACE4_READ_DATA bor ?ACE4_READ_ATTRIBUTES,
+ ?SSH_FXF_OPEN_EXISTING).
+
%%--------------------------------------------------------------------
%% Internal functions ------------------------------------------------
%%--------------------------------------------------------------------
@@ -688,9 +844,7 @@ reply(Cm, Channel, RBuf) ->
30000 -> ct:fail("timeout ~p:~p",[?MODULE,?LINE])
end.
-
open_file(File, Cm, Channel, ReqId, Access, Flags) ->
-
Data = list_to_binary([?uint32(ReqId),
?binary(list_to_binary(File)),
?uint32(Access),
diff --git a/lib/ssh/test/ssh_sftpd_erlclient_SUITE.erl b/lib/ssh/test/ssh_sftpd_erlclient_SUITE.erl
index 56a33d6349..b4d7eadfa4 100644
--- a/lib/ssh/test/ssh_sftpd_erlclient_SUITE.erl
+++ b/lib/ssh/test/ssh_sftpd_erlclient_SUITE.erl
@@ -65,6 +65,7 @@ init_per_suite(Config) ->
{ok, FileInfo} = file:read_file_info(FileName),
ok = file:write_file_info(FileName,
FileInfo#file_info{mode = 8#400}),
+ ssh_test_lib:setup_rsa(DataDir, PrivDir),
ssh_test_lib:setup_dsa(DataDir, PrivDir),
Config
end).
@@ -73,6 +74,7 @@ end_per_suite(Config) ->
UserDir = filename:join(proplists:get_value(priv_dir, Config), nopubkey),
file:del_dir(UserDir),
SysDir = proplists:get_value(priv_dir, Config),
+ ssh_test_lib:clean_rsa(SysDir),
ssh_test_lib:clean_dsa(SysDir),
ok.
@@ -187,7 +189,6 @@ quit(Config) when is_list(Config) ->
timer:sleep(5000),
{ok, NewSftp, _Conn} = ssh_sftp:start_channel(Host, Port,
[{silently_accept_hosts, true},
- {pwdfun, fun(_,_) -> true end},
{user_dir, UserDir},
{user, ?USER}, {password, ?PASSWD}]),
diff --git a/lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/id_rsa b/lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/id_rsa
new file mode 100644
index 0000000000..9d7e0dd5fb
--- /dev/null
+++ b/lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/id_rsa
@@ -0,0 +1,15 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIICXAIBAAKBgQD1OET+3O/Bvj/dtjxDTXmj1oiJt4sIph5kGy0RfjoPrZfaS+CU
+DhakCmS6t2ivxWFgtpKWaoGMZMJqWj6F6ZsumyFl3FPBtujwY/35cgifrI9Ns4Tl
+zR1uuengNBmV+WRQ5cd9F2qS6Z8aDQihzt0r8JUqLcK+VQbrmNzboCCQQwIDAQAB
+AoGAPQEyqPTt8JUT7mRXuaacjFXiweAXhp9NEDpyi9eLOjtFe9lElZCrsUOkq47V
+TGUeRKEm9qSodfTbKPoqc8YaBJGJPhUaTAcha+7QcDdfHBvIsgxvU7ePVnlpXRp3
+CCUEMPhlnx6xBoTYP+fRU0e3+xJIPVyVCqX1jAdUMkzfRoECQQD6ux7B1QJAIWyK
+SGkbDUbBilNmzCFNgIpOP6PA+bwfi5d16diTpra5AX09keQABAo/KaP1PdV8Vg0p
+z4P3A7G3AkEA+l+AKG6m0kQTTBMJDqOdVPYwe+5GxunMaqmhokpEbuGsrZBl5Dvd
+WpcBjR7jmenrhKZRIuA+Fz5HPo/UQJPl1QJBAKxstDkeED8j/S2XoFhPKAJ+6t39
+sUVICVTIZQeXdmzHJXCcUSkw8+WEhakqw/3SyW0oaK2FSWQJFWJUZ+8eJj8CQEh3
+xeduB5kKnS9CvzdeghZqX6QvVosSdtlUmfUYW/BgH5PpHKTP8wTaeld3XldZTpMJ
+dKiMkUw2+XYROVUrubUCQD+Na1LhULlpn4ISEtIEfqpdlUhxDgO15Wg8USmsng+x
+ICliVOSQtwaZjm8kwaFt0W7XnpnDxbRs37vIEbIMWak=
+-----END RSA PRIVATE KEY-----
diff --git a/lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/ssh_host_rsa_key b/lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/ssh_host_rsa_key
new file mode 100644
index 0000000000..79968bdd7d
--- /dev/null
+++ b/lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/ssh_host_rsa_key
@@ -0,0 +1,16 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIICXQIBAAKBgQDCZX+4FBDwZIh9y/Uxee1VJnEXlowpz2yDKwj8semM4q843337
+zbNfxHmladB1lpz2NqyxI175xMIJuDxogyZdsOxGnFAzAnthR4dqL/RWRWzjaxSB
+6IAO9SPYVVlrpZ+1hsjLW79fwXK/yc8VdhRuWTeQiRgYY2ek8+OKbOqz4QIDAQAB
+AoGANmvJzJO5hkLuvyDZHKfAnGTtpifcR1wtSa9DjdKUyn8vhKF0mIimnbnYQEmW
+NUUb3gXCZLi9PvkpRSVRrASDOZwcjoU/Kvww163vBUVb2cOZfFhyn6o2Sk88Tt++
+udH3hdjpf9i7jTtUkUe+QYPsia+wgvvrmn4QrahLAH86+kECQQDx5gFeXTME3cnW
+WMpFz3PPumduzjqgqMMWEccX4FtQkMX/gyGa5UC7OHFyh0N/gSWvPbRHa8A6YgIt
+n8DO+fh5AkEAzbqX4DOn8NY6xJIi42q7l/2jIA0RkB6P7YugW5NblhqBZ0XDnpA5
+sMt+rz+K07u9XZtxgh1xi7mNfwY6lEAMqQJBAJBEauCKmRj35Z6OyeQku59SPsnY
++SJEREVvSNw2lH9SOKQQ4wPsYlTGbvKtNVZgAcen91L5MmYfeckYE/fdIZECQQCt
+64zxsTnM1I8iFxj/gP/OYlJBikrKt8udWmjaghzvLMEw+T2DExJyb9ZNeT53+UMB
+m6O+B/4xzU/djvp+0hbhAkAemIt+rA5kTmYlFndhpvzkSSM8a2EXsO4XIPgGWCTT
+tQKS/tTly0ADMjN/TVy11+9d6zcqadNVuHXHGtR4W0GR
+-----END RSA PRIVATE KEY-----
+
diff --git a/lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/ssh_host_rsa_key.pub b/lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/ssh_host_rsa_key.pub
new file mode 100644
index 0000000000..75d2025c71
--- /dev/null
+++ b/lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/ssh_host_rsa_key.pub
@@ -0,0 +1,5 @@
+---- BEGIN SSH2 PUBLIC KEY ----
+AAAAB3NzaC1yc2EAAAADAQABAAAAgQDCZX+4FBDwZIh9y/Uxee1VJnEXlowpz2yDKwj8
+semM4q843337zbNfxHmladB1lpz2NqyxI175xMIJuDxogyZdsOxGnFAzAnthR4dqL/RW
+RWzjaxSB6IAO9SPYVVlrpZ+1hsjLW79fwXK/yc8VdhRuWTeQiRgYY2ek8+OKbOqz4Q==
+---- END SSH2 PUBLIC KEY ----
diff --git a/lib/ssh/test/ssh_test_lib.erl b/lib/ssh/test/ssh_test_lib.erl
index 286ac6e882..1673f52821 100644
--- a/lib/ssh/test/ssh_test_lib.erl
+++ b/lib/ssh/test/ssh_test_lib.erl
@@ -690,13 +690,16 @@ ssh_type() ->
ssh_type1() ->
try
+ ct:log("~p:~p os:find_executable(\"ssh\")",[?MODULE,?LINE]),
case os:find_executable("ssh") of
false ->
ct:log("~p:~p Executable \"ssh\" not found",[?MODULE,?LINE]),
not_found;
- _ ->
+ Path ->
+ ct:log("~p:~p Found \"ssh\" at ~p",[?MODULE,?LINE,Path]),
case os:cmd("ssh -V") of
- "OpenSSH" ++ _ ->
+ Version = "OpenSSH" ++ _ ->
+ ct:log("~p:~p Found OpenSSH ~p",[?MODULE,?LINE,Version]),
openSSH;
Str ->
ct:log("ssh client ~p is unknown",[Str]),
diff --git a/lib/ssh/test/ssh_to_openssh_SUITE.erl b/lib/ssh/test/ssh_to_openssh_SUITE.erl
index 86c3d5de26..687e6efaf3 100644
--- a/lib/ssh/test/ssh_to_openssh_SUITE.erl
+++ b/lib/ssh/test/ssh_to_openssh_SUITE.erl
@@ -36,7 +36,7 @@
%%--------------------------------------------------------------------
suite() ->
- [{timetrap,{seconds,20}}].
+ [{timetrap,{seconds,60}}].
all() ->
case os:find_executable("ssh") of
@@ -381,7 +381,6 @@ erlang_server_openssh_client_public_key_X(Config, PubKeyAlg) ->
PrivDir = proplists:get_value(priv_dir, Config),
KnownHosts = filename:join(PrivDir, "known_hosts"),
{Pid, Host, Port} = ssh_test_lib:daemon([{system_dir, SystemDir},
- {public_key_alg, PubKeyAlg},
{failfun, fun ssh_test_lib:failfun/2}]),
ct:sleep(500),
@@ -402,7 +401,6 @@ erlang_server_openssh_client_renegotiate(Config) ->
KnownHosts = filename:join(PrivDir, "known_hosts"),
{Pid, Host, Port} = ssh_test_lib:daemon([{system_dir, SystemDir},
- {public_key_alg, PubKeyAlg},
{failfun, fun ssh_test_lib:failfun/2}]),
ct:sleep(500),
@@ -442,7 +440,7 @@ erlang_server_openssh_client_renegotiate(Config) ->
ssh_test_lib:rcv_expected(Expect, OpenSsh, ?TIMEOUT)
of
_ ->
- %% Unfortunatly we can't check that there has been a renegotiation, just trust OpenSSH.
+ %% Unfortunately we can't check that there has been a renegotiation, just trust OpenSSH.
ssh:stop_daemon(Pid)
catch
throw:{skip,R} -> {skip,R}
@@ -464,6 +462,7 @@ erlang_client_openssh_server_renegotiate(_Config) ->
{silently_accept_hosts,true}],
group_leader(IO, self()),
{ok, ConnRef} = ssh:connect(Host, ?SSH_DEFAULT_PORT, Options),
+ ct:pal("Parent = ~p, IO = ~p, Shell = ~p, ConnRef = ~p~n",[Parent, IO, self(), ConnRef]),
case ssh_connection:session_channel(ConnRef, infinity) of
{ok,ChannelId} ->
success = ssh_connection:ptty_alloc(ConnRef, ChannelId, []),
diff --git a/lib/ssh/test/ssh_trpt_test_lib.erl b/lib/ssh/test/ssh_trpt_test_lib.erl
index bc86000d81..261239c152 100644
--- a/lib/ssh/test/ssh_trpt_test_lib.erl
+++ b/lib/ssh/test/ssh_trpt_test_lib.erl
@@ -85,15 +85,18 @@ exec(Op, S0=#s{}) ->
throw:Term ->
report_trace(throw, Term, S1),
- throw(Term);
+ throw({Term,Op});
error:Error ->
report_trace(error, Error, S1),
- error(Error);
+ error({Error,Op});
exit:Exit ->
report_trace(exit, Exit, S1),
- exit(Exit)
+ exit({Exit,Op});
+ Cls:Err ->
+ ct:pal("Class=~p, Error=~p", [Cls,Err]),
+ error({"fooooooO",Op})
end;
exec(Op, {ok,S=#s{}}) -> exec(Op, S);
exec(_, Error) -> Error.
@@ -111,20 +114,20 @@ op({accept,Opts}, S) when ?role(S) == server ->
{ok,Socket} = gen_tcp:accept(S#s.listen_socket, S#s.timeout),
{Host,_Port} = ok(inet:sockname(Socket)),
S#s{socket = Socket,
- ssh = init_ssh(server,Socket,[{host,host(Host)}|Opts]),
+ ssh = init_ssh(server, Socket, host(Host), Opts),
return_value = ok};
%%%---- Client ops
op({connect,Host,Port,Opts}, S) when ?role(S) == undefined ->
Socket = ok(gen_tcp:connect(host(Host), Port, mangle_opts([]))),
S#s{socket = Socket,
- ssh = init_ssh(client, Socket, [{host,host(Host)}|Opts]),
+ ssh = init_ssh(client, Socket, host(Host), Opts),
return_value = ok};
%%%---- ops for both client and server
op(close_socket, S) ->
- catch tcp_gen:close(S#s.socket),
- catch tcp_gen:close(S#s.listen_socket),
+ catch gen_tcp:close(S#s.socket),
+ catch gen_tcp:close(S#s.listen_socket),
S#s{socket = undefined,
listen_socket = undefined,
return_value = ok};
@@ -293,12 +296,14 @@ instantiate(X, _S) ->
%%%================================================================
%%%
-init_ssh(Role, Socket, Options0) ->
- Options = [{user_interaction, false},
- {vsn, {2,0}},
- {id_string, "ErlangTestLib"}
- | Options0],
- ssh_connection_handler:init_ssh_record(Role, Socket, Options).
+init_ssh(Role, Socket, Host, UserOptions0) ->
+ UserOptions = [{user_interaction, false},
+ {vsn, {2,0}},
+ {id_string, "ErlangTestLib"}
+ | UserOptions0],
+ Opts = ?PUT_INTERNAL_OPT({host,Host},
+ ssh_options:handle_options(Role, UserOptions)),
+ ssh_connection_handler:init_ssh_record(Role, Socket, Opts).
mangle_opts(Options) ->
SysOpts = [{reuseaddr, true},
diff --git a/lib/ssl/doc/src/ssl.xml b/lib/ssl/doc/src/ssl.xml
index edc7e0d8b2..916b41742e 100644
--- a/lib/ssl/doc/src/ssl.xml
+++ b/lib/ssl/doc/src/ssl.xml
@@ -424,6 +424,14 @@ marker="public_key:public_key#pkix_path_validation-3">public_key:pkix_path_valid
</taglist>
</item>
+
+ <tag><c>max_handshake_size</c></tag>
+ <item>
+ <p>Integer (24 bits unsigned). Used to limit the size of
+ valid TLS handshake packets to avoid DoS attacks.
+ Defaults to 256*1024.</p>
+ </item>
+
</taglist>
</item>
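A quick usage sketch of the option documented above (not part of the patch; the host, port and the 128 kB figure are made-up illustrative values):

    %% Sketch only: cap incoming TLS handshake messages at 128 kB instead of
    %% the 256*1024-byte default mentioned in the documentation above.
    connect_with_handshake_limit(Host, Port) ->
        {ok, Socket} = ssl:connect(Host, Port,
                                   [{max_handshake_size, 128 * 1024}]),
        ssl:close(Socket).

Since the option is handled as a generic ssl option in the source hunks below, it should be accepted by ssl:listen/2 on the server side as well; handshake messages larger than the configured limit are treated as invalid, which is what makes the option useful against memory-exhaustion attacks.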
diff --git a/lib/ssl/doc/src/ssl_session_cache_api.xml b/lib/ssl/doc/src/ssl_session_cache_api.xml
index b85d8fb284..1b41eae89d 100644
--- a/lib/ssl/doc/src/ssl_session_cache_api.xml
+++ b/lib/ssl/doc/src/ssl_session_cache_api.xml
@@ -11,7 +11,7 @@
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-
+
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
@@ -62,8 +62,8 @@
</taglist>
</section>
-
- <funcs>
+
+ <funcs>
<func>
<name>delete(Cache, Key) -> _</name>
@@ -134,7 +134,7 @@
</p>
</desc>
</func>
-
+
<func>
<name>select_session(Cache, PartialKey) -> [session()]</name>
<fsummary>Selects sessions that can be reused.</fsummary>
@@ -151,6 +151,21 @@
</func>
<func>
+ <name>size(Cache) -> integer()</name>
+ <fsummary>Returns the number of sessions in the cache.</fsummary>
+ <type>
+ <v>Cache = cache_ref()</v>
+ </type>
+ <desc>
+ <p>Returns the number of sessions in the cache. If size
+ exceeds the maximum number of sessions, the current cache
+ entries will be invalidated regardless of their remaining
+ lifetime. Is to be callable from any process.
+ </p>
+ </desc>
+ </func>
+
+ <func>
<name>terminate(Cache) -> _</name>
<fsummary>Called by the process that handles the cache when it
is about to terminate.</fsummary>
@@ -178,7 +193,7 @@
</p>
</desc>
</func>
-
- </funcs>
-
+
+ </funcs>
+
</erlref>
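The hunk above adds a size/1 entry to the session-cache callback API. A rough sketch of how an ETS-backed callback module might satisfy it (assuming, as for the default cache, that Cache is the ETS table identifier returned by the module's init/1):

    %% Partial callback-module sketch; only the new function is shown.
    -module(ets_session_cache_sketch).
    -compile({no_auto_import, [size/1]}).   %% size/1 clashes with the BIF
    -export([size/1]).

    %% Number of sessions currently stored; callable from any process.
    size(Cache) ->
        ets:info(Cache, size).

ets:info/2 can be called from any process regardless of table ownership, which matches the "callable from any process" requirement in the description.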
diff --git a/lib/ssl/src/Makefile b/lib/ssl/src/Makefile
index 3dda1a3316..2e7df9792e 100644
--- a/lib/ssl/src/Makefile
+++ b/lib/ssl/src/Makefile
@@ -48,9 +48,17 @@ MODULES= \
dtls \
ssl_alert \
ssl_app \
- ssl_dist_sup\
ssl_sup \
+ ssl_admin_sup\
+ tls_connection_sup \
+ ssl_connection_sup \
+ ssl_listen_tracker_sup\
+ dtls_connection_sup \
+ dtls_udp_listener\
dtls_udp_sup \
+ ssl_dist_sup\
+ ssl_dist_admin_sup\
+ ssl_dist_connection_sup\
inet_tls_dist \
inet6_tls_dist \
ssl_certificate\
@@ -61,21 +69,18 @@ MODULES= \
dtls_connection \
ssl_config \
ssl_connection \
- tls_connection_sup \
- dtls_connection_sup \
tls_handshake \
dtls_handshake\
ssl_handshake\
ssl_manager \
ssl_session \
ssl_session_cache \
+ ssl_pem_cache \
ssl_crl\
ssl_crl_cache \
ssl_crl_hash_dir \
tls_socket \
dtls_socket \
- dtls_udp_listener\
- ssl_listen_tracker_sup \
tls_record \
dtls_record \
ssl_record \
diff --git a/lib/ssl/src/dtls_connection.erl b/lib/ssl/src/dtls_connection.erl
index 070a90d481..f607c86ae3 100644
--- a/lib/ssl/src/dtls_connection.erl
+++ b/lib/ssl/src/dtls_connection.erl
@@ -39,7 +39,7 @@
-export([start_fsm/8, start_link/7, init/1]).
%% State transition handling
--export([next_record/1, next_event/3]).
+-export([next_record/1, next_event/3, next_event/4]).
%% Handshake handling
-export([renegotiate/2,
@@ -53,7 +53,7 @@
%% Data handling
-export([encode_data/3, passive_receive/2, next_record_if_active/1, handle_common_event/4,
- send/3]).
+ send/3, socket/5]).
%% gen_statem state functions
-export([init/3, error/3, downgrade/3, %% Initiation and take down states
@@ -77,20 +77,6 @@ start_fsm(Role, Host, Port, Socket, {#ssl_options{erl_dist = false},_, Tracker}
catch
error:{badmatch, {error, _} = Error} ->
Error
- end;
-
-start_fsm(Role, Host, Port, Socket, {#ssl_options{erl_dist = true},_, Tracker} = Opts,
- User, {CbModule, _,_, _} = CbInfo,
- Timeout) ->
- try
- {ok, Pid} = dtls_connection_sup:start_child_dist([Role, Host, Port, Socket,
- Opts, User, CbInfo]),
- {ok, SslSocket} = ssl_connection:socket_control(?MODULE, Socket, Pid, CbModule, Tracker),
- ok = ssl_connection:handshake(SslSocket, Timeout),
- {ok, SslSocket}
- catch
- error:{badmatch, {error, _} = Error} ->
- Error
end.
send_handshake(Handshake, #state{connection_states = ConnectionStates} = States) ->
@@ -201,6 +187,7 @@ reinit_handshake_data(#state{protocol_buffers = Buffers} = State) ->
State#state{premaster_secret = undefined,
public_key_info = undefined,
tls_handshake_history = ssl_handshake:init_handshake_history(),
+ flight_state = {retransmit, ?INITIAL_RETRANSMIT_TIMEOUT},
protocol_buffers =
Buffers#protocol_buffers{
dtls_handshake_next_seq = 0,
@@ -213,6 +200,9 @@ select_sni_extension(#client_hello{extensions = HelloExtensions}) ->
select_sni_extension(_) ->
undefined.
+socket(Pid, Transport, Socket, Connection, _) ->
+ dtls_socket:socket(Pid, Transport, Socket, Connection).
+
%%====================================================================
%% tls_connection_sup API
%%====================================================================
@@ -243,7 +233,7 @@ callback_mode() ->
state_functions.
%%--------------------------------------------------------------------
-%% State functionsconnection/2
+%% State functions
%%--------------------------------------------------------------------
init({call, From}, {start, Timeout},
@@ -262,17 +252,19 @@ init({call, From}, {start, Timeout},
Version = Hello#client_hello.client_version,
HelloVersion = dtls_record:lowest_protocol_version(SslOpts#ssl_options.versions),
State1 = prepare_flight(State0#state{negotiated_version = Version}),
- State2 = send_handshake(Hello, State1#state{negotiated_version = HelloVersion}),
+ {State2, Actions} = send_handshake(Hello, State1#state{negotiated_version = HelloVersion}),
State3 = State2#state{negotiated_version = Version, %% Requested version
session =
Session0#session{session_id = Hello#client_hello.session_id},
start_or_recv_from = From,
- timer = Timer},
+ timer = Timer,
+ flight_state = {retransmit, ?INITIAL_RETRANSMIT_TIMEOUT}
+ },
{Record, State} = next_record(State3),
- next_event(hello, Record, State);
+ next_event(hello, Record, State, Actions);
init({call, _} = Type, Event, #state{role = server, transport_cb = gen_udp} = State) ->
ssl_connection:init(Type, Event,
- State#state{flight_state = {waiting, undefined, ?INITIAL_RETRANSMIT_TIMEOUT}},
+ State#state{flight_state = {retransmit, ?INITIAL_RETRANSMIT_TIMEOUT}},
?MODULE);
init({call, _} = Type, Event, #state{role = server} = State) ->
%% I.E. DTLS over sctp
@@ -302,9 +294,9 @@ hello(internal, #client_hello{cookie = <<>>,
Cookie = dtls_handshake:cookie(<<"secret">>, IP, Port, Hello),
VerifyRequest = dtls_handshake:hello_verify_request(Cookie, Version),
State1 = prepare_flight(State0#state{negotiated_version = Version}),
- State2 = send_handshake(VerifyRequest, State1),
+ {State2, Actions} = send_handshake(VerifyRequest, State1),
{Record, State} = next_record(State2),
- next_event(hello, Record, State#state{tls_handshake_history = ssl_handshake:init_handshake_history()});
+ next_event(hello, Record, State#state{tls_handshake_history = ssl_handshake:init_handshake_history()}, Actions);
hello(internal, #client_hello{cookie = Cookie} = Hello, #state{role = server,
transport_cb = Transport,
socket = Socket} = State0) ->
@@ -333,13 +325,13 @@ hello(internal, #hello_verify_request{cookie = Cookie}, #state{role = client,
Cache, CacheCb, Renegotiation, OwnCert),
Version = Hello#client_hello.client_version,
HelloVersion = dtls_record:lowest_protocol_version(SslOpts#ssl_options.versions),
- State2 = send_handshake(Hello, State1#state{negotiated_version = HelloVersion}),
+ {State2, Actions} = send_handshake(Hello, State1#state{negotiated_version = HelloVersion}),
State3 = State2#state{negotiated_version = Version, %% Requested version
session =
Session0#session{session_id =
Hello#client_hello.session_id}},
{Record, State} = next_record(State3),
- next_event(hello, Record, State);
+ next_event(hello, Record, State, Actions);
hello(internal, #server_hello{} = Hello,
#state{connection_states = ConnectionStates0,
negotiated_version = ReqVersion,
@@ -356,13 +348,13 @@ hello(internal, #server_hello{} = Hello,
hello(internal, {handshake, {#client_hello{cookie = <<>>} = Handshake, _}}, State) ->
%% Initial hello should not be in handshake history
{next_state, hello, State, [{next_event, internal, Handshake}]};
-
hello(internal, {handshake, {#hello_verify_request{} = Handshake, _}}, State) ->
%% hello_verify should not be in handshake history
{next_state, hello, State, [{next_event, internal, Handshake}]};
-
hello(info, Event, State) ->
handle_info(Event, hello, State);
+hello(state_timeout, Event, State) ->
+ handle_state_timeout(Event, hello, State);
hello(Type, Event, State) ->
ssl_connection:hello(Type, Event, State, ?MODULE).
@@ -375,7 +367,11 @@ abbreviated(internal = Type,
ConnectionStates = dtls_record:next_epoch(ConnectionStates1, read),
ssl_connection:abbreviated(Type, Event, State#state{connection_states = ConnectionStates}, ?MODULE);
abbreviated(internal = Type, #finished{} = Event, #state{connection_states = ConnectionStates} = State) ->
- ssl_connection:cipher(Type, Event, prepare_flight(State#state{connection_states = ConnectionStates}), ?MODULE);
+ ssl_connection:abbreviated(Type, Event,
+ prepare_flight(State#state{connection_states = ConnectionStates,
+ flight_state = connection}), ?MODULE);
+abbreviated(state_timeout, Event, State) ->
+ handle_state_timeout(Event, abbreviated, State);
abbreviated(Type, Event, State) ->
ssl_connection:abbreviated(Type, Event, State, ?MODULE).
@@ -383,6 +379,8 @@ certify(info, Event, State) ->
handle_info(Event, certify, State);
certify(internal = Type, #server_hello_done{} = Event, State) ->
ssl_connection:certify(Type, Event, prepare_flight(State), ?MODULE);
+certify(state_timeout, Event, State) ->
+ handle_state_timeout(Event, certify, State);
certify(Type, Event, State) ->
ssl_connection:certify(Type, Event, State, ?MODULE).
@@ -395,7 +393,11 @@ cipher(internal = Type, #change_cipher_spec{type = <<1>>} = Event,
ssl_connection:cipher(Type, Event, State#state{connection_states = ConnectionStates}, ?MODULE);
cipher(internal = Type, #finished{} = Event, #state{connection_states = ConnectionStates} = State) ->
ssl_connection:cipher(Type, Event,
- prepare_flight(State#state{connection_states = ConnectionStates}), ?MODULE);
+ prepare_flight(State#state{connection_states = ConnectionStates,
+ flight_state = connection}),
+ ?MODULE);
+cipher(state_timeout, Event, State) ->
+ handle_state_timeout(Event, cipher, State);
cipher(Type, Event, State) ->
ssl_connection:cipher(Type, Event, State, ?MODULE).
@@ -409,12 +411,12 @@ connection(internal, #hello_request{}, #state{host = Host, port = Port,
renegotiation = {Renegotiation, _}} = State0) ->
Hello = dtls_handshake:client_hello(Host, Port, ConnectionStates0, SslOpts,
Cache, CacheCb, Renegotiation, Cert),
- State1 = send_handshake(Hello, State0),
+ {State1, Actions} = send_handshake(Hello, State0),
{Record, State} =
next_record(
State1#state{session = Session0#session{session_id
= Hello#client_hello.session_id}}),
- next_event(hello, Record, State);
+ next_event(hello, Record, State, Actions);
connection(internal, #client_hello{} = Hello, #state{role = server, allow_renegotiate = true} = State) ->
%% Mitigate Computational DoS attack
%% http://www.educatedguesswork.org/2011/10/ssltls_and_computational_dos.html
@@ -434,7 +436,6 @@ connection(Type, Event, State) ->
downgrade(Type, Event, State) ->
ssl_connection:downgrade(Type, Event, State, ?MODULE).
-
%%--------------------------------------------------------------------
%% Description: This function is called by a gen_fsm when it receives any
%% other message than a synchronous or asynchronous event
@@ -442,16 +443,6 @@ downgrade(Type, Event, State) ->
%%--------------------------------------------------------------------
%% raw data from socket, unpack records
-handle_info({_,flight_retransmission_timeout}, connection, _) ->
- {next_state, keep_state_and_data};
-handle_info({Ref, flight_retransmission_timeout}, StateName,
- #state{flight_state = {waiting, Ref, NextTimeout}} = State0) ->
- State1 = send_handshake_flight(State0#state{flight_state = {retransmit_timer, NextTimeout}},
- retransmit_epoch(StateName, State0)),
- {Record, State} = next_record(State1),
- next_event(StateName, Record, State);
-handle_info({_, flight_retransmission_timeout}, _, _) ->
- {next_state, keep_state_and_data};
handle_info({Protocol, _, _, _, Data}, StateName,
#state{data_tag = Protocol} = State0) ->
case next_dtls_record(Data, State0) of
@@ -489,7 +480,6 @@ handle_call(Event, From, StateName, State) ->
handle_common_event(internal, #alert{} = Alert, StateName,
#state{negotiated_version = Version} = State) ->
ssl_connection:handle_own_alert(Alert, Version, StateName, State);
-
%%% DTLS record protocol level handshake messages
handle_common_event(internal, #ssl_tls{type = ?HANDSHAKE,
fragment = Data},
@@ -498,19 +488,14 @@ handle_common_event(internal, #ssl_tls{type = ?HANDSHAKE,
negotiated_version = Version} = State0) ->
try
case dtls_handshake:get_dtls_handshake(Version, Data, Buffers0) of
- {more_data, Buffers} ->
+ {[], Buffers} ->
{Record, State} = next_record(State0#state{protocol_buffers = Buffers}),
next_event(StateName, Record, State);
{Packets, Buffers} ->
State = State0#state{protocol_buffers = Buffers},
Events = dtls_handshake_events(Packets),
- case StateName of
- connection ->
- ssl_connection:hibernate_after(StateName, State, Events);
- _ ->
- {next_state, StateName,
- State#state{unprocessed_handshake_events = unprocessed_events(Events)}, Events}
- end
+ {next_state, StateName,
+ State#state{unprocessed_handshake_events = unprocessed_events(Events)}, Events}
end
catch throw:#alert{} = Alert ->
ssl_connection:handle_own_alert(Alert, Version, StateName, State0)
@@ -534,6 +519,13 @@ handle_common_event(internal, #ssl_tls{type = ?ALERT, fragment = EncAlerts}, Sta
handle_common_event(internal, #ssl_tls{type = _Unknown}, StateName, State) ->
{next_state, StateName, State}.
+handle_state_timeout(flight_retransmission_timeout, StateName,
+ #state{flight_state = {retransmit, NextTimeout}} = State0) ->
+ {State1, Actions} = send_handshake_flight(State0#state{flight_state = {retransmit, NextTimeout}},
+ retransmit_epoch(StateName, State0)),
+ {Record, State} = next_record(State1),
+ next_event(StateName, Record, State, Actions).
+
send(Transport, {_, {{_,_}, _} = Socket}, Data) ->
send(Transport, Socket, Data);
send(Transport, Socket, Data) ->
@@ -645,7 +637,8 @@ initial_state(Role, Host, Port, Socket, {SSLOptions, SocketOptions, _}, User,
allow_renegotiate = SSLOptions#ssl_options.client_renegotiation,
start_or_recv_from = undefined,
protocol_cb = ?MODULE,
- flight_buffer = new_flight()
+ flight_buffer = new_flight(),
+ flight_state = {retransmit, ?INITIAL_RETRANSMIT_TIMEOUT}
}.
next_dtls_record(Data, #state{protocol_buffers = #protocol_buffers{
@@ -714,14 +707,14 @@ next_event(connection = StateName, no_record,
#state{connection_states = #{current_read := #{epoch := CurrentEpoch}}} = State0, Actions) ->
case next_record_if_active(State0) of
{no_record, State} ->
- ssl_connection:hibernate_after(StateName, State, Actions);
+ ssl_connection:hibernate_after(StateName, State, Actions);
{#ssl_tls{epoch = CurrentEpoch} = Record, State} ->
{next_state, StateName, State, [{next_event, internal, {protocol_record, Record}} | Actions]};
{#ssl_tls{epoch = Epoch,
type = ?HANDSHAKE,
version = _Version}, State1} = _Record when Epoch == CurrentEpoch-1 ->
- State = send_handshake_flight(State1, Epoch),
- {next_state, StateName, State, Actions};
+ {State, MoreActions} = send_handshake_flight(State1, Epoch),
+ {next_state, StateName, State, Actions ++ MoreActions};
{#ssl_tls{epoch = _Epoch,
version = _Version}, State} ->
%% TODO maybe buffer later epoch
@@ -772,17 +765,20 @@ next_flight(Flight) ->
Flight#{handshakes => [],
change_cipher_spec => undefined,
handshakes_after_change_cipher_spec => []}.
-
start_flight(#state{transport_cb = gen_udp,
- flight_state = {retransmit_timer, Timeout}} = State) ->
- Ref = erlang:make_ref(),
- _ = erlang:send_after(Timeout, self(), {Ref, flight_retransmission_timeout}),
- State#state{flight_state = {waiting, Ref, new_timeout(Timeout)}};
-
+ flight_state = {retransmit, Timeout}} = State) ->
+ start_retransmision_timer(Timeout, State);
+start_flight(#state{transport_cb = gen_udp,
+ flight_state = connection} = State) ->
+ {State, []};
start_flight(State) ->
%% No retransmision needed i.e DTLS over SCTP
- State#state{flight_state = reliable}.
+ {State#state{flight_state = reliable}, []}.
+
+start_retransmision_timer(Timeout, State) ->
+ {State#state{flight_state = {retransmit, new_timeout(Timeout)}},
+ [{state_timeout, Timeout, flight_retransmission_timeout}]}.
new_timeout(N) when N =< 30 ->
N * 2;
@@ -806,13 +802,13 @@ renegotiate(#state{role = server,
connection_states = CS0} = State0, Actions) ->
HelloRequest = ssl_handshake:hello_request(),
CS = CS0#{write_msg_seq => 0},
- State1 = send_handshake(HelloRequest,
- State0#state{connection_states =
- CS}),
+ {State1, MoreActions} = send_handshake(HelloRequest,
+ State0#state{connection_states =
+ CS}),
Hs0 = ssl_handshake:init_handshake_history(),
{Record, State} = next_record(State1#state{tls_handshake_history = Hs0,
protocol_buffers = #protocol_buffers{}}),
- next_event(hello, Record, State, Actions).
+ next_event(hello, Record, State, Actions ++ MoreActions).
handle_alerts([], Result) ->
Result;
@@ -823,15 +819,11 @@ handle_alerts([Alert | Alerts], {next_state, StateName, State}) ->
handle_alerts([Alert | Alerts], {next_state, StateName, State, _Actions}) ->
handle_alerts(Alerts, ssl_connection:handle_alert(Alert, StateName, State)).
-retransmit_epoch(StateName, #state{connection_states = ConnectionStates}) ->
+retransmit_epoch(_StateName, #state{connection_states = ConnectionStates}) ->
#{epoch := Epoch} =
ssl_record:current_connection_state(ConnectionStates, write),
- case StateName of
- connection ->
- Epoch-1;
- _ ->
- Epoch
- end.
+ Epoch.
+
update_handshake_history(#hello_verify_request{}, _, Hist) ->
Hist;
@@ -846,3 +838,4 @@ unprocessed_events(Events) ->
%% handshake events left to process before we should
%% process more TLS-records received on the socket.
erlang:length(Events)-1.
+
diff --git a/lib/ssl/src/dtls_handshake.erl b/lib/ssl/src/dtls_handshake.erl
index af3708ddb7..fd1f9698fe 100644
--- a/lib/ssl/src/dtls_handshake.erl
+++ b/lib/ssl/src/dtls_handshake.erl
@@ -136,9 +136,11 @@ handshake_bin([Type, Length, Data], Seq) ->
%%--------------------------------------------------------------------
-spec get_dtls_handshake(dtls_record:dtls_version(), binary(), #protocol_buffers{}) ->
- {[{dtls_handshake(), binary()}], #protocol_buffers{}} | {more_data, #protocol_buffers{}}.
+ {[dtls_handshake()], #protocol_buffers{}}.
%%
-%% Description: ...
+%% Description: Given buffered and new data from dtls_record, collects
+%% and returns it as a list of handshake messages, also returns
+%% possible leftover data in the new "protocol_buffers".
%%--------------------------------------------------------------------
get_dtls_handshake(Version, Fragment, ProtocolBuffers) ->
handle_fragments(Version, Fragment, ProtocolBuffers, []).
@@ -288,8 +290,6 @@ do_handle_fragments(_, [], Buffers, Acc) ->
{lists:reverse(Acc), Buffers};
do_handle_fragments(Version, [Fragment | Fragments], Buffers0, Acc) ->
case reassemble(Version, Fragment, Buffers0) of
- {more_data, _} = More when Acc == []->
- More;
{more_data, Buffers} when Fragments == [] ->
{lists:reverse(Acc), Buffers};
{more_data, Buffers} ->
diff --git a/lib/ssl/src/dtls_record.erl b/lib/ssl/src/dtls_record.erl
index f447897d59..0ee51c24b6 100644
--- a/lib/ssl/src/dtls_record.erl
+++ b/lib/ssl/src/dtls_record.erl
@@ -393,7 +393,7 @@ init_connection_state_seq(_, ConnnectionStates) ->
integer().
%%
%% Description: Returns the epoch the connection_state record
-%% that is currently defined as the current conection state.
+%% that is currently defined as the current connection state.
%%--------------------------------------------------------------------
current_connection_state_epoch(#{current_read := #{epoch := Epoch}},
read) ->
diff --git a/lib/ssl/src/dtls_socket.erl b/lib/ssl/src/dtls_socket.erl
index 570b3ae83a..ac1a7b37c6 100644
--- a/lib/ssl/src/dtls_socket.erl
+++ b/lib/ssl/src/dtls_socket.erl
@@ -71,11 +71,14 @@ connect(Address, Port, #config{transport_info = {Transport, _, _, _} = CbInfo,
close(gen_udp, {_Client, _Socket}) ->
ok.
+socket(Pid, gen_udp = Transport, {{_, _}, Socket}, ConnectionCb) ->
+ #sslsocket{pid = Pid,
+ %% "The name "fd" is keept for backwards compatibility
+ fd = {Transport, Socket, ConnectionCb}};
socket(Pid, Transport, Socket, ConnectionCb) ->
#sslsocket{pid = Pid,
%% "The name "fd" is keept for backwards compatibility
- fd = {Transport, Socket, ConnectionCb}}.
-
+ fd = {Transport, Socket, ConnectionCb}}.
%% What to do with the emulated ones
setopts(gen_udp, #sslsocket{pid = {Socket, _}}, Options) ->
{SockOpts, _} = tls_socket:split_options(Options),
@@ -108,11 +111,15 @@ getstat(gen_udp, {_,Socket}, Options) ->
inet:getstat(Socket, Options);
getstat(Transport, Socket, Options) ->
Transport:getstat(Socket, Options).
+peername(udp, _) ->
+ {error, enotconn};
peername(gen_udp, {_, {Client, _Socket}}) ->
{ok, Client};
peername(Transport, Socket) ->
Transport:peername(Socket).
-sockname(gen_udp, {_,Socket}) ->
+sockname(gen_udp, {_, {_,Socket}}) ->
+ inet:sockname(Socket);
+sockname(gen_udp, Socket) ->
inet:sockname(Socket);
sockname(Transport, Socket) ->
Transport:sockname(Socket).
diff --git a/lib/ssl/src/dtls_udp_listener.erl b/lib/ssl/src/dtls_udp_listener.erl
index b7f115582e..ab3d0783bd 100644
--- a/lib/ssl/src/dtls_udp_listener.erl
+++ b/lib/ssl/src/dtls_udp_listener.erl
@@ -24,7 +24,8 @@
-behaviour(gen_server).
%% API
--export([start_link/4, active_once/3, accept/2, sockname/1]).
+-export([start_link/4, active_once/3, accept/2, sockname/1, close/1,
+ get_all_opts/1]).
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
@@ -39,7 +40,8 @@
clients = set_new(),
dtls_processes = kv_new(),
accepters = queue:new(),
- first
+ first,
+ close
}).
%%%===================================================================
@@ -53,10 +55,14 @@ active_once(UDPConnection, Client, Pid) ->
gen_server:cast(UDPConnection, {active_once, Client, Pid}).
accept(UDPConnection, Accepter) ->
- gen_server:call(UDPConnection, {accept, Accepter}, infinity).
+ call(UDPConnection, {accept, Accepter}).
sockname(UDPConnection) ->
- gen_server:call(UDPConnection, sockname, infinity).
+ call(UDPConnection, sockname).
+close(UDPConnection) ->
+ call(UDPConnection, close).
+get_all_opts(UDPConnection) ->
+ call(UDPConnection, get_all_opts).
%%%===================================================================
%%% gen_server callbacks
@@ -69,10 +75,13 @@ init([Port, EmOpts, InetOptions, DTLSOptions]) ->
first = true,
dtls_options = DTLSOptions,
emulated_options = EmOpts,
- listner = Socket}}
+ listner = Socket,
+ close = false}}
catch _:_ ->
{error, closed}
end.
+handle_call({accept, _}, _, #state{close = true} = State) ->
+ {reply, {error, closed}, State};
handle_call({accept, Accepter}, From, #state{first = true,
accepters = Accepters,
@@ -87,7 +96,21 @@ handle_call({accept, Accepter}, From, #state{accepters = Accepters} = State0) ->
{noreply, State};
handle_call(sockname, _, #state{listner = Socket} = State) ->
Reply = inet:sockname(Socket),
- {reply, Reply, State}.
+ {reply, Reply, State};
+handle_call(close, _, #state{dtls_processes = Processes,
+ accepters = Accepters} = State) ->
+ case kv_empty(Processes) of
+ true ->
+ {stop, normal, ok, State#state{close=true}};
+ false ->
+ lists:foreach(fun({_, From}) ->
+ gen_server:reply(From, {error, closed})
+ end, queue:to_list(Accepters)),
+ {reply, ok, State#state{close = true, accepters = queue:new()}}
+ end;
+handle_call(get_all_opts, _, #state{dtls_options = DTLSOptions,
+ emulated_options = EmOpts} = State) ->
+ {reply, {ok, EmOpts, DTLSOptions}, State}.
handle_cast({active_once, Client, Pid}, State0) ->
State = handle_active_once(Client, Pid, State0),
@@ -99,11 +122,17 @@ handle_info({udp, Socket, IP, InPortNo, _} = Msg, #state{listner = Socket} = Sta
{noreply, State};
handle_info({'DOWN', _, process, Pid, _}, #state{clients = Clients,
- dtls_processes = Processes0} = State) ->
+ dtls_processes = Processes0,
+ close = ListenClosed} = State) ->
Client = kv_get(Pid, Processes0),
Processes = kv_delete(Pid, Processes0),
- {noreply, State#state{clients = set_delete(Client, Clients),
- dtls_processes = Processes}}.
+ case ListenClosed andalso kv_empty(Processes) of
+ true ->
+ {stop, normal, State};
+ false ->
+ {noreply, State#state{clients = set_delete(Client, Clients),
+ dtls_processes = Processes}}
+ end.
terminate(_Reason, _State) ->
ok.
@@ -182,6 +211,7 @@ setup_new_connection(User, From, Client, Msg, #state{dtls_processes = Processes,
gen_server:reply(From, {error, Reason}),
State
end.
+
kv_update(Key, Value, Store) ->
gb_trees:update(Key, Value, Store).
kv_lookup(Key, Store) ->
@@ -194,6 +224,8 @@ kv_delete(Key, Store) ->
gb_trees:delete(Key, Store).
kv_new() ->
gb_trees:empty().
+kv_empty(Store) ->
+ gb_trees:is_empty(Store).
set_new() ->
gb_sets:empty().
@@ -203,3 +235,15 @@ set_delete(Item, Set) ->
gb_sets:delete(Item, Set).
set_is_member(Item, Set) ->
gb_sets:is_member(Item, Set).
+
+call(Server, Msg) ->
+ try
+ gen_server:call(Server, Msg, infinity)
+ catch
+ exit:{noproc, _} ->
+ {error, closed};
+ exit:{normal, _} ->
+ {error, closed};
+ exit:{{shutdown, _},_} ->
+ {error, closed}
+ end.
diff --git a/lib/ssl/src/dtls_v1.erl b/lib/ssl/src/dtls_v1.erl
index ffd3e4b833..dd0d35d404 100644
--- a/lib/ssl/src/dtls_v1.erl
+++ b/lib/ssl/src/dtls_v1.erl
@@ -21,12 +21,21 @@
-include("ssl_cipher.hrl").
--export([suites/1, mac_hash/7, ecc_curves/1, corresponding_tls_version/1, corresponding_dtls_version/1]).
+-export([suites/1, all_suites/1, mac_hash/7, ecc_curves/1,
+ corresponding_tls_version/1, corresponding_dtls_version/1]).
-spec suites(Minor:: 253|255) -> [ssl_cipher:cipher_suite()].
suites(Minor) ->
- tls_v1:suites(corresponding_minor_tls_version(Minor)).
+ lists:filter(fun(Cipher) ->
+ is_acceptable_cipher(ssl_cipher:suite_definition(Cipher))
+ end,
+ tls_v1:suites(corresponding_minor_tls_version(Minor))).
+all_suites(Version) ->
+ lists:filter(fun(Cipher) ->
+ is_acceptable_cipher(ssl_cipher:suite_definition(Cipher))
+ end,
+ ssl_cipher:all_suites(corresponding_tls_version(Version))).
mac_hash(Version, MacAlg, MacSecret, SeqNo, Type, Length, Fragment) ->
tls_v1:mac_hash(MacAlg, MacSecret, SeqNo, Type, Version,
@@ -50,3 +59,5 @@ corresponding_minor_dtls_version(2) ->
255;
corresponding_minor_dtls_version(3) ->
253.
+is_acceptable_cipher(Suite) ->
+ not ssl_cipher:is_stream_ciphersuite(Suite).
diff --git a/lib/ssl/src/ssl.app.src b/lib/ssl/src/ssl.app.src
index 9c5d795848..064dcd6892 100644
--- a/lib/ssl/src/ssl.app.src
+++ b/lib/ssl/src/ssl.app.src
@@ -10,12 +10,14 @@
tls_v1,
ssl_v3,
ssl_v2,
+ tls_connection_sup,
%% DTLS
dtls_connection,
dtls_handshake,
dtls_record,
dtls_socket,
dtls_v1,
+ dtls_connection_sup,
dtls_udp_listener,
dtls_udp_sup,
%% API
@@ -31,16 +33,19 @@
ssl_cipher,
ssl_srp_primes,
ssl_alert,
- ssl_listen_tracker_sup,
+ ssl_listen_tracker_sup, %% may be used by DTLS over SCTP
%% Erlang Distribution over SSL/TLS
inet_tls_dist,
inet6_tls_dist,
ssl_tls_dist_proxy,
ssl_dist_sup,
- %% SSL/TLS session handling
+ ssl_dist_connection_sup,
+ ssl_dist_admin_sup,
+ %% SSL/TLS session and cert handling
ssl_session,
ssl_session_cache,
ssl_manager,
+ ssl_pem_cache,
ssl_pkix_db,
ssl_certificate,
%% CRL handling
@@ -51,14 +56,14 @@
%% App structure
ssl_app,
ssl_sup,
- tls_connection_sup,
- dtls_connection_sup
+ ssl_admin_sup,
+ ssl_connection_sup
]},
{registered, [ssl_sup, ssl_manager]},
{applications, [crypto, public_key, kernel, stdlib]},
{env, []},
{mod, {ssl_app, []}},
- {runtime_dependencies, ["stdlib-3.1","public_key-1.2","kernel-3.0",
+ {runtime_dependencies, ["stdlib-3.2","public_key-1.2","kernel-3.0",
"erts-7.0","crypto-3.3", "inets-5.10.7"]}]}.
diff --git a/lib/ssl/src/ssl.appup.src b/lib/ssl/src/ssl.appup.src
index 32252386b4..bfdd0c205b 100644
--- a/lib/ssl/src/ssl.appup.src
+++ b/lib/ssl/src/ssl.appup.src
@@ -1,11 +1,19 @@
%% -*- erlang -*-
{"%VSN%",
[
- {<<"^8[.]0([.][0-9]+)?$">>, [{restart_application, ssl}]},
- {<<"^[3-7][.][^.].*">>, [{restart_application, ssl}]}
+ {<<"8\\..*">>, [{restart_application, ssl}]},
+ {<<"7\\..*">>, [{restart_application, ssl}]},
+ {<<"6\\..*">>, [{restart_application, ssl}]},
+ {<<"5\\..*">>, [{restart_application, ssl}]},
+ {<<"4\\..*">>, [{restart_application, ssl}]},
+ {<<"3\\..*">>, [{restart_application, ssl}]}
],
[
- {<<"^8[.]0([.][0-9]+)?$">>, [{restart_application, ssl}]},
- {<<"^[3-7][.][^.].*">>, [{restart_application, ssl}]}
- ]
+ {<<"8\\..*">>, [{restart_application, ssl}]},
+ {<<"7\\..*">>, [{restart_application, ssl}]},
+ {<<"6\\..*">>, [{restart_application, ssl}]},
+ {<<"5\\..*">>, [{restart_application, ssl}]},
+ {<<"4\\..*">>, [{restart_application, ssl}]},
+ {<<"3\\..*">>, [{restart_application, ssl}]}
+ ]
}.
diff --git a/lib/ssl/src/ssl.erl b/lib/ssl/src/ssl.erl
index c72ee44a95..ed04c7e67b 100644
--- a/lib/ssl/src/ssl.erl
+++ b/lib/ssl/src/ssl.erl
@@ -187,16 +187,24 @@ ssl_accept(ListenSocket, SslOptions) when is_port(ListenSocket) ->
ssl_accept(#sslsocket{} = Socket, [], Timeout) when (is_integer(Timeout) andalso Timeout >= 0) or (Timeout == infinity)->
ssl_accept(Socket, Timeout);
-ssl_accept(#sslsocket{fd = {_, _, _, Tracker}} = Socket, SslOpts0, Timeout) when
+ssl_accept(#sslsocket{fd = {_, _, _, Tracker}} = Socket, SslOpts, Timeout) when
(is_integer(Timeout) andalso Timeout >= 0) or (Timeout == infinity)->
try
- {ok, EmOpts, InheritedSslOpts} = tls_socket:get_all_opts(Tracker),
- SslOpts = handle_options(SslOpts0, InheritedSslOpts),
+ {ok, EmOpts, _} = tls_socket:get_all_opts(Tracker),
ssl_connection:handshake(Socket, {SslOpts,
tls_socket:emulated_socket_options(EmOpts, #socket_options{})}, Timeout)
catch
Error = {error, _Reason} -> Error
end;
+ssl_accept(#sslsocket{pid = Pid, fd = {_, _, _}} = Socket, SslOpts, Timeout) when
+ (is_integer(Timeout) andalso Timeout >= 0) or (Timeout == infinity)->
+ try
+ {ok, EmOpts, _} = dtls_udp_listener:get_all_opts(Pid),
+ ssl_connection:handshake(Socket, {SslOpts,
+ tls_socket:emulated_socket_options(EmOpts, #socket_options{})}, Timeout)
+ catch
+ Error = {error, _Reason} -> Error
+ end;
ssl_accept(Socket, SslOptions, Timeout) when is_port(Socket),
(is_integer(Timeout) andalso Timeout >= 0) or (Timeout == infinity) ->
{Transport,_,_,_} =
@@ -215,7 +223,6 @@ ssl_accept(Socket, SslOptions, Timeout) when is_port(Socket),
catch
Error = {error, _Reason} -> Error
end.
-
%%--------------------------------------------------------------------
-spec close(#sslsocket{}) -> term().
%%
@@ -223,6 +230,8 @@ ssl_accept(Socket, SslOptions, Timeout) when is_port(Socket),
%%--------------------------------------------------------------------
close(#sslsocket{pid = Pid}) when is_pid(Pid) ->
ssl_connection:close(Pid, {close, ?DEFAULT_TIMEOUT});
+close(#sslsocket{pid = {udp, #config{udp_handler = {Pid, _}}}}) ->
+ dtls_udp_listener:close(Pid);
close(#sslsocket{pid = {ListenSocket, #config{transport_info={Transport,_, _, _}}}}) ->
Transport:close(ListenSocket).
@@ -251,6 +260,8 @@ send(#sslsocket{pid = Pid}, Data) when is_pid(Pid) ->
ssl_connection:send(Pid, Data);
send(#sslsocket{pid = {_, #config{transport_info={gen_udp, _, _, _}}}}, _) ->
{error,enotconn}; %% Emulate connection behaviour
+send(#sslsocket{pid = {udp,_}}, _) ->
+ {error,enotconn};
send(#sslsocket{pid = {ListenSocket, #config{transport_info={Transport, _, _, _}}}}, Data) ->
Transport:send(ListenSocket, Data). %% {error,enotconn}
@@ -265,6 +276,8 @@ recv(Socket, Length) ->
recv(#sslsocket{pid = Pid}, Length, Timeout) when is_pid(Pid),
(is_integer(Timeout) andalso Timeout >= 0) or (Timeout == infinity)->
ssl_connection:recv(Pid, Length, Timeout);
+recv(#sslsocket{pid = {udp,_}}, _, _) ->
+ {error,enotconn};
recv(#sslsocket{pid = {Listen,
#config{transport_info = {Transport, _, _, _}}}}, _,_) when is_port(Listen)->
Transport:recv(Listen, 0). %% {error,enotconn}
@@ -277,10 +290,14 @@ recv(#sslsocket{pid = {Listen,
%%--------------------------------------------------------------------
controlling_process(#sslsocket{pid = Pid}, NewOwner) when is_pid(Pid), is_pid(NewOwner) ->
ssl_connection:new_user(Pid, NewOwner);
+controlling_process(#sslsocket{pid = {udp, _}},
+ NewOwner) when is_pid(NewOwner) ->
+ ok; %% Meaningless but let it be allowed to conform with TLS
controlling_process(#sslsocket{pid = {Listen,
#config{transport_info = {Transport, _, _, _}}}},
NewOwner) when is_port(Listen),
is_pid(NewOwner) ->
+ %% Meaningless but let it be allowed to conform with normal sockets
Transport:controlling_process(Listen, NewOwner).
@@ -297,7 +314,9 @@ connection_information(#sslsocket{pid = Pid}) when is_pid(Pid) ->
Error
end;
connection_information(#sslsocket{pid = {Listen, _}}) when is_port(Listen) ->
- {error, enotconn}.
+ {error, enotconn};
+connection_information(#sslsocket{pid = {udp,_}}) ->
+ {error,enotconn}.
%%--------------------------------------------------------------------
-spec connection_information(#sslsocket{}, [atom()]) -> {ok, list()} | {error, reason()}.
@@ -333,10 +352,18 @@ connection_info(#sslsocket{} = SSLSocket) ->
%%
%% Description: same as inet:peername/1.
%%--------------------------------------------------------------------
+peername(#sslsocket{pid = Pid, fd = {Transport, Socket, _}}) when is_pid(Pid)->
+ dtls_socket:peername(Transport, Socket);
peername(#sslsocket{pid = Pid, fd = {Transport, Socket, _, _}}) when is_pid(Pid)->
tls_socket:peername(Transport, Socket);
+peername(#sslsocket{pid = {udp = Transport, #config{udp_handler = {_Pid, _}}}}) ->
+ dtls_socket:peername(Transport, undefined);
+peername(#sslsocket{pid = Pid, fd = {gen_udp= Transport, Socket, _, _}}) when is_pid(Pid) ->
+ dtls_socket:peername(Transport, Socket);
peername(#sslsocket{pid = {ListenSocket, #config{transport_info = {Transport,_,_,_}}}}) ->
- tls_socket:peername(Transport, ListenSocket). %% Will return {error, enotconn}
+ tls_socket:peername(Transport, ListenSocket); %% Will return {error, enotconn}
+peername(#sslsocket{pid = {udp,_}}) ->
+ {error,enotconn}.
%%--------------------------------------------------------------------
-spec peercert(#sslsocket{}) ->{ok, DerCert::binary()} | {error, reason()}.
@@ -350,6 +377,8 @@ peercert(#sslsocket{pid = Pid}) when is_pid(Pid) ->
Result ->
Result
end;
+peercert(#sslsocket{pid = {udp, _}}) ->
+ {error, enotconn};
peercert(#sslsocket{pid = {Listen, _}}) when is_port(Listen) ->
{error, enotconn}.
@@ -506,6 +535,8 @@ getstat(#sslsocket{pid = Pid, fd = {Transport, Socket, _, _}}, Options) when is_
shutdown(#sslsocket{pid = {Listen, #config{transport_info = {Transport,_, _, _}}}},
How) when is_port(Listen) ->
Transport:shutdown(Listen, How);
+shutdown(#sslsocket{pid = {udp,_}},_) ->
+ {error, enotconn};
shutdown(#sslsocket{pid = Pid}, How) ->
ssl_connection:shutdown(Pid, How).
@@ -518,7 +549,7 @@ sockname(#sslsocket{pid = {Listen, #config{transport_info = {Transport, _, _, _
tls_socket:sockname(Transport, Listen);
sockname(#sslsocket{pid = {udp, #config{udp_handler = {Pid, _}}}}) ->
dtls_udp_listener:sockname(Pid);
-sockname(#sslsocket{pid = Pid, fd = {gen_udp= Transport, Socket, _, _}}) when is_pid(Pid) ->
+sockname(#sslsocket{pid = Pid, fd = {Transport, Socket, _}}) when is_pid(Pid) ->
dtls_socket:sockname(Transport, Socket);
sockname(#sslsocket{pid = Pid, fd = {Transport, Socket, _, _}}) when is_pid(Pid) ->
tls_socket:sockname(Transport, Socket).
@@ -531,6 +562,8 @@ sockname(#sslsocket{pid = Pid, fd = {Transport, Socket, _, _}}) when is_pid(Pid)
%%--------------------------------------------------------------------
session_info(#sslsocket{pid = Pid}) when is_pid(Pid) ->
ssl_connection:session_info(Pid);
+session_info(#sslsocket{pid = {udp,_}}) ->
+ {error, enotconn};
session_info(#sslsocket{pid = {Listen,_}}) when is_port(Listen) ->
{error, enotconn}.
@@ -555,6 +588,8 @@ versions() ->
%%--------------------------------------------------------------------
renegotiate(#sslsocket{pid = Pid}) when is_pid(Pid) ->
ssl_connection:renegotiation(Pid);
+renegotiate(#sslsocket{pid = {udp,_}}) ->
+ {error, enotconn};
renegotiate(#sslsocket{pid = {Listen,_}}) when is_port(Listen) ->
{error, enotconn}.
@@ -568,6 +603,8 @@ renegotiate(#sslsocket{pid = {Listen,_}}) when is_port(Listen) ->
prf(#sslsocket{pid = Pid},
Secret, Label, Seed, WantedLength) when is_pid(Pid) ->
ssl_connection:prf(Pid, Secret, Label, Seed, WantedLength);
+prf(#sslsocket{pid = {udp,_}}, _,_,_,_) ->
+ {error, enotconn};
prf(#sslsocket{pid = {Listen,_}}, _,_,_,_) when is_port(Listen) ->
{error, enotconn}.
@@ -577,7 +614,7 @@ prf(#sslsocket{pid = {Listen,_}}, _,_,_,_) when is_port(Listen) ->
%% Description: Clear the PEM cache
%%--------------------------------------------------------------------
clear_pem_cache() ->
- ssl_manager:clear_pem_cache().
+ ssl_pem_cache:clear().
%%---------------------------------------------------------------
-spec format_error({error, term()}) -> list().
@@ -696,7 +733,7 @@ handle_options(Opts0, Role) ->
[RecordCb:protocol_version(Vsn) || Vsn <- Vsns]
end,
- Protocol = proplists:get_value(protocol, Opts, tls),
+ Protocol = handle_option(protocol, Opts, tls),
SSLOptions = #ssl_options{
versions = Versions,
@@ -755,7 +792,7 @@ handle_options(Opts0, Role) ->
honor_ecc_order = handle_option(honor_ecc_order, Opts,
default_option_role(server, false, Role),
server, Role),
- protocol = Protocol,
+ protocol = Protocol,
padding_check = proplists:get_value(padding_check, Opts, true),
beast_mitigation = handle_option(beast_mitigation, Opts, one_n_minus_one),
fallback = handle_option(fallback, Opts,
@@ -765,7 +802,8 @@ handle_options(Opts0, Role) ->
client, Role),
crl_check = handle_option(crl_check, Opts, false),
crl_cache = handle_option(crl_cache, Opts, {ssl_crl_cache, {internal, []}}),
- v2_hello_compatible = handle_option(v2_hello_compatible, Opts, false)
+ v2_hello_compatible = handle_option(v2_hello_compatible, Opts, false),
+ max_handshake_size = handle_option(max_handshake_size, Opts, ?DEFAULT_MAX_HANDSHAKE_SIZE)
},
CbInfo = proplists:get_value(cb_info, Opts, default_cb_info(Protocol)),
@@ -780,7 +818,8 @@ handle_options(Opts0, Role) ->
alpn_preferred_protocols, next_protocols_advertised,
client_preferred_next_protocols, log_alert,
server_name_indication, honor_cipher_order, padding_check, crl_check, crl_cache,
- fallback, signature_algs, eccs, honor_ecc_order, beast_mitigation, v2_hello_compatible],
+ fallback, signature_algs, eccs, honor_ecc_order, beast_mitigation, v2_hello_compatible,
+ max_handshake_size],
SockOpts = lists:foldl(fun(Key, PropList) ->
proplists:delete(Key, PropList)
@@ -1028,6 +1067,12 @@ validate_option(beast_mitigation, Value) when Value == one_n_minus_one orelse
Value;
validate_option(v2_hello_compatible, Value) when is_boolean(Value) ->
Value;
+validate_option(max_handshake_size, Value) when is_integer(Value) andalso Value =< ?MAX_UNIT24 ->
+ Value;
+validate_option(protocol, Value = tls) ->
+ Value;
+validate_option(protocol, Value = dtls) ->
+ Value;
validate_option(Opt, Value) ->
throw({error, {options, {Opt, Value}}}).
@@ -1065,17 +1110,37 @@ validate_binary_list(Opt, List) ->
(Bin) ->
throw({error, {options, {Opt, {invalid_protocol, Bin}}}})
end, List).
-
validate_versions([], Versions) ->
Versions;
validate_versions([Version | Rest], Versions) when Version == 'tlsv1.2';
Version == 'tlsv1.1';
Version == tlsv1;
Version == sslv3 ->
- validate_versions(Rest, Versions);
+ tls_validate_versions(Rest, Versions);
+validate_versions([Version | Rest], Versions) when Version == 'dtlsv1';
+ Version == 'dtlsv2'->
+ dtls_validate_versions(Rest, Versions);
validate_versions([Ver| _], Versions) ->
throw({error, {options, {Ver, {versions, Versions}}}}).
+tls_validate_versions([], Versions) ->
+ Versions;
+tls_validate_versions([Version | Rest], Versions) when Version == 'tlsv1.2';
+ Version == 'tlsv1.1';
+ Version == tlsv1;
+ Version == sslv3 ->
+ tls_validate_versions(Rest, Versions);
+tls_validate_versions([Ver| _], Versions) ->
+ throw({error, {options, {Ver, {versions, Versions}}}}).
+
+dtls_validate_versions([], Versions) ->
+ Versions;
+dtls_validate_versions([Version | Rest], Versions) when Version == 'dtlsv1';
+ Version == 'dtlsv2'->
+ dtls_validate_versions(Rest, Versions);
+dtls_validate_versions([Ver| _], Versions) ->
+ throw({error, {options, {Ver, {versions, Versions}}}}).
+
validate_inet_option(mode, Value)
when Value =/= list, Value =/= binary ->
throw({error, {options, {mode,Value}}});
@@ -1147,18 +1212,18 @@ handle_cipher_option(Value, Version) when is_list(Value) ->
binary_cipher_suites(Version, []) ->
%% Defaults to all supported suites that does
%% not require explicit configuration
- ssl_cipher:filter_suites(ssl_cipher:suites(Version));
+ ssl_cipher:filter_suites(ssl_cipher:suites(tls_version(Version)));
binary_cipher_suites(Version, [Tuple|_] = Ciphers0) when is_tuple(Tuple) ->
Ciphers = [ssl_cipher:suite(C) || C <- Ciphers0],
binary_cipher_suites(Version, Ciphers);
binary_cipher_suites(Version, [Cipher0 | _] = Ciphers0) when is_binary(Cipher0) ->
- All = ssl_cipher:all_suites(Version),
+ All = ssl_cipher:all_suites(tls_version(Version)),
case [Cipher || Cipher <- Ciphers0, lists:member(Cipher, All)] of
[] ->
%% Defaults to all supported suites that does
%% not require explicit configuration
- ssl_cipher:filter_suites(ssl_cipher:suites(Version));
+ ssl_cipher:filter_suites(ssl_cipher:suites(tls_version(Version)));
Ciphers ->
Ciphers
end;
@@ -1171,7 +1236,8 @@ binary_cipher_suites(Version, Ciphers0) ->
Ciphers = [ssl_cipher:openssl_suite(C) || C <- string:tokens(Ciphers0, ":")],
binary_cipher_suites(Version, Ciphers).
-handle_eccs_option(Value, {_Major, Minor}) when is_list(Value) ->
+handle_eccs_option(Value, Version) when is_list(Value) ->
+ {_Major, Minor} = tls_version(Version),
try tls_v1:ecc_curves(Minor, Value) of
Curves -> #elliptic_curves{elliptic_curve_list = Curves}
catch
@@ -1344,7 +1410,10 @@ new_ssl_options([{signature_algs, Value} | Rest], #ssl_options{} = Opts, RecordC
handle_hashsigns_option(Value,
tls_version(RecordCB:highest_protocol_version()))},
RecordCB);
-
+new_ssl_options([{protocol, dtls = Value} | Rest], #ssl_options{} = Opts, dtls_record = RecordCB) ->
+ new_ssl_options(Rest, Opts#ssl_options{protocol = Value}, RecordCB);
+new_ssl_options([{protocol, tls = Value} | Rest], #ssl_options{} = Opts, tls_record = RecordCB) ->
+ new_ssl_options(Rest, Opts#ssl_options{protocol = Value}, RecordCB);
new_ssl_options([{Key, Value} | _Rest], #ssl_options{}, _) ->
throw({error, {options, {Key, Value}}}).
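
The ssl.erl hunks above add two user-visible options to handle_options/2: max_handshake_size, validated against ?MAX_UNIT24 and defaulting to ?DEFAULT_MAX_HANDSHAKE_SIZE, and an explicit protocol choice of tls or dtls, with TLS and DTLS version lists now validated separately. A minimal usage sketch, assuming the diff is applied as shown; the host, port and 128 kB limit are illustrative only:

    connect_with_limit(Host, Port) ->
        %% Option names follow the validate_option/2 clauses added above;
        %% the values are examples, not defaults.
        ssl:connect(Host, Port, [{protocol, tls},
                                 {versions, ['tlsv1.2']},
                                 {max_handshake_size, 128 * 1024}]).
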
diff --git a/lib/ssl/src/ssl_admin_sup.erl b/lib/ssl/src/ssl_admin_sup.erl
new file mode 100644
index 0000000000..9c96435753
--- /dev/null
+++ b/lib/ssl/src/ssl_admin_sup.erl
@@ -0,0 +1,95 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1998-2016. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+
+-module(ssl_admin_sup).
+
+-behaviour(supervisor).
+
+%% API
+-export([start_link/0, manager_opts/0]).
+
+%% Supervisor callback
+-export([init/1]).
+
+%%%=========================================================================
+%%% API
+%%%=========================================================================
+
+-spec start_link() -> {ok, pid()} | ignore | {error, term()}.
+
+start_link() ->
+ supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+%%%=========================================================================
+%%% Supervisor callback
+%%%=========================================================================
+
+init([]) ->
+ PEMCache = pem_cache_child_spec(),
+ SessionCertManager = session_and_cert_manager_child_spec(),
+ {ok, {{rest_for_one, 10, 3600}, [PEMCache, SessionCertManager]}}.
+
+manager_opts() ->
+ CbOpts = case application:get_env(ssl, session_cb) of
+ {ok, Cb} when is_atom(Cb) ->
+ InitArgs = session_cb_init_args(),
+ [{session_cb, Cb}, {session_cb_init_args, InitArgs}];
+ _ ->
+ []
+ end,
+ case application:get_env(ssl, session_lifetime) of
+ {ok, Time} when is_integer(Time) ->
+ [{session_lifetime, Time}| CbOpts];
+ _ ->
+ CbOpts
+ end.
+
+%%--------------------------------------------------------------------
+%%% Internal functions
+%%--------------------------------------------------------------------
+
+pem_cache_child_spec() ->
+ Name = ssl_pem_cache,
+ StartFunc = {ssl_pem_cache, start_link, [[]]},
+ Restart = permanent,
+ Shutdown = 4000,
+ Modules = [ssl_pem_cache],
+ Type = worker,
+ {Name, StartFunc, Restart, Shutdown, Type, Modules}.
+
+session_and_cert_manager_child_spec() ->
+ Opts = manager_opts(),
+ Name = ssl_manager,
+ StartFunc = {ssl_manager, start_link, [Opts]},
+ Restart = permanent,
+ Shutdown = 4000,
+ Modules = [ssl_manager],
+ Type = worker,
+ {Name, StartFunc, Restart, Shutdown, Type, Modules}.
+
+session_cb_init_args() ->
+ case application:get_env(ssl, session_cb_init_args) of
+ {ok, Args} when is_list(Args) ->
+ Args;
+ _ ->
+ []
+ end.
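
The new ssl_admin_sup module is a rest_for_one supervisor owning the PEM cache and the session/certificate manager; manager_opts/0 reads the session_cb, session_cb_init_args and session_lifetime keys from the ssl application environment. A sketch of a sys.config entry those functions would pick up; the values shown are illustrative:

    [{ssl, [{session_lifetime, 600},
            {session_cb, ssl_session_cache},
            {session_cb_init_args, []}]}].
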
diff --git a/lib/ssl/src/ssl_certificate.erl b/lib/ssl/src/ssl_certificate.erl
index f359655d85..8aa2aa4081 100644
--- a/lib/ssl/src/ssl_certificate.erl
+++ b/lib/ssl/src/ssl_certificate.erl
@@ -125,21 +125,21 @@ file_to_crls(File, DbHandle) ->
%% Description: Validates ssl/tls specific extensions
%%--------------------------------------------------------------------
validate(_,{extension, #'Extension'{extnID = ?'id-ce-extKeyUsage',
- extnValue = KeyUse}}, {Role, _,_, _, _}) ->
+ extnValue = KeyUse}}, UserState = {Role, _,_, _, _}) ->
case is_valid_extkey_usage(KeyUse, Role) of
true ->
- {valid, Role};
+ {valid, UserState};
false ->
{fail, {bad_cert, invalid_ext_key_usage}}
end;
-validate(_, {extension, _}, Role) ->
- {unknown, Role};
+validate(_, {extension, _}, UserState) ->
+ {unknown, UserState};
validate(_, {bad_cert, _} = Reason, _) ->
{fail, Reason};
-validate(_, valid, Role) ->
- {valid, Role};
-validate(_, valid_peer, Role) ->
- {valid, Role}.
+validate(_, valid, UserState) ->
+ {valid, UserState};
+validate(_, valid_peer, UserState) ->
+ {valid, UserState}.
%%--------------------------------------------------------------------
-spec is_valid_key_usage(list(), term()) -> boolean().
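
The ssl_certificate.erl change threads the caller's complete UserState through validate/3 instead of collapsing it to the role atom, so state supplied by a user verify fun survives the extension checks. A sketch of a verify fun with the matching result contract ({valid, State} | {fail, Reason} | {unknown, State}); the fun body and its initial state ([] here) are placeholders:

    VerifyFun = {fun(_Cert, {bad_cert, _} = Reason, _State) ->
                         {fail, Reason};
                    (_Cert, {extension, _}, State) ->
                         {unknown, State};
                    (_Cert, valid, State) ->
                         {valid, State};
                    (_Cert, valid_peer, State) ->
                         {valid, State}
                 end, []}.

Such a tuple is what the verify_fun ssl option carries.
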
diff --git a/lib/ssl/src/ssl_cipher.erl b/lib/ssl/src/ssl_cipher.erl
index 32fec03b8e..8e6860e9dc 100644
--- a/lib/ssl/src/ssl_cipher.erl
+++ b/lib/ssl/src/ssl_cipher.erl
@@ -40,7 +40,8 @@
ec_keyed_suites/0, anonymous_suites/1, psk_suites/1, srp_suites/0,
rc4_suites/1, des_suites/1, openssl_suite/1, openssl_suite_name/1, filter/2, filter_suites/1,
hash_algorithm/1, sign_algorithm/1, is_acceptable_hash/2, is_fallback/1,
- random_bytes/1, calc_aad/3, calc_mac_hash/4]).
+ random_bytes/1, calc_aad/3, calc_mac_hash/4,
+ is_stream_ciphersuite/1]).
-export_type([cipher_suite/0,
erl_cipher_suite/0, openssl_cipher_suite/0,
@@ -310,18 +311,21 @@ aead_decipher(Type, #cipher_state{key = Key, iv = IV} = CipherState,
%%--------------------------------------------------------------------
suites({3, 0}) ->
ssl_v3:suites();
-suites({3, N}) ->
- tls_v1:suites(N);
-suites(Version) ->
- suites(dtls_v1:corresponding_tls_version(Version)).
+suites({3, Minor}) ->
+ tls_v1:suites(Minor);
+suites({_, Minor}) ->
+ dtls_v1:suites(Minor).
-all_suites(Version) ->
+all_suites({3, _} = Version) ->
suites(Version)
++ anonymous_suites(Version)
++ psk_suites(Version)
++ srp_suites()
++ rc4_suites(Version)
- ++ des_suites(Version).
+ ++ des_suites(Version);
+all_suites(Version) ->
+ dtls_v1:all_suites(Version).
+
%%--------------------------------------------------------------------
-spec anonymous_suites(ssl_record:ssl_version() | integer()) -> [cipher_suite()].
%%
@@ -1541,6 +1545,10 @@ calc_mac_hash(Type, Version,
MacSecret, SeqNo, Type,
Length, PlainFragment).
+is_stream_ciphersuite({_, rc4_128, _, _}) ->
+ true;
+is_stream_ciphersuite(_) ->
+ false.
%%--------------------------------------------------------------------
%%% Internal functions
%%--------------------------------------------------------------------
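
ssl_cipher.erl now selects DTLS suites through dtls_v1 and exports is_stream_ciphersuite/1, which only inspects the cipher position of a decoded suite tuple. A sketch assuming four-element {KeyExchange, Cipher, MAC, PRF} tuples; the concrete values are illustrative:

    true  = ssl_cipher:is_stream_ciphersuite({rsa, rc4_128, sha, default_prf}),
    false = ssl_cipher:is_stream_ciphersuite({rsa, aes_128_cbc, sha, default_prf}).
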
diff --git a/lib/ssl/src/ssl_config.erl b/lib/ssl/src/ssl_config.erl
index 0652d029c3..09d4c3e678 100644
--- a/lib/ssl/src/ssl_config.erl
+++ b/lib/ssl/src/ssl_config.erl
@@ -32,18 +32,20 @@ init(SslOpts, Role) ->
init_manager_name(SslOpts#ssl_options.erl_dist),
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbHandle, OwnCert}
+ {ok, #{pem_cache := PemCache} = Config}
= init_certificates(SslOpts, Role),
PrivateKey =
- init_private_key(PemCacheHandle, SslOpts#ssl_options.key, SslOpts#ssl_options.keyfile,
+ init_private_key(PemCache, SslOpts#ssl_options.key, SslOpts#ssl_options.keyfile,
SslOpts#ssl_options.password, Role),
- DHParams = init_diffie_hellman(PemCacheHandle, SslOpts#ssl_options.dh, SslOpts#ssl_options.dhfile, Role),
- {ok, CertDbRef, CertDbHandle, FileRefHandle, CacheHandle, CRLDbHandle, OwnCert, PrivateKey, DHParams}.
+ DHParams = init_diffie_hellman(PemCache, SslOpts#ssl_options.dh, SslOpts#ssl_options.dhfile, Role),
+ {ok, Config#{private_key => PrivateKey, dh_params => DHParams}}.
init_manager_name(false) ->
- put(ssl_manager, ssl_manager:manager_name(normal));
+ put(ssl_manager, ssl_manager:name(normal)),
+ put(ssl_pem_cache, ssl_pem_cache:name(normal));
init_manager_name(true) ->
- put(ssl_manager, ssl_manager:manager_name(dist)).
+ put(ssl_manager, ssl_manager:name(dist)),
+ put(ssl_pem_cache, ssl_pem_cache:name(dist)).
init_certificates(#ssl_options{cacerts = CaCerts,
cacertfile = CACertFile,
@@ -51,7 +53,7 @@ init_certificates(#ssl_options{cacerts = CaCerts,
cert = Cert,
crl_cache = CRLCache
}, Role) ->
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo} =
+ {ok, Config} =
try
Certs = case CaCerts of
undefined ->
@@ -59,41 +61,37 @@ init_certificates(#ssl_options{cacerts = CaCerts,
_ ->
{der, CaCerts}
end,
- {ok, _, _, _, _, _, _} = ssl_manager:connection_init(Certs, Role, CRLCache)
+ {ok,_} = ssl_manager:connection_init(Certs, Role, CRLCache)
catch
_:Reason ->
file_error(CACertFile, {cacertfile, Reason})
end,
- init_certificates(Cert, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle,
- CacheHandle, CRLDbInfo, CertFile, Role).
+ init_certificates(Cert, Config, CertFile, Role).
-init_certificates(undefined, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle,
- CRLDbInfo, <<>>, _) ->
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo, undefined};
+init_certificates(undefined, Config, <<>>, _) ->
+ {ok, Config#{own_certificate => undefined}};
-init_certificates(undefined, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle,
- CacheHandle, CRLDbInfo, CertFile, client) ->
+init_certificates(undefined, #{pem_cache := PemCache} = Config, CertFile, client) ->
try
%% Ignoring potential proxy-certificates see:
%% http://dev.globus.org/wiki/Security/ProxyFileFormat
- [OwnCert|_] = ssl_certificate:file_to_certificats(CertFile, PemCacheHandle),
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo, OwnCert}
+ [OwnCert|_] = ssl_certificate:file_to_certificats(CertFile, PemCache),
+ {ok, Config#{own_certificate => OwnCert}}
catch _Error:_Reason ->
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo, undefined}
- end;
+ {ok, Config#{own_certificate => undefined}}
+ end;
-init_certificates(undefined, CertDbRef, CertDbHandle, FileRefHandle,
- PemCacheHandle, CacheRef, CRLDbInfo, CertFile, server) ->
+init_certificates(undefined, #{pem_cache := PemCache} = Config, CertFile, server) ->
try
- [OwnCert|_] = ssl_certificate:file_to_certificats(CertFile, PemCacheHandle),
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, CRLDbInfo, OwnCert}
+ [OwnCert|_] = ssl_certificate:file_to_certificats(CertFile, PemCache),
+ {ok, Config#{own_certificate => OwnCert}}
catch
_:Reason ->
file_error(CertFile, {certfile, Reason})
end;
-init_certificates(Cert, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, CRLDbInfo, _, _) ->
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, CRLDbInfo, Cert}.
-
+init_certificates(Cert, Config, _, _) ->
+ {ok, Config#{own_certificate => Cert}}.
+
init_private_key(_, undefined, <<>>, _Password, _Client) ->
undefined;
init_private_key(DbHandle, undefined, KeyFile, Password, _) ->
@@ -135,6 +133,8 @@ file_error(File, Throw) ->
case Throw of
{Opt,{badmatch, {error, {badmatch, Error}}}} ->
throw({options, {Opt, binary_to_list(File), Error}});
+ {Opt, {badmatch, Error}} ->
+ throw({options, {Opt, binary_to_list(File), Error}});
_ ->
throw(Throw)
end.
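
ssl_config:init/2 now returns a single map instead of a wide tuple: the PEM cache handle is read from the map for key and Diffie-Hellman initialisation, and private_key and dh_params are merged back in before the map is returned. A sketch of consuming the result, with key names taken from the hunks above and from ssl_connection.erl below; SslOpts is a placeholder:

    {ok, #{own_certificate := OwnCert,
           private_key := Key,
           dh_params := DHParams,
           pem_cache := PemCache}} = ssl_config:init(SslOpts, server).
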
diff --git a/lib/ssl/src/ssl_connection.erl b/lib/ssl/src/ssl_connection.erl
index 6ed2fc83da..ea139ac4b1 100644
--- a/lib/ssl/src/ssl_connection.erl
+++ b/lib/ssl/src/ssl_connection.erl
@@ -148,19 +148,19 @@ socket_control(Connection, Socket, Pid, Transport) ->
%%--------------------------------------------------------------------
socket_control(Connection, Socket, Pid, Transport, udp_listner) ->
%% dtls listner process must have the socket control
- {ok, dtls_socket:socket(Pid, Transport, Socket, Connection)};
+ {ok, Connection:socket(Pid, Transport, Socket, Connection, undefined)};
socket_control(tls_connection = Connection, Socket, Pid, Transport, ListenTracker) ->
case Transport:controlling_process(Socket, Pid) of
ok ->
- {ok, tls_socket:socket(Pid, Transport, Socket, Connection, ListenTracker)};
+ {ok, Connection:socket(Pid, Transport, Socket, Connection, ListenTracker)};
{error, Reason} ->
{error, Reason}
end;
socket_control(dtls_connection = Connection, {_, Socket}, Pid, Transport, ListenTracker) ->
case Transport:controlling_process(Socket, Pid) of
ok ->
- {ok, tls_socket:socket(Pid, Transport, Socket, Connection, ListenTracker)};
+ {ok, Connection:socket(Pid, Transport, Socket, Connection, ListenTracker)};
{error, Reason} ->
{error, Reason}
end.
@@ -323,8 +323,14 @@ handle_session(#server_hello{cipher_suite = CipherSuite,
-spec ssl_config(#ssl_options{}, client | server, #state{}) -> #state{}.
%%--------------------------------------------------------------------
ssl_config(Opts, Role, State) ->
- {ok, Ref, CertDbHandle, FileRefHandle, CacheHandle, CRLDbInfo,
- OwnCert, Key, DHParams} =
+ {ok, #{cert_db_ref := Ref,
+ cert_db_handle := CertDbHandle,
+ fileref_db_handle := FileRefHandle,
+ session_cache := CacheHandle,
+ crl_db_info := CRLDbHandle,
+ private_key := Key,
+ dh_params := DHParams,
+ own_certificate := OwnCert}} =
ssl_config:init(Opts, Role),
Handshake = ssl_handshake:init_handshake_history(),
TimeStamp = erlang:monotonic_time(),
@@ -335,7 +341,7 @@ ssl_config(Opts, Role, State) ->
file_ref_db = FileRefHandle,
cert_db_ref = Ref,
cert_db = CertDbHandle,
- crl_db = CRLDbInfo,
+ crl_db = CRLDbHandle,
session_cache = CacheHandle,
private_key = Key,
diffie_hellman_params = DHParams,
@@ -357,11 +363,13 @@ init({call, From}, {start, Timeout}, State0, Connection) ->
timer = Timer}),
Connection:next_event(hello, Record, State);
init({call, From}, {start, {Opts, EmOpts}, Timeout},
- #state{role = Role} = State0, Connection) ->
+ #state{role = Role, ssl_options = OrigSSLOptions,
+ socket_options = SockOpts} = State0, Connection) ->
try
- State = ssl_config(Opts, Role, State0),
+ SslOpts = ssl:handle_options(Opts, OrigSSLOptions),
+ State = ssl_config(SslOpts, Role, State0),
init({call, From}, {start, Timeout},
- State#state{ssl_options = Opts, socket_options = EmOpts}, Connection)
+ State#state{ssl_options = SslOpts, socket_options = new_emulated(EmOpts, SockOpts)}, Connection)
catch throw:Error ->
{stop_and_reply, normal, {reply, From, {error, Error}}}
end;
@@ -426,11 +434,11 @@ abbreviated(internal, #finished{verify_data = Data} = Finished,
verified ->
ConnectionStates1 =
ssl_record:set_server_verify_data(current_read, Data, ConnectionStates0),
- State1 =
+ {State1, Actions} =
finalize_handshake(State0#state{connection_states = ConnectionStates1},
abbreviated, Connection),
{Record, State} = prepare_connection(State1#state{expecting_finished = false}, Connection),
- Connection:next_event(connection, Record, State);
+ Connection:next_event(connection, Record, State, Actions);
#alert{} = Alert ->
handle_own_alert(Alert, Version, abbreviated, State0)
end;
@@ -850,6 +858,7 @@ handle_common_event(internal, #change_cipher_spec{type = <<1>>}, StateName,
StateName, State);
handle_common_event(_Type, Msg, StateName, #state{negotiated_version = Version} = State,
_) ->
+ ct:pal("Unexpected msg ~p", [Msg]),
Alert = ?ALERT_REC(?FATAL,?UNEXPECTED_MESSAGE),
handle_own_alert(Alert, Version, {StateName, Msg}, State).
@@ -1011,7 +1020,7 @@ terminate(_, _, #state{terminated = true}) ->
%% Happens when user closes the connection using ssl:close/1
%% we want to guarantee that Transport:close has been called
%% when ssl:close/1 returns unless it is a downgrade where
- %% we want to guarantee that close alert is recived before
+ %% we want to guarantee that close alert is received before
%% returning. In both cases terminate has been run manually
%% before run by gen_statem which will end up here
ok;
@@ -1230,13 +1239,13 @@ new_server_hello(#server_hello{cipher_suite = CipherSuite,
negotiated_version = Version} = State0, Connection) ->
try server_certify_and_key_exchange(State0, Connection) of
#state{} = State1 ->
- State2 = server_hello_done(State1, Connection),
+ {State2, Actions} = server_hello_done(State1, Connection),
Session =
Session0#session{session_id = SessionId,
cipher_suite = CipherSuite,
compression_method = Compression},
{Record, State} = Connection:next_record(State2#state{session = Session}),
- Connection:next_event(certify, Record, State)
+ Connection:next_event(certify, Record, State, Actions)
catch
#alert{} = Alert ->
handle_own_alert(Alert, Version, hello, State0)
@@ -1251,10 +1260,10 @@ resumed_server_hello(#state{session = Session,
{_, ConnectionStates1} ->
State1 = State0#state{connection_states = ConnectionStates1,
session = Session},
- State2 =
+ {State2, Actions} =
finalize_handshake(State1, abbreviated, Connection),
{Record, State} = Connection:next_record(State2),
- Connection:next_event(abbreviated, Record, State);
+ Connection:next_event(abbreviated, Record, State, Actions);
#alert{} = Alert ->
handle_own_alert(Alert, Version, hello, State0)
end.
@@ -1337,12 +1346,12 @@ client_certify_and_key_exchange(#state{negotiated_version = Version} =
State0, Connection) ->
try do_client_certify_and_key_exchange(State0, Connection) of
State1 = #state{} ->
- State2 = finalize_handshake(State1, certify, Connection),
+ {State2, Actions} = finalize_handshake(State1, certify, Connection),
State3 = State2#state{
%% Reinitialize
client_certificate_requested = false},
{Record, State} = Connection:next_record(State3),
- Connection:next_event(cipher, Record, State)
+ Connection:next_event(cipher, Record, State, Actions)
catch
throw:#alert{} = Alert ->
handle_own_alert(Alert, Version, certify, State0)
@@ -1864,11 +1873,11 @@ cipher_role(server, Data, Session, #state{connection_states = ConnectionStates0
Connection) ->
ConnectionStates1 = ssl_record:set_client_verify_data(current_read, Data,
ConnectionStates0),
- State1 =
+ {State1, Actions} =
finalize_handshake(State0#state{connection_states = ConnectionStates1,
session = Session}, cipher, Connection),
{Record, State} = prepare_connection(State1, Connection),
- Connection:next_event(connection, Record, State).
+ Connection:next_event(connection, Record, State, Actions).
is_anonymous(Algo) when Algo == dh_anon;
Algo == ecdh_anon;
@@ -2299,7 +2308,7 @@ format_reply(_, _,#socket_options{active = false, mode = Mode, packet = Packet,
{ok, do_format_reply(Mode, Packet, Header, Data)};
format_reply(Transport, Socket, #socket_options{active = _, mode = Mode, packet = Packet,
header = Header}, Data, Tracker, Connection) ->
- {ssl, tls_socket:socket(self(), Transport, Socket, Connection, Tracker),
+ {ssl, Connection:socket(self(), Transport, Socket, Connection, Tracker),
do_format_reply(Mode, Packet, Header, Data)}.
deliver_packet_error(Transport, Socket, SO= #socket_options{active = Active}, Data, Pid, From, Tracker, Connection) ->
@@ -2308,7 +2317,7 @@ deliver_packet_error(Transport, Socket, SO= #socket_options{active = Active}, Da
format_packet_error(_, _,#socket_options{active = false, mode = Mode}, Data, _, _) ->
{error, {invalid_packet, do_format_reply(Mode, raw, 0, Data)}};
format_packet_error(Transport, Socket, #socket_options{active = _, mode = Mode}, Data, Tracker, Connection) ->
- {ssl_error, tls_socket:socket(self(), Transport, Socket, Connection, Tracker),
+ {ssl_error, Connection:socket(self(), Transport, Socket, Connection, Tracker),
{invalid_packet, do_format_reply(Mode, raw, 0, Data)}}.
do_format_reply(binary, _, N, Data) when N > 0 -> % Header mode
@@ -2363,11 +2372,11 @@ alert_user(Transport, Tracker, Socket, Active, Pid, From, Alert, Role, Connectio
case ssl_alert:reason_code(Alert, Role) of
closed ->
send_or_reply(Active, Pid, From,
- {ssl_closed, tls_socket:socket(self(),
+ {ssl_closed, Connection:socket(self(),
Transport, Socket, Connection, Tracker)});
ReasonCode ->
send_or_reply(Active, Pid, From,
- {ssl_error, tls_socket:socket(self(),
+ {ssl_error, Connection:socket(self(),
Transport, Socket, Connection, Tracker), ReasonCode})
end.
@@ -2428,16 +2437,23 @@ handle_sni_extension(#sni{hostname = Hostname}, State0) ->
undefined ->
State0;
_ ->
- {ok, Ref, CertDbHandle, FileRefHandle, CacheHandle, CRLDbHandle, OwnCert, Key, DHParams} =
- ssl_config:init(NewOptions, State0#state.role),
- State0#state{
- session = State0#state.session#session{own_certificate = OwnCert},
- file_ref_db = FileRefHandle,
- cert_db_ref = Ref,
- cert_db = CertDbHandle,
- crl_db = CRLDbHandle,
- session_cache = CacheHandle,
- private_key = Key,
+ {ok, #{cert_db_ref := Ref,
+ cert_db_handle := CertDbHandle,
+ fileref_db_handle := FileRefHandle,
+ session_cache := CacheHandle,
+ crl_db_info := CRLDbHandle,
+ private_key := Key,
+ dh_params := DHParams,
+ own_certificate := OwnCert}} =
+ ssl_config:init(NewOptions, State0#state.role),
+ State0#state{
+ session = State0#state.session#session{own_certificate = OwnCert},
+ file_ref_db = FileRefHandle,
+ cert_db_ref = Ref,
+ cert_db = CertDbHandle,
+ crl_db = CRLDbHandle,
+ session_cache = CacheHandle,
+ private_key = Key,
diffie_hellman_params = DHParams,
ssl_options = NewOptions,
sni_hostname = Hostname
@@ -2459,3 +2475,8 @@ update_ssl_options_from_sni(OrigSSLOptions, SNIHostname) ->
_ ->
ssl:handle_options(SSLOption, OrigSSLOptions)
end.
+
+new_emulated([], EmOpts) ->
+ EmOpts;
+new_emulated(NewEmOpts, _) ->
+ NewEmOpts.
diff --git a/lib/ssl/src/ssl_connection_sup.erl b/lib/ssl/src/ssl_connection_sup.erl
new file mode 100644
index 0000000000..1a1f43e683
--- /dev/null
+++ b/lib/ssl/src/ssl_connection_sup.erl
@@ -0,0 +1,101 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1998-2016. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+
+-module(ssl_connection_sup).
+
+-behaviour(supervisor).
+
+%% API
+-export([start_link/0]).
+
+%% Supervisor callback
+-export([init/1]).
+
+%%%=========================================================================
+%%% API
+%%%=========================================================================
+
+-spec start_link() -> {ok, pid()} | ignore | {error, term()}.
+
+start_link() ->
+ supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+%%%=========================================================================
+%%% Supervisor callback
+%%%=========================================================================
+
+init([]) ->
+
+ TLSConnectionManager = tls_connection_manager_child_spec(),
+ %% Handles emulated options so that they are inherited by the accept
+ %% socket, even when setopts is performed on the listen socket
+ ListenOptionsTracker = listen_options_tracker_child_spec(),
+
+ DTLSConnectionManager = dtls_connection_manager_child_spec(),
+ DTLSUdpListeners = dtls_udp_listeners_spec(),
+
+ {ok, {{one_for_one, 10, 3600}, [TLSConnectionManager,
+ ListenOptionsTracker,
+ DTLSConnectionManager,
+ DTLSUdpListeners
+ ]}}.
+
+
+%%--------------------------------------------------------------------
+%%% Internal functions
+%%--------------------------------------------------------------------
+
+tls_connection_manager_child_spec() ->
+ Name = tls_connection,
+ StartFunc = {tls_connection_sup, start_link, []},
+ Restart = permanent,
+ Shutdown = 4000,
+ Modules = [tls_connection_sup],
+ Type = supervisor,
+ {Name, StartFunc, Restart, Shutdown, Type, Modules}.
+
+dtls_connection_manager_child_spec() ->
+ Name = dtls_connection,
+ StartFunc = {dtls_connection_sup, start_link, []},
+ Restart = permanent,
+ Shutdown = 4000,
+ Modules = [dtls_connection_sup],
+ Type = supervisor,
+ {Name, StartFunc, Restart, Shutdown, Type, Modules}.
+
+listen_options_tracker_child_spec() ->
+ Name = tls_socket,
+ StartFunc = {ssl_listen_tracker_sup, start_link, []},
+ Restart = permanent,
+ Shutdown = 4000,
+ Modules = [tls_socket],
+ Type = supervisor,
+ {Name, StartFunc, Restart, Shutdown, Type, Modules}.
+
+dtls_udp_listeners_spec() ->
+ Name = dtls_udp_listener,
+ StartFunc = {dtls_udp_sup, start_link, []},
+ Restart = permanent,
+ Shutdown = 4000,
+ Modules = [],
+ Type = supervisor,
+ {Name, StartFunc, Restart, Shutdown, Type, Modules}.
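
The new ssl_connection_sup collects the TLS and DTLS connection supervisors, the listen-option tracker and the DTLS UDP listener supervisor under a single one_for_one supervisor. Once the ssl application is running, the resulting tree can be inspected like any other; a trivial sketch:

    %% Lists the four child specifications defined above.
    supervisor:which_children(ssl_connection_sup).
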
diff --git a/lib/ssl/src/ssl_crl.erl b/lib/ssl/src/ssl_crl.erl
index fc60bdba67..33375b5e09 100644
--- a/lib/ssl/src/ssl_crl.erl
+++ b/lib/ssl/src/ssl_crl.erl
@@ -29,7 +29,7 @@
-export([trusted_cert_and_path/3]).
-trusted_cert_and_path(CRL, {SerialNumber, Issuer},{Db, DbRef} = DbHandle) ->
+trusted_cert_and_path(CRL, {SerialNumber, Issuer},{_, {Db, DbRef}} = DbHandle) ->
case ssl_pkix_db:lookup_trusted_cert(Db, DbRef, SerialNumber, Issuer) of
undefined ->
trusted_cert_and_path(CRL, issuer_not_found, DbHandle);
@@ -37,17 +37,34 @@ trusted_cert_and_path(CRL, {SerialNumber, Issuer},{Db, DbRef} = DbHandle) ->
{ok, Root, Chain} = ssl_certificate:certificate_chain(OtpCert, Db, DbRef),
{ok, Root, lists:reverse(Chain)}
end;
-
-trusted_cert_and_path(CRL, issuer_not_found, {Db, DbRef} = DbHandle) ->
- case find_issuer(CRL, DbHandle) of
+trusted_cert_and_path(CRL, issuer_not_found, {CertPath, {Db, DbRef}}) ->
+ case find_issuer(CRL, {certpath,
+ [{Der, public_key:pkix_decode_cert(Der,otp)} || Der <- CertPath]}) of
{ok, OtpCert} ->
{ok, Root, Chain} = ssl_certificate:certificate_chain(OtpCert, Db, DbRef),
{ok, Root, lists:reverse(Chain)};
{error, issuer_not_found} ->
- {ok, unknown_crl_ca, []}
- end.
+ trusted_cert_and_path(CRL, issuer_not_found, {Db, DbRef})
+ end;
+trusted_cert_and_path(CRL, issuer_not_found, {Db, DbRef} = DbInfo) ->
+ case find_issuer(CRL, DbInfo) of
+ {ok, OtpCert} ->
+ {ok, Root, Chain} = ssl_certificate:certificate_chain(OtpCert, Db, DbRef),
+ {ok, Root, lists:reverse(Chain)};
+ {error, issuer_not_found} ->
+ {error, unknown_ca}
+ end.
-find_issuer(CRL, {Db,DbRef}) ->
+find_issuer(CRL, {certpath = Db, DbRef}) ->
+ Issuer = public_key:pkix_normalize_name(public_key:pkix_crl_issuer(CRL)),
+ IsIssuerFun =
+ fun({_Der,ErlCertCandidate}, Acc) ->
+ verify_crl_issuer(CRL, ErlCertCandidate, Issuer, Acc);
+ (_, Acc) ->
+ Acc
+ end,
+ find_issuer(IsIssuerFun, Db, DbRef);
+find_issuer(CRL, {Db, DbRef}) ->
Issuer = public_key:pkix_normalize_name(public_key:pkix_crl_issuer(CRL)),
IsIssuerFun =
fun({_Key, {_Der,ErlCertCandidate}}, Acc) ->
@@ -55,26 +72,33 @@ find_issuer(CRL, {Db,DbRef}) ->
(_, Acc) ->
Acc
end,
- if is_reference(DbRef) -> % actual DB exists
- try ssl_pkix_db:foldl(IsIssuerFun, issuer_not_found, Db) of
- issuer_not_found ->
- {error, issuer_not_found}
- catch
- {ok, _} = Result ->
- Result
- end;
- is_tuple(DbRef), element(1,DbRef) =:= extracted -> % cache bypass byproduct
- {extracted, CertsData} = DbRef,
- Certs = [Entry || {decoded, Entry} <- CertsData],
- try lists:foldl(IsIssuerFun, issuer_not_found, Certs) of
- issuer_not_found ->
- {error, issuer_not_found}
- catch
- {ok, _} = Result ->
- Result
- end
- end.
+ find_issuer(IsIssuerFun, Db, DbRef).
+find_issuer(IsIssuerFun, certpath, Certs) ->
+ try lists:foldl(IsIssuerFun, issuer_not_found, Certs) of
+ issuer_not_found ->
+ {error, issuer_not_found}
+ catch
+ {ok, _} = Result ->
+ Result
+ end;
+find_issuer(IsIssuerFun, extracted, CertsData) ->
+ Certs = [Entry || {decoded, Entry} <- CertsData],
+ try lists:foldl(IsIssuerFun, issuer_not_found, Certs) of
+ issuer_not_found ->
+ {error, issuer_not_found}
+ catch
+ {ok, _} = Result ->
+ Result
+ end;
+find_issuer(IsIssuerFun, Db, _) ->
+ try ssl_pkix_db:foldl(IsIssuerFun, issuer_not_found, Db) of
+ issuer_not_found ->
+ {error, issuer_not_found}
+ catch
+ {ok, _} = Result ->
+ Result
+ end.
verify_crl_issuer(CRL, ErlCertCandidate, Issuer, NotIssuer) ->
TBSCert = ErlCertCandidate#'OTPCertificate'.tbsCertificate,
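
ssl_crl:trusted_cert_and_path/3 now receives the peer certificate path alongside the certificate database and searches the path before falling back to the database when looking for a CRL issuer; an issuer that cannot be found is now reported as {error, unknown_ca} rather than {ok, unknown_crl_ca, []}. A sketch of the new third argument, matching the issuer_fun set up in ssl_handshake.erl below; all variables are placeholders:

    %% CertPath: DER certificates from the peer chain;
    %% {CertDbHandle, CertDbRef}: the trusted-certificate database.
    ssl_crl:trusted_cert_and_path(CRL, {SerialNumber, Issuer},
                                  {CertPath, {CertDbHandle, CertDbRef}}).
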
diff --git a/lib/ssl/src/ssl_dist_admin_sup.erl b/lib/ssl/src/ssl_dist_admin_sup.erl
new file mode 100644
index 0000000000..f60806c4cb
--- /dev/null
+++ b/lib/ssl/src/ssl_dist_admin_sup.erl
@@ -0,0 +1,74 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2016-2016. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+
+-module(ssl_dist_admin_sup).
+
+-behaviour(supervisor).
+
+%% API
+-export([start_link/0]).
+
+%% Supervisor callback
+-export([init/1]).
+
+%%%=========================================================================
+%%% API
+%%%=========================================================================
+
+-spec start_link() -> {ok, pid()} | ignore | {error, term()}.
+
+start_link() ->
+ supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+%%%=========================================================================
+%%% Supervisor callback
+%%%=========================================================================
+
+init([]) ->
+ PEMCache = pem_cache_child_spec(),
+ SessionCertManager = session_and_cert_manager_child_spec(),
+ {ok, {{rest_for_one, 10, 3600}, [PEMCache, SessionCertManager]}}.
+
+
+%%--------------------------------------------------------------------
+%%% Internal functions
+%%--------------------------------------------------------------------
+
+pem_cache_child_spec() ->
+ Name = ssl_pem_cache_dist,
+ StartFunc = {ssl_pem_cache, start_link_dist, [[]]},
+ Restart = permanent,
+ Shutdown = 4000,
+ Modules = [ssl_pem_cache],
+ Type = worker,
+ {Name, StartFunc, Restart, Shutdown, Type, Modules}.
+
+session_and_cert_manager_child_spec() ->
+ Opts = ssl_admin_sup:manager_opts(),
+ Name = ssl_dist_manager,
+ StartFunc = {ssl_manager, start_link_dist, [Opts]},
+ Restart = permanent,
+ Shutdown = 4000,
+ Modules = [ssl_manager],
+ Type = worker,
+ {Name, StartFunc, Restart, Shutdown, Type, Modules}.
+
diff --git a/lib/ssl/src/ssl_dist_connection_sup.erl b/lib/ssl/src/ssl_dist_connection_sup.erl
new file mode 100644
index 0000000000..e5842c866e
--- /dev/null
+++ b/lib/ssl/src/ssl_dist_connection_sup.erl
@@ -0,0 +1,79 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1998-2016. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+
+-module(ssl_dist_connection_sup).
+
+-behaviour(supervisor).
+
+%% API
+-export([start_link/0]).
+
+%% Supervisor callback
+-export([init/1]).
+
+%%%=========================================================================
+%%% API
+%%%=========================================================================
+
+-spec start_link() -> {ok, pid()} | ignore | {error, term()}.
+
+start_link() ->
+ supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+%%%=========================================================================
+%%% Supervisor callback
+%%%=========================================================================
+
+init([]) ->
+
+ TLSConnectionManager = tls_connection_manager_child_spec(),
+ %% Handles emulated options so that they are inherited by the accept
+ %% socket, even when setopts is performed on the listen socket
+ ListenOptionsTracker = listen_options_tracker_child_spec(),
+
+ {ok, {{one_for_one, 10, 3600}, [TLSConnectionManager,
+ ListenOptionsTracker
+ ]}}.
+
+
+%%--------------------------------------------------------------------
+%%% Internal functions
+%%--------------------------------------------------------------------
+
+tls_connection_manager_child_spec() ->
+ Name = dist_tls_connection,
+ StartFunc = {tls_connection_sup, start_link_dist, []},
+ Restart = permanent,
+ Shutdown = 4000,
+ Modules = [tls_connection_sup],
+ Type = supervisor,
+ {Name, StartFunc, Restart, Shutdown, Type, Modules}.
+
+listen_options_tracker_child_spec() ->
+ Name = dist_tls_socket,
+ StartFunc = {ssl_listen_tracker_sup, start_link_dist, []},
+ Restart = permanent,
+ Shutdown = 4000,
+ Modules = [tls_socket],
+ Type = supervisor,
+ {Name, StartFunc, Restart, Shutdown, Type, Modules}.
+
diff --git a/lib/ssl/src/ssl_dist_sup.erl b/lib/ssl/src/ssl_dist_sup.erl
index d47cd76bf5..690b896919 100644
--- a/lib/ssl/src/ssl_dist_sup.erl
+++ b/lib/ssl/src/ssl_dist_sup.erl
@@ -44,34 +44,29 @@ start_link() ->
%%%=========================================================================
init([]) ->
- SessionCertManager = session_and_cert_manager_child_spec(),
- ConnetionManager = connection_manager_child_spec(),
- ListenOptionsTracker = listen_options_tracker_child_spec(),
+ AdminSup = ssl_admin_child_spec(),
+ ConnectionSup = ssl_connection_sup(),
ProxyServer = proxy_server_child_spec(),
-
- {ok, {{one_for_all, 10, 3600}, [SessionCertManager, ConnetionManager,
- ListenOptionsTracker,
- ProxyServer]}}.
+ {ok, {{one_for_all, 10, 3600}, [AdminSup, ProxyServer, ConnectionSup]}}.
%%--------------------------------------------------------------------
%%% Internal functions
%%--------------------------------------------------------------------
-session_and_cert_manager_child_spec() ->
- Opts = ssl_sup:manager_opts(),
- Name = ssl_manager_dist,
- StartFunc = {ssl_manager, start_link_dist, [Opts]},
+ssl_admin_child_spec() ->
+ Name = ssl_dist_admin_sup,
+ StartFunc = {ssl_dist_admin_sup, start_link , []},
Restart = permanent,
Shutdown = 4000,
- Modules = [ssl_manager],
- Type = worker,
+ Modules = [ssl_admin_sup],
+ Type = supervisor,
{Name, StartFunc, Restart, Shutdown, Type, Modules}.
-connection_manager_child_spec() ->
- Name = ssl_connection_dist,
- StartFunc = {tls_connection_sup, start_link_dist, []},
- Restart = permanent,
- Shutdown = infinity,
- Modules = [tls_connection_sup],
+ssl_connection_sup() ->
+ Name = ssl_dist_connection_sup,
+ StartFunc = {ssl_dist_connection_sup, start_link, []},
+ Restart = permanent,
+ Shutdown = 4000,
+ Modules = [ssl_connection_sup],
Type = supervisor,
{Name, StartFunc, Restart, Shutdown, Type, Modules}.
@@ -83,12 +78,3 @@ proxy_server_child_spec() ->
Modules = [ssl_tls_dist_proxy],
Type = worker,
{Name, StartFunc, Restart, Shutdown, Type, Modules}.
-
-listen_options_tracker_child_spec() ->
- Name = tls_socket_dist,
- StartFunc = {ssl_listen_tracker_sup, start_link_dist, []},
- Restart = permanent,
- Shutdown = 4000,
- Modules = [tls_socket],
- Type = supervisor,
- {Name, StartFunc, Restart, Shutdown, Type, Modules}.
diff --git a/lib/ssl/src/ssl_handshake.erl b/lib/ssl/src/ssl_handshake.erl
index 4acc745c5f..cb61c82334 100644
--- a/lib/ssl/src/ssl_handshake.erl
+++ b/lib/ssl/src/ssl_handshake.erl
@@ -397,14 +397,13 @@ verify_signature(_, Hash, {HashAlgo, _SignAlg}, Signature,
%%--------------------------------------------------------------------
certify(#certificate{asn1_certificates = ASN1Certs}, CertDbHandle, CertDbRef,
MaxPathLen, _Verify, ValidationFunAndState0, PartialChain, CRLCheck, CRLDbHandle, Role) ->
- [PeerCert | _] = ASN1Certs,
-
- ValidationFunAndState = validation_fun_and_state(ValidationFunAndState0, Role,
- CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle),
-
+ [PeerCert | _] = ASN1Certs,
try
{TrustedCert, CertPath} =
ssl_certificate:trusted_cert_and_path(ASN1Certs, CertDbHandle, CertDbRef, PartialChain),
+ ValidationFunAndState = validation_fun_and_state(ValidationFunAndState0, Role,
+ CertDbHandle, CertDbRef,
+ CRLCheck, CRLDbHandle, CertPath),
case public_key:pkix_path_validation(TrustedCert,
CertPath,
[{max_path_length, MaxPathLen},
@@ -1541,7 +1540,8 @@ sni1(Hostname) ->
%%--------------------------------------------------------------------
%%% Internal functions
%%--------------------------------------------------------------------
-validation_fun_and_state({Fun, UserState0}, Role, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle) ->
+validation_fun_and_state({Fun, UserState0}, Role, CertDbHandle, CertDbRef,
+ CRLCheck, CRLDbHandle, CertPath) ->
{fun(OtpCert, {extension, _} = Extension, {SslState, UserState}) ->
case ssl_certificate:validate(OtpCert,
Extension,
@@ -1550,22 +1550,25 @@ validation_fun_and_state({Fun, UserState0}, Role, CertDbHandle, CertDbRef, CRLC
{valid, {NewSslState, UserState}};
{fail, Reason} ->
apply_user_fun(Fun, OtpCert, Reason, UserState,
- SslState);
+ SslState, CertPath);
{unknown, _} ->
apply_user_fun(Fun, OtpCert,
- Extension, UserState, SslState)
+ Extension, UserState, SslState, CertPath)
end;
(OtpCert, VerifyResult, {SslState, UserState}) ->
apply_user_fun(Fun, OtpCert, VerifyResult, UserState,
- SslState)
+ SslState, CertPath)
end, {{Role, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle}, UserState0}};
-validation_fun_and_state(undefined, Role, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle) ->
+validation_fun_and_state(undefined, Role, CertDbHandle, CertDbRef,
+ CRLCheck, CRLDbHandle, CertPath) ->
{fun(OtpCert, {extension, _} = Extension, SslState) ->
ssl_certificate:validate(OtpCert,
Extension,
SslState);
- (OtpCert, VerifyResult, SslState) when (VerifyResult == valid) or (VerifyResult == valid_peer) ->
- case crl_check(OtpCert, CRLCheck, CertDbHandle, CertDbRef, CRLDbHandle, VerifyResult) of
+ (OtpCert, VerifyResult, SslState) when (VerifyResult == valid) or
+ (VerifyResult == valid_peer) ->
+ case crl_check(OtpCert, CRLCheck, CertDbHandle, CertDbRef,
+ CRLDbHandle, VerifyResult, CertPath) of
valid ->
{VerifyResult, SslState};
Reason ->
@@ -1578,20 +1581,21 @@ validation_fun_and_state(undefined, Role, CertDbHandle, CertDbRef, CRLCheck, CRL
end, {Role, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle}}.
apply_user_fun(Fun, OtpCert, VerifyResult, UserState0,
- {_, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle} = SslState) when
+ {_, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle} = SslState, CertPath) when
(VerifyResult == valid) or (VerifyResult == valid_peer) ->
case Fun(OtpCert, VerifyResult, UserState0) of
{Valid, UserState} when (Valid == valid) or (Valid == valid_peer) ->
- case crl_check(OtpCert, CRLCheck, CertDbHandle, CertDbRef, CRLDbHandle, VerifyResult) of
+ case crl_check(OtpCert, CRLCheck, CertDbHandle, CertDbRef,
+ CRLDbHandle, VerifyResult, CertPath) of
valid ->
{Valid, {SslState, UserState}};
Result ->
- apply_user_fun(Fun, OtpCert, Result, UserState, SslState)
+ apply_user_fun(Fun, OtpCert, Result, UserState, SslState, CertPath)
end;
{fail, _} = Fail ->
Fail
end;
-apply_user_fun(Fun, OtpCert, ExtensionOrError, UserState0, SslState) ->
+apply_user_fun(Fun, OtpCert, ExtensionOrError, UserState0, SslState, _CertPath) ->
case Fun(OtpCert, ExtensionOrError, UserState0) of
{Valid, UserState} when (Valid == valid) or (Valid == valid_peer)->
{Valid, {SslState, UserState}};
@@ -2187,13 +2191,14 @@ handle_psk_identity(_PSKIdentity, LookupFun)
handle_psk_identity(PSKIdentity, {Fun, UserState}) ->
Fun(psk, PSKIdentity, UserState).
-crl_check(_, false, _,_,_, _) ->
+crl_check(_, false, _,_,_, _, _) ->
valid;
-crl_check(_, peer, _, _,_, valid) -> %% Do not check CAs with this option.
+crl_check(_, peer, _, _,_, valid, _) -> %% Do not check CAs with this option.
valid;
-crl_check(OtpCert, Check, CertDbHandle, CertDbRef, {Callback, CRLDbHandle}, _) ->
+crl_check(OtpCert, Check, CertDbHandle, CertDbRef, {Callback, CRLDbHandle}, _, CertPath) ->
Options = [{issuer_fun, {fun(_DP, CRL, Issuer, DBInfo) ->
- ssl_crl:trusted_cert_and_path(CRL, Issuer, DBInfo)
+ ssl_crl:trusted_cert_and_path(CRL, Issuer, {CertPath,
+ DBInfo})
end, {CertDbHandle, CertDbRef}}},
{update_crl, fun(DP, CRL) -> Callback:fresh_crl(DP, CRL) end}
],
@@ -2229,7 +2234,8 @@ dps_and_crls(OtpCert, Callback, CRLDbHandle, ext) ->
no_dps;
DistPoints ->
Issuer = OtpCert#'OTPCertificate'.tbsCertificate#'OTPTBSCertificate'.issuer,
- distpoints_lookup(DistPoints, Issuer, Callback, CRLDbHandle)
+ CRLs = distpoints_lookup(DistPoints, Issuer, Callback, CRLDbHandle),
+ dps_and_crls(DistPoints, CRLs, [])
end;
dps_and_crls(OtpCert, Callback, CRLDbHandle, same_issuer) ->
@@ -2242,7 +2248,13 @@ dps_and_crls(OtpCert, Callback, CRLDbHandle, same_issuer) ->
end, GenNames),
[{DP, {CRL, public_key:der_decode('CertificateList', CRL)}} || CRL <- CRLs].
-distpoints_lookup([], _, _, _) ->
+dps_and_crls([], _, Acc) ->
+ Acc;
+dps_and_crls([DP | Rest], CRLs, Acc) ->
+ DpCRL = [{DP, {CRL, public_key:der_decode('CertificateList', CRL)}} || CRL <- CRLs],
+ dps_and_crls(Rest, CRLs, DpCRL ++ Acc).
+
+distpoints_lookup([],_, _, _) ->
[];
distpoints_lookup([DistPoint | Rest], Issuer, Callback, CRLDbHandle) ->
Result =
@@ -2257,7 +2269,7 @@ distpoints_lookup([DistPoint | Rest], Issuer, Callback, CRLDbHandle) ->
not_available ->
distpoints_lookup(Rest, Issuer, Callback, CRLDbHandle);
CRLs ->
- [{DistPoint, {CRL, public_key:der_decode('CertificateList', CRL)}} || CRL <- CRLs]
+ CRLs
end.
sign_algo(?rsaEncryption) ->
diff --git a/lib/ssl/src/ssl_handshake.hrl b/lib/ssl/src/ssl_handshake.hrl
index fde92035a2..324b7dbde3 100644
--- a/lib/ssl/src/ssl_handshake.hrl
+++ b/lib/ssl/src/ssl_handshake.hrl
@@ -80,6 +80,9 @@
-define(CLIENT_KEY_EXCHANGE, 16).
-define(FINISHED, 20).
+-define(MAX_UNIT24, 8388607).
+-define(DEFAULT_MAX_HANDSHAKE_SIZE, (256*1024)).
+
-record(random, {
gmt_unix_time, % uint32
random_bytes % opaque random_bytes[28]
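
The two new macros back the max_handshake_size option added in ssl.erl above: the default admits handshake messages of up to 256 kB, and validate_option/2 rejects values above ?MAX_UNIT24. In numbers, a trivial check assuming the header is included:

    262144 = 256 * 1024,             %% ?DEFAULT_MAX_HANDSHAKE_SIZE
    true = 262144 =< ?MAX_UNIT24.    %% the default is within the accepted range
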
diff --git a/lib/ssl/src/ssl_internal.hrl b/lib/ssl/src/ssl_internal.hrl
index 98b89bb811..c10ec3a2d6 100644
--- a/lib/ssl/src/ssl_internal.hrl
+++ b/lib/ssl/src/ssl_internal.hrl
@@ -76,7 +76,7 @@
-define(ALL_SUPPORTED_VERSIONS, ['tlsv1.2', 'tlsv1.1', tlsv1]).
-define(MIN_SUPPORTED_VERSIONS, ['tlsv1.1', tlsv1]).
-define(ALL_DATAGRAM_SUPPORTED_VERSIONS, ['dtlsv1.2', dtlsv1]).
--define(MIN_DATAGRAM_SUPPORTED_VERSIONS, ['dtlsv1.2', dtlsv1]).
+-define(MIN_DATAGRAM_SUPPORTED_VERSIONS, [dtlsv1]).
-define('24H_in_msec', 86400000).
-define('24H_in_sec', 86400).
@@ -142,7 +142,8 @@
signature_algs,
eccs,
honor_ecc_order :: boolean(),
- v2_hello_compatible :: boolean()
+ v2_hello_compatible :: boolean(),
+ max_handshake_size :: integer()
}).
-record(socket_options,
diff --git a/lib/ssl/src/ssl_manager.erl b/lib/ssl/src/ssl_manager.erl
index 5bd9521de7..2b82f18bb5 100644
--- a/lib/ssl/src/ssl_manager.erl
+++ b/lib/ssl/src/ssl_manager.erl
@@ -32,10 +32,9 @@
new_session_id/1, clean_cert_db/2,
register_session/2, register_session/3, invalidate_session/2,
insert_crls/2, insert_crls/3, delete_crls/1, delete_crls/2,
- invalidate_session/3, invalidate_pem/1, clear_pem_cache/0, manager_name/1]).
+ invalidate_session/3, name/1]).
-% Spawn export
--export([init_session_validator/1, init_pem_cache_validator/1]).
+-export([init_session_validator/1]).
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
@@ -52,9 +51,7 @@
session_lifetime :: integer(),
certificate_db :: db_handle(),
session_validation_timer :: reference(),
- last_delay_timer = {undefined, undefined},%% Keep for testing purposes
- last_pem_check :: erlang:timestamp(),
- clear_pem_cache :: integer(),
+ last_delay_timer = {undefined, undefined},%% Keep for testing purposes
session_cache_client_max :: integer(),
session_cache_server_max :: integer(),
session_server_invalidator :: undefined | pid(),
@@ -63,7 +60,6 @@
-define(GEN_UNIQUE_ID_MAX_TRIES, 10).
-define(SESSION_VALIDATION_INTERVAL, 60000).
--define(CLEAR_PEM_CACHE, 120000).
-define(CLEAN_SESSION_DB, 60000).
-define(CLEAN_CERT_DB, 500).
-define(DEFAULT_MAX_SESSION_CACHE, 1000).
@@ -74,14 +70,14 @@
%%====================================================================
%%--------------------------------------------------------------------
--spec manager_name(normal | dist) -> atom().
+-spec name(normal | dist) -> atom().
%%
%% Description: Returns the registered name of the ssl manager process
%% in the operation modes 'normal' and 'dist'.
%%--------------------------------------------------------------------
-manager_name(normal) ->
+name(normal) ->
?MODULE;
-manager_name(dist) ->
+name(dist) ->
list_to_atom(atom_to_list(?MODULE) ++ "dist").
%%--------------------------------------------------------------------
@@ -91,9 +87,10 @@ manager_name(dist) ->
%% and certificate caching.
%%--------------------------------------------------------------------
start_link(Opts) ->
- DistMangerName = manager_name(normal),
- gen_server:start_link({local, DistMangerName},
- ?MODULE, [DistMangerName, Opts], []).
+ MangerName = name(normal),
+ CacheName = ssl_pem_cache:name(normal),
+ gen_server:start_link({local, MangerName},
+ ?MODULE, [MangerName, CacheName, Opts], []).
%%--------------------------------------------------------------------
-spec start_link_dist(list()) -> {ok, pid()} | ignore | {error, term()}.
@@ -102,38 +99,23 @@ start_link(Opts) ->
%% be used by the erlang distribution. Note disables soft upgrade!
%%--------------------------------------------------------------------
start_link_dist(Opts) ->
- DistMangerName = manager_name(dist),
+ DistMangerName = name(dist),
+ DistCacheName = ssl_pem_cache:name(dist),
gen_server:start_link({local, DistMangerName},
- ?MODULE, [DistMangerName, Opts], []).
+ ?MODULE, [DistMangerName, DistCacheName, Opts], []).
%%--------------------------------------------------------------------
-spec connection_init(binary()| {der, list()}, client | server,
{Cb :: atom(), Handle:: term()}) ->
- {ok, certdb_ref(), db_handle(), db_handle(),
- db_handle(), db_handle(), CRLInfo::term()}.
+ {ok, map()}.
%%
%% Description: Do necessary initializations for a new connection.
%%--------------------------------------------------------------------
connection_init({der, _} = Trustedcerts, Role, CRLCache) ->
- case bypass_pem_cache() of
- true ->
- {ok, Extracted} = ssl_pkix_db:extract_trusted_certs(Trustedcerts),
- call({connection_init, Extracted, Role, CRLCache});
- false ->
- call({connection_init, Trustedcerts, Role, CRLCache})
- end;
-
-connection_init(<<>> = Trustedcerts, Role, CRLCache) ->
- call({connection_init, Trustedcerts, Role, CRLCache});
-
+ {ok, Extracted} = ssl_pkix_db:extract_trusted_certs(Trustedcerts),
+ call({connection_init, Extracted, Role, CRLCache});
connection_init(Trustedcerts, Role, CRLCache) ->
- case bypass_pem_cache() of
- true ->
- {ok, Extracted} = ssl_pkix_db:extract_trusted_certs(Trustedcerts),
- call({connection_init, Extracted, Role, CRLCache});
- false ->
- call({connection_init, Trustedcerts, Role, CRLCache})
- end.
+ call({connection_init, Trustedcerts, Role, CRLCache}).
%%--------------------------------------------------------------------
-spec cache_pem_file(binary(), term()) -> {ok, term()} | {error, reason()}.
@@ -141,31 +123,14 @@ connection_init(Trustedcerts, Role, CRLCache) ->
%% Description: Cache a pem file and return its content.
%%--------------------------------------------------------------------
cache_pem_file(File, DbHandle) ->
- case bypass_pem_cache() of
- true ->
- ssl_pkix_db:decode_pem_file(File);
- false ->
- case ssl_pkix_db:lookup_cached_pem(DbHandle, File) of
- [{Content,_}] ->
- {ok, Content};
- [Content] ->
- {ok, Content};
- undefined ->
- call({cache_pem, File})
- end
+ case ssl_pkix_db:lookup(File, DbHandle) of
+ [Content] ->
+ {ok, Content};
+ undefined ->
+ ssl_pem_cache:insert(File)
end.
%%--------------------------------------------------------------------
--spec clear_pem_cache() -> ok.
-%%
-%% Description: Clear the PEM cache
-%%--------------------------------------------------------------------
-clear_pem_cache() ->
- %% Not supported for distribution at the moement, should it be?
- put(ssl_manager, manager_name(normal)),
- call(unconditionally_clear_pem_cache).
-
-%%--------------------------------------------------------------------
-spec lookup_trusted_cert(term(), reference(), serialnumber(), issuer()) ->
undefined |
{ok, {der_cert(), #'OTPCertificate'{}}}.
@@ -222,26 +187,22 @@ invalidate_session(Port, Session) ->
load_mitigation(),
cast({invalidate_session, Port, Session}).
--spec invalidate_pem(File::binary()) -> ok.
-invalidate_pem(File) ->
- cast({invalidate_pem, File}).
-
insert_crls(Path, CRLs)->
insert_crls(Path, CRLs, normal).
insert_crls(?NO_DIST_POINT_PATH = Path, CRLs, ManagerType)->
- put(ssl_manager, manager_name(ManagerType)),
+ put(ssl_manager, name(ManagerType)),
cast({insert_crls, Path, CRLs});
insert_crls(Path, CRLs, ManagerType)->
- put(ssl_manager, manager_name(ManagerType)),
+ put(ssl_manager, name(ManagerType)),
call({insert_crls, Path, CRLs}).
delete_crls(Path)->
delete_crls(Path, normal).
delete_crls(?NO_DIST_POINT_PATH = Path, ManagerType)->
- put(ssl_manager, manager_name(ManagerType)),
+ put(ssl_manager, name(ManagerType)),
cast({delete_crls, Path});
delete_crls(Path, ManagerType)->
- put(ssl_manager, manager_name(ManagerType)),
+ put(ssl_manager, name(ManagerType)),
call({delete_crls, Path}).
%%====================================================================
@@ -255,13 +216,14 @@ delete_crls(Path, ManagerType)->
%%
%% Description: Initiates the server
%%--------------------------------------------------------------------
-init([Name, Opts]) ->
- put(ssl_manager, Name),
+init([ManagerName, PemCacheName, Opts]) ->
+ put(ssl_manager, ManagerName),
+ put(ssl_pem_cache, PemCacheName),
process_flag(trap_exit, true),
CacheCb = proplists:get_value(session_cb, Opts, ssl_session_cache),
SessionLifeTime =
proplists:get_value(session_lifetime, Opts, ?'24H_in_sec'),
- CertDb = ssl_pkix_db:create(),
+ CertDb = ssl_pkix_db:create(PemCacheName),
ClientSessionCache =
CacheCb:init([{role, client} |
proplists:get_value(session_cb_init_args, Opts, [])]),
@@ -270,16 +232,12 @@ init([Name, Opts]) ->
proplists:get_value(session_cb_init_args, Opts, [])]),
Timer = erlang:send_after(SessionLifeTime * 1000 + 5000,
self(), validate_sessions),
- Interval = pem_check_interval(),
- erlang:send_after(Interval, self(), clear_pem_cache),
{ok, #state{certificate_db = CertDb,
session_cache_client = ClientSessionCache,
session_cache_server = ServerSessionCache,
session_cache_cb = CacheCb,
session_lifetime = SessionLifeTime,
session_validation_timer = Timer,
- last_pem_check = os:timestamp(),
- clear_pem_cache = Interval,
session_cache_client_max =
max_session_cache_size(session_cache_client_max),
session_cache_server_max =
@@ -302,18 +260,25 @@ init([Name, Opts]) ->
handle_call({{connection_init, <<>>, Role, {CRLCb, UserCRLDb}}, _Pid}, _From,
#state{certificate_db = [CertDb, FileRefDb, PemChace | _] = Db} = State) ->
Ref = make_ref(),
- Result = {ok, Ref, CertDb, FileRefDb, PemChace,
- session_cache(Role, State), {CRLCb, crl_db_info(Db, UserCRLDb)}},
- {reply, Result, State#state{certificate_db = Db}};
+ {reply, {ok, #{cert_db_ref => Ref,
+ cert_db_handle => CertDb,
+ fileref_db_handle => FileRefDb,
+ pem_cache => PemChace,
+ session_cache => session_cache(Role, State),
+ crl_db_info => {CRLCb, crl_db_info(Db, UserCRLDb)}}}, State};
handle_call({{connection_init, Trustedcerts, Role, {CRLCb, UserCRLDb}}, Pid}, _From,
#state{certificate_db = [CertDb, FileRefDb, PemChace | _] = Db} = State) ->
case add_trusted_certs(Pid, Trustedcerts, Db) of
{ok, Ref} ->
- {reply, {ok, Ref, CertDb, FileRefDb, PemChace, session_cache(Role, State),
- {CRLCb, crl_db_info(Db, UserCRLDb)}}, State};
- {error, _} = Error ->
- {reply, Error, State}
+ {reply, {ok, #{cert_db_ref => Ref,
+ cert_db_handle => CertDb,
+ fileref_db_handle => FileRefDb,
+ pem_cache => PemChace,
+ session_cache => session_cache(Role, State),
+ crl_db_info => {CRLCb, crl_db_info(Db, UserCRLDb)}}}, State};
+ {error, _} = Error ->
+ {reply, Error, State}
end;
handle_call({{insert_crls, Path, CRLs}, _}, _From,
@@ -330,21 +295,7 @@ handle_call({{new_session_id, Port}, _},
_, #state{session_cache_cb = CacheCb,
session_cache_server = Cache} = State) ->
Id = new_id(Port, ?GEN_UNIQUE_ID_MAX_TRIES, Cache, CacheCb),
- {reply, Id, State};
-
-handle_call({{cache_pem,File}, _Pid}, _,
- #state{certificate_db = Db} = State) ->
- try ssl_pkix_db:cache_pem_file(File, Db) of
- Result ->
- {reply, Result, State}
- catch
- _:Reason ->
- {reply, {error, Reason}, State}
- end;
-handle_call({unconditionally_clear_pem_cache, _},_,
- #state{certificate_db = [_,_,PemChace | _]} = State) ->
- ssl_pkix_db:clear(PemChace),
- {reply, ok, State}.
+ {reply, Id, State}.
%%--------------------------------------------------------------------
-spec handle_cast(msg(), #state{}) -> {noreply, #state{}}.
@@ -382,11 +333,6 @@ handle_cast({insert_crls, Path, CRLs},
handle_cast({delete_crls, CRLsOrPath},
#state{certificate_db = Db} = State) ->
ssl_pkix_db:remove_crls(Db, CRLsOrPath),
- {noreply, State};
-
-handle_cast({invalidate_pem, File},
- #state{certificate_db = [_, _, PemCache | _]} = State) ->
- ssl_pkix_db:remove(File, PemCache),
{noreply, State}.
%%--------------------------------------------------------------------
@@ -418,22 +364,14 @@ handle_info({delayed_clean_session, Key, Cache}, #state{session_cache_cb = Cache
CacheCb:delete(Cache, Key),
{noreply, State};
-handle_info(clear_pem_cache, #state{certificate_db = [_,_,PemChace | _],
- clear_pem_cache = Interval,
- last_pem_check = CheckPoint} = State) ->
- NewCheckPoint = os:timestamp(),
- start_pem_cache_validator(PemChace, CheckPoint),
- erlang:send_after(Interval, self(), clear_pem_cache),
- {noreply, State#state{last_pem_check = NewCheckPoint}};
-
handle_info({clean_cert_db, Ref, File},
- #state{certificate_db = [CertDb,RefDb, PemCache | _]} = State) ->
+ #state{certificate_db = [CertDb, {RefDb, FileMapDb} | _]} = State) ->
case ssl_pkix_db:lookup(Ref, RefDb) of
undefined -> %% Alredy cleaned
ok;
_ ->
- clean_cert_db(Ref, CertDb, RefDb, PemCache, File)
+ clean_cert_db(Ref, CertDb, RefDb, FileMapDb, File)
end,
{noreply, State};
@@ -523,14 +461,6 @@ delay_time() ->
?CLEAN_SESSION_DB
end.
-bypass_pem_cache() ->
- case application:get_env(ssl, bypass_pem_cache) of
- {ok, Bool} when is_boolean(Bool) ->
- Bool;
- _ ->
- false
- end.
-
max_session_cache_size(CacheType) ->
case application:get_env(ssl, CacheType) of
{ok, Size} when is_integer(Size) ->
@@ -594,16 +524,11 @@ new_id(Port, Tries, Cache, CacheCb) ->
new_id(Port, Tries - 1, Cache, CacheCb)
end.
-clean_cert_db(Ref, CertDb, RefDb, PemCache, File) ->
+clean_cert_db(Ref, CertDb, RefDb, FileMapDb, File) ->
case ssl_pkix_db:ref_count(Ref, RefDb, 0) of
0 ->
- case ssl_pkix_db:lookup_cached_pem(PemCache, File) of
- [{Content, Ref}] ->
- ssl_pkix_db:insert(File, Content, PemCache);
- _ ->
- ok
- end,
ssl_pkix_db:remove(Ref, RefDb),
+ ssl_pkix_db:remove(File, FileMapDb),
ssl_pkix_db:remove_trusted_certs(Ref, CertDb);
_ ->
ok
@@ -687,42 +612,6 @@ exists_equivalent(#session{
exists_equivalent(Session, [ _ | Rest]) ->
exists_equivalent(Session, Rest).
-start_pem_cache_validator(PemCache, CheckPoint) ->
- spawn_link(?MODULE, init_pem_cache_validator,
- [[get(ssl_manager), PemCache, CheckPoint]]).
-
-init_pem_cache_validator([SslManagerName, PemCache, CheckPoint]) ->
- put(ssl_manager, SslManagerName),
- ssl_pkix_db:foldl(fun pem_cache_validate/2,
- CheckPoint, PemCache).
-
-pem_cache_validate({File, _}, CheckPoint) ->
- case file:read_file_info(File, []) of
- {ok, #file_info{mtime = Time}} ->
- case is_before_checkpoint(Time, CheckPoint) of
- true ->
- ok;
- false ->
- invalidate_pem(File)
- end;
- _ ->
- invalidate_pem(File)
- end,
- CheckPoint.
-
-pem_check_interval() ->
- case application:get_env(ssl, ssl_pem_cache_clean) of
- {ok, Interval} when is_integer(Interval) ->
- Interval;
- _ ->
- ?CLEAR_PEM_CACHE
- end.
-
-is_before_checkpoint(Time, CheckPoint) ->
- calendar:datetime_to_gregorian_seconds(
- calendar:now_to_datetime(CheckPoint)) -
- calendar:datetime_to_gregorian_seconds(Time) > 0.
-
add_trusted_certs(Pid, Trustedcerts, Db) ->
try
ssl_pkix_db:add_trusted_certs(Pid, Trustedcerts, Db)
diff --git a/lib/ssl/src/ssl_pem_cache.erl b/lib/ssl/src/ssl_pem_cache.erl
new file mode 100644
index 0000000000..f63a301f69
--- /dev/null
+++ b/lib/ssl/src/ssl_pem_cache.erl
@@ -0,0 +1,266 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2016-2016. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%----------------------------------------------------------------------
+%% Purpose: Manages the PEM file cache used by the ssl application
+%%----------------------------------------------------------------------
+
+-module(ssl_pem_cache).
+-behaviour(gen_server).
+
+%% Internal application API
+-export([start_link/1,
+ start_link_dist/1,
+ name/1,
+ insert/1,
+ clear/0]).
+
+%% Spawn export
+-export([init_pem_cache_validator/1]).
+
+%% gen_server callbacks
+-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
+ terminate/2, code_change/3]).
+
+-include("ssl_handshake.hrl").
+-include("ssl_internal.hrl").
+-include_lib("kernel/include/file.hrl").
+
+-record(state, {
+ pem_cache,
+ last_pem_check :: erlang:timestamp(),
+ clear :: integer()
+ }).
+
+-define(CLEAR_PEM_CACHE, 120000).
+-define(DEFAULT_MAX_SESSION_CACHE, 1000).
+
+%%====================================================================
+%% API
+%%====================================================================
+
+%%--------------------------------------------------------------------
+-spec name(normal | dist) -> atom().
+%%
+%% Description: Returns the registered name of the ssl cache process
+%% in the operation modes 'normal' and 'dist'.
+%%--------------------------------------------------------------------
+name(normal) ->
+ ?MODULE;
+name(dist) ->
+ list_to_atom(atom_to_list(?MODULE) ++ "dist").
+
+%%--------------------------------------------------------------------
+-spec start_link(list()) -> {ok, pid()} | ignore | {error, term()}.
+%%
+%% Description: Starts the ssl pem cache handler
+%%--------------------------------------------------------------------
+start_link(_) ->
+ CacheName = name(normal),
+ gen_server:start_link({local, CacheName},
+ ?MODULE, [CacheName], []).
+
+%%--------------------------------------------------------------------
+-spec start_link_dist(list()) -> {ok, pid()} | ignore | {error, term()}.
+%%
+%% Description: Starts a special instance of the ssl pem cache to
+%% be used by the Erlang distribution. Note: this disables soft upgrade!
+%%--------------------------------------------------------------------
+start_link_dist(_) ->
+ DistCacheName = name(dist),
+ gen_server:start_link({local, DistCacheName},
+ ?MODULE, [DistCacheName], []).
+
+
+%%--------------------------------------------------------------------
+-spec insert(binary()) -> {ok, term()} | {error, reason()}.
+%%
+%% Description: Cache a pem file and return its content.
+%%--------------------------------------------------------------------
+insert(File) ->
+ {ok, PemBin} = file:read_file(File),
+ Content = public_key:pem_decode(PemBin),
+ case bypass_cache() of
+ true ->
+ {ok, Content};
+ false ->
+ cast({cache_pem, File, Content}),
+ {ok, Content}
+ end.
+
+%%--------------------------------------------------------------------
+-spec clear() -> ok.
+%%
+%% Description: Clear the PEM cache
+%%--------------------------------------------------------------------
+clear() ->
+    %% Not supported for distribution at the moment, should it be?
+ put(ssl_pem_cache, name(normal)),
+ call(unconditionally_clear_pem_cache).
+
+-spec invalidate_pem(File::binary()) -> ok.
+invalidate_pem(File) ->
+ cast({invalidate_pem, File}).
+
+%%====================================================================
+%% gen_server callbacks
+%%====================================================================
+
+%%--------------------------------------------------------------------
+-spec init(list()) -> {ok, #state{}}.
+%% Possible return values not used now.
+%% | {ok, #state{}, timeout()} | ignore | {stop, term()}.
+%%
+%% Description: Initiates the server
+%%--------------------------------------------------------------------
+init([Name]) ->
+ put(ssl_pem_cache, Name),
+ process_flag(trap_exit, true),
+ PemCache = ssl_pkix_db:create_pem_cache(Name),
+ Interval = pem_check_interval(),
+ erlang:send_after(Interval, self(), clear_pem_cache),
+ {ok, #state{pem_cache = PemCache,
+ last_pem_check = os:timestamp(),
+ clear = Interval
+ }}.
+
+%%--------------------------------------------------------------------
+-spec handle_call(msg(), from(), #state{}) -> {reply, reply(), #state{}}.
+%% Possible return values not used now.
+%% {reply, reply(), #state{}, timeout()} |
+%% {noreply, #state{}} |
+%% {noreply, #state{}, timeout()} |
+%% {stop, reason(), reply(), #state{}} |
+%% {stop, reason(), #state{}}.
+%%
+%% Description: Handling call messages
+%%--------------------------------------------------------------------
+handle_call({unconditionally_clear_pem_cache, _},_,
+ #state{pem_cache = PemCache} = State) ->
+ ssl_pkix_db:clear(PemCache),
+ {reply, ok, State}.
+
+%%--------------------------------------------------------------------
+-spec handle_cast(msg(), #state{}) -> {noreply, #state{}}.
+%% Possible return values not used now.
+%% | {noreply, #state{}, timeout()} |
+%% {stop, reason(), #state{}}.
+%%
+%% Description: Handling cast messages
+%%--------------------------------------------------------------------
+handle_cast({cache_pem, File, Content}, #state{pem_cache = Db} = State) ->
+ ssl_pkix_db:insert(File, Content, Db),
+ {noreply, State};
+
+handle_cast({invalidate_pem, File}, #state{pem_cache = Db} = State) ->
+ ssl_pkix_db:remove(File, Db),
+ {noreply, State}.
+
+
+%%--------------------------------------------------------------------
+-spec handle_info(msg(), #state{}) -> {noreply, #state{}}.
+%% Possible return values not used now.
+%% |{noreply, #state{}, timeout()} |
+%% {stop, reason(), #state{}}.
+%%
+%% Description: Handling all non call/cast messages
+%%-------------------------------------------------------------------
+handle_info(clear_pem_cache, #state{pem_cache = PemCache,
+ clear = Interval,
+ last_pem_check = CheckPoint} = State) ->
+ NewCheckPoint = os:timestamp(),
+ start_pem_cache_validator(PemCache, CheckPoint),
+ erlang:send_after(Interval, self(), clear_pem_cache),
+ {noreply, State#state{last_pem_check = NewCheckPoint}};
+
+handle_info(_Info, State) ->
+ {noreply, State}.
+
+%%--------------------------------------------------------------------
+-spec terminate(reason(), #state{}) -> ok.
+%%
+%% Description: This function is called by a gen_server when it is about to
+%% terminate. It should be the opposite of Module:init/1 and do any necessary
+%% cleaning up. When it returns, the gen_server terminates with Reason.
+%% The return value is ignored.
+%%--------------------------------------------------------------------
+terminate(_Reason, #state{}) ->
+ ok.
+
+%%--------------------------------------------------------------------
+-spec code_change(term(), #state{}, list()) -> {ok, #state{}}.
+%%
+%% Description: Convert process state when code is changed
+%%--------------------------------------------------------------------
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
+
+%%--------------------------------------------------------------------
+%%% Internal functions
+%%--------------------------------------------------------------------
+call(Msg) ->
+ gen_server:call(get(ssl_pem_cache), {Msg, self()}, infinity).
+
+cast(Msg) ->
+ gen_server:cast(get(ssl_pem_cache), Msg).
+
+start_pem_cache_validator(PemCache, CheckPoint) ->
+ spawn_link(?MODULE, init_pem_cache_validator,
+ [[get(ssl_pem_cache), PemCache, CheckPoint]]).
+
+init_pem_cache_validator([CacheName, PemCache, CheckPoint]) ->
+ put(ssl_pem_cache, CacheName),
+ ssl_pkix_db:foldl(fun pem_cache_validate/2,
+ CheckPoint, PemCache).
+
+pem_cache_validate({File, _}, CheckPoint) ->
+ case file:read_file_info(File, []) of
+ {ok, #file_info{mtime = Time}} ->
+ case is_before_checkpoint(Time, CheckPoint) of
+ true ->
+ ok;
+ false ->
+ invalidate_pem(File)
+ end;
+ _ ->
+ invalidate_pem(File)
+ end,
+ CheckPoint.
+
+is_before_checkpoint(Time, CheckPoint) ->
+ calendar:datetime_to_gregorian_seconds(
+ calendar:now_to_datetime(CheckPoint)) -
+ calendar:datetime_to_gregorian_seconds(Time) > 0.
+
+pem_check_interval() ->
+ case application:get_env(ssl, ssl_pem_cache_clean) of
+ {ok, Interval} when is_integer(Interval) ->
+ Interval;
+ _ ->
+ ?CLEAR_PEM_CACHE
+ end.
+
+bypass_cache() ->
+ case application:get_env(ssl, bypass_pem_cache) of
+ {ok, Bool} when is_boolean(Bool) ->
+ Bool;
+ _ ->
+ false
+ end.
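
The new process reads two knobs from the ssl application environment: bypass_pem_cache (handled by bypass_cache/0 above, which skips the cache so PEM files are re-read on every use) and ssl_pem_cache_clean (the revalidation interval picked up by pem_check_interval/0). A minimal sketch, not part of the patch, of setting both before ssl is started; the module name and values are illustrative:

%% Sketch only: the two knobs are independent and shown together for
%% illustration.
-module(pem_cache_conf_sketch).
-export([start_ssl_with_tuned_pem_cache/0]).

start_ssl_with_tuned_pem_cache() ->
    application:load(ssl),
    %% Skip the cache entirely; PEM files are then decoded on every use.
    ok = application:set_env(ssl, bypass_pem_cache, true),
    %% Revalidate cached entries every 60 s instead of the 120000 ms default.
    ok = application:set_env(ssl, ssl_pem_cache_clean, 60000),
    ssl:start().
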
diff --git a/lib/ssl/src/ssl_pkix_db.erl b/lib/ssl/src/ssl_pkix_db.erl
index b4299969e4..cde05bb16f 100644
--- a/lib/ssl/src/ssl_pkix_db.erl
+++ b/lib/ssl/src/ssl_pkix_db.erl
@@ -28,11 +28,11 @@
-include_lib("public_key/include/public_key.hrl").
-include_lib("kernel/include/file.hrl").
--export([create/0, add_crls/3, remove_crls/2, remove/1, add_trusted_certs/3,
+-export([create/1, create_pem_cache/1,
+ add_crls/3, remove_crls/2, remove/1, add_trusted_certs/3,
extract_trusted_certs/1,
remove_trusted_certs/2, insert/3, remove/2, clear/1, db_size/1,
ref_count/3, lookup_trusted_cert/4, foldl/3, select_cert_by_issuer/2,
- lookup_cached_pem/2, cache_pem_file/2, cache_pem_file/3,
decode_pem_file/1, lookup/2]).
%%====================================================================
@@ -40,25 +40,31 @@
%%====================================================================
%%--------------------------------------------------------------------
--spec create() -> [db_handle(),...].
+-spec create(atom()) -> [db_handle(),...].
%%
%% Description: Creates a new certificate db.
%% Note: lookup_trusted_cert/4 may be called from any process but only
%% the process that called create may call the other functions.
%%--------------------------------------------------------------------
-create() ->
+create(PEMCacheName) ->
[%% Let connection process delete trusted certs
%% that can only belong to one connection. (Supplied directly
%% on DER format to ssl:connect/listen.)
ets:new(ssl_otp_cacertificate_db, [set, public]),
%% Let connection processes call ref_count/3 directly
- ets:new(ssl_otp_ca_file_ref, [set, public]),
- ets:new(ssl_otp_pem_cache, [set, protected]),
+ {ets:new(ssl_otp_ca_file_ref, [set, public]),
+ ets:new(ssl_otp_ca_ref_file_mapping, [set, protected])
+ },
+ %% Lookups in named table owned by ssl_pem_cache process
+ PEMCacheName,
%% Default cache
{ets:new(ssl_otp_crl_cache, [set, protected]),
ets:new(ssl_otp_crl_issuer_mapping, [bag, protected])}
].
+create_pem_cache(Name) ->
+ ets:new(Name, [named_table, set, protected]).
+
%%--------------------------------------------------------------------
-spec remove([db_handle()]) -> ok.
%%
@@ -70,6 +76,10 @@ remove(Dbs) ->
true = ets:delete(Db1);
(undefined) ->
ok;
+ (ssl_pem_cache) ->
+ ok;
+ (ssl_pem_cache_dist) ->
+ ok;
(Db) ->
true = ets:delete(Db)
end, Dbs).
@@ -101,11 +111,6 @@ lookup_trusted_cert(_DbHandle, {extracted,Certs}, SerialNumber, Issuer) ->
{ok, Cert}
end.
-lookup_cached_pem([_, _, PemChache | _], File) ->
- lookup_cached_pem(PemChache, File);
-lookup_cached_pem(PemChache, File) ->
- lookup(File, PemChache).
-
%%--------------------------------------------------------------------
-spec add_trusted_certs(pid(), {erlang:timestamp(), string()} |
{der, list()}, [db_handle()]) -> {ok, [db_handle()]}.
@@ -122,17 +127,11 @@ add_trusted_certs(_Pid, {der, DerList}, [CertDb, _,_ | _]) ->
add_certs_from_der(DerList, NewRef, CertDb),
{ok, NewRef};
-add_trusted_certs(_Pid, File, [CertsDb, RefDb, PemChache | _] = Db) ->
- case lookup_cached_pem(Db, File) of
- [{_Content, Ref}] ->
+add_trusted_certs(_Pid, File, [ _, {RefDb, FileMapDb} | _] = Db) ->
+ case lookup(File, FileMapDb) of
+ [Ref] ->
ref_count(Ref, RefDb, 1),
{ok, Ref};
- [Content] ->
- Ref = make_ref(),
- update_counter(Ref, 1, RefDb),
- insert(File, {Content, Ref}, PemChache),
- add_certs_from_pem(Content, Ref, CertsDb),
- {ok, Ref};
undefined ->
new_trusted_cert_entry(File, Db)
end.
@@ -151,25 +150,6 @@ extract_trusted_certs(File) ->
{error, {badmatch, Error}}
end.
-%%--------------------------------------------------------------------
-%%
-%% Description: Cache file as binary in DB
-%%--------------------------------------------------------------------
--spec cache_pem_file(binary(), [db_handle()]) -> {ok, term()}.
-cache_pem_file(File, [_CertsDb, _RefDb, PemChache | _]) ->
- {ok, PemBin} = file:read_file(File),
- Content = public_key:pem_decode(PemBin),
- insert(File, Content, PemChache),
- {ok, Content}.
-
-
--spec cache_pem_file(reference(), binary(), [db_handle()]) -> {ok, term()}.
-cache_pem_file(Ref, File, [_CertsDb, _RefDb, PemChache| _]) ->
- {ok, PemBin} = file:read_file(File),
- Content = public_key:pem_decode(PemBin),
- insert(File, {Content, Ref}, PemChache),
- {ok, Content}.
-
-spec decode_pem_file(binary()) -> {ok, term()}.
decode_pem_file(File) ->
case file:read_file(File) of
@@ -246,6 +226,8 @@ select_cert_by_issuer(Cache, Issuer) ->
%%--------------------------------------------------------------------
ref_count({extracted, _}, _Db, _N) ->
not_cached;
+ref_count(Key, {Db, _}, N) ->
+ ref_count(Key, Db, N);
ref_count(Key, Db, N) ->
ets:update_counter(Db,Key,N).
@@ -278,9 +260,9 @@ insert(Key, Data, Db) ->
%%--------------------------------------------------------------------
%%% Internal functions
%%--------------------------------------------------------------------
-update_counter(Key, Count, Db) ->
- true = ets:insert(Db, {Key, Count}),
- ok.
+init_ref_db(Ref, File, {RefDb, FileMapDb}) ->
+ true = ets:insert(RefDb, {Ref, 1}),
+ true = ets:insert(FileMapDb, {File, Ref}).
remove_certs(Ref, CertsDb) ->
true = ets:match_delete(CertsDb, {{Ref, '_', '_'}, '_'}),
@@ -326,10 +308,10 @@ decode_certs(Ref, Cert) ->
undefined
end.
-new_trusted_cert_entry(File, [CertsDb, RefDb, _ | _] = Db) ->
+new_trusted_cert_entry(File, [CertsDb, RefsDb, _ | _]) ->
Ref = make_ref(),
- update_counter(Ref, 1, RefDb),
- {ok, Content} = cache_pem_file(Ref, File, Db),
+ init_ref_db(Ref, File, RefsDb),
+ {ok, Content} = ssl_pem_cache:insert(File),
add_certs_from_pem(Content, Ref, CertsDb),
{ok, Ref}.
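
The reference bookkeeping now pairs the reference-count table with a file-to-reference mapping, {RefDb, FileMapDb}, so the reference no longer has to be stashed inside the cached PEM entry. A simplified, self-contained sketch of what init_ref_db/3, add_trusted_certs/3 and clean_cert_db/5 do above; module and table names are hypothetical and the PEM content itself is left out:

-module(ref_map_sketch).
-export([demo/0]).

demo() ->
    RefDb     = ets:new(ca_file_ref, [set, public]),
    FileMapDb = ets:new(ca_ref_file_mapping, [set, public]),
    File = <<"cacerts.pem">>,
    Ref  = make_ref(),
    %% First user of File: create both entries (cf. init_ref_db/3).
    true = ets:insert(RefDb, {Ref, 1}),
    true = ets:insert(FileMapDb, {File, Ref}),
    %% A second user of the same file only bumps the reference count.
    2 = ets:update_counter(RefDb, Ref, 1),
    %% Users go away; at zero both entries are removed (cf. clean_cert_db/5).
    1 = ets:update_counter(RefDb, Ref, -1),
    case ets:update_counter(RefDb, Ref, -1) of
        0 ->
            true = ets:delete(RefDb, Ref),
            true = ets:delete(FileMapDb, File),
            cleaned;
        _ ->
            still_referenced
    end.
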
diff --git a/lib/ssl/src/ssl_record.erl b/lib/ssl/src/ssl_record.erl
index b10069c3cb..539e189c4f 100644
--- a/lib/ssl/src/ssl_record.erl
+++ b/lib/ssl/src/ssl_record.erl
@@ -67,7 +67,7 @@
connection_state().
%%
%% Description: Returns the instance of the connection_state map
-%% that is currently defined as the current conection state.
+%% that is currently defined as the current connection state.
%%--------------------------------------------------------------------
current_connection_state(ConnectionStates, read) ->
maps:get(current_read, ConnectionStates);
@@ -79,7 +79,7 @@ current_connection_state(ConnectionStates, write) ->
connection_state().
%%
%% Description: Returns the instance of the connection_state map
-%% that is pendingly defined as the pending conection state.
+%% that is pendingly defined as the pending connection state.
%%--------------------------------------------------------------------
pending_connection_state(ConnectionStates, read) ->
maps:get(pending_read, ConnectionStates);
diff --git a/lib/ssl/src/ssl_sup.erl b/lib/ssl/src/ssl_sup.erl
index 8245801139..05a7aaaa82 100644
--- a/lib/ssl/src/ssl_sup.erl
+++ b/lib/ssl/src/ssl_sup.erl
@@ -25,7 +25,7 @@
-behaviour(supervisor).
%% API
--export([start_link/0, manager_opts/0]).
+-export([start_link/0]).
%% Supervisor callback
-export([init/1]).
@@ -44,90 +44,28 @@ start_link() ->
%%%=========================================================================
init([]) ->
- SessionCertManager = session_and_cert_manager_child_spec(),
- TLSConnetionManager = tls_connection_manager_child_spec(),
- %% Handles emulated options so that they inherited by the accept
- %% socket, even when setopts is performed on the listen socket
- ListenOptionsTracker = listen_options_tracker_child_spec(),
-
- DTLSConnetionManager = dtls_connection_manager_child_spec(),
- DTLSUdpListeners = dtls_udp_listeners_spec(),
+ {ok, {{rest_for_one, 10, 3600}, [ssl_admin_child_spec(),
+ ssl_connection_sup()
+ ]}}.
- {ok, {{one_for_all, 10, 3600}, [SessionCertManager, TLSConnetionManager,
- ListenOptionsTracker,
- DTLSConnetionManager, DTLSUdpListeners
- ]}}.
-
-
-manager_opts() ->
- CbOpts = case application:get_env(ssl, session_cb) of
- {ok, Cb} when is_atom(Cb) ->
- InitArgs = session_cb_init_args(),
- [{session_cb, Cb}, {session_cb_init_args, InitArgs}];
- _ ->
- []
- end,
- case application:get_env(ssl, session_lifetime) of
- {ok, Time} when is_integer(Time) ->
- [{session_lifetime, Time}| CbOpts];
- _ ->
- CbOpts
- end.
-
%%--------------------------------------------------------------------
%%% Internal functions
%%--------------------------------------------------------------------
-
-session_and_cert_manager_child_spec() ->
- Opts = manager_opts(),
- Name = ssl_manager,
- StartFunc = {ssl_manager, start_link, [Opts]},
+ssl_admin_child_spec() ->
+ Name = ssl_admin_sup,
+ StartFunc = {ssl_admin_sup, start_link, []},
Restart = permanent,
Shutdown = 4000,
- Modules = [ssl_manager],
- Type = worker,
- {Name, StartFunc, Restart, Shutdown, Type, Modules}.
-
-tls_connection_manager_child_spec() ->
- Name = tls_connection,
- StartFunc = {tls_connection_sup, start_link, []},
- Restart = permanent,
- Shutdown = 4000,
- Modules = [tls_connection_sup],
+ Modules = [ssl_admin_sup],
Type = supervisor,
{Name, StartFunc, Restart, Shutdown, Type, Modules}.
-dtls_connection_manager_child_spec() ->
- Name = dtls_connection,
- StartFunc = {dtls_connection_sup, start_link, []},
+ssl_connection_sup() ->
+ Name = ssl_connection_sup,
+ StartFunc = {ssl_connection_sup, start_link, []},
Restart = permanent,
Shutdown = 4000,
- Modules = [dtls_connection_sup],
- Type = supervisor,
- {Name, StartFunc, Restart, Shutdown, Type, Modules}.
-
-listen_options_tracker_child_spec() ->
- Name = tls_socket,
- StartFunc = {ssl_listen_tracker_sup, start_link, []},
- Restart = permanent,
- Shutdown = 4000,
- Modules = [tls_socket],
- Type = supervisor,
- {Name, StartFunc, Restart, Shutdown, Type, Modules}.
-
-dtls_udp_listeners_spec() ->
- Name = dtls_udp_listener,
- StartFunc = {dtls_udp_sup, start_link, []},
- Restart = permanent,
- Shutdown = 4000,
- Modules = [],
+ Modules = [ssl_connection_sup],
Type = supervisor,
{Name, StartFunc, Restart, Shutdown, Type, Modules}.
-session_cb_init_args() ->
- case application:get_env(ssl, session_cb_init_args) of
- {ok, Args} when is_list(Args) ->
- Args;
- _ ->
- []
- end.
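
The top-level supervisor now starts only an admin supervisor and a connection supervisor, in that order, under a rest_for_one strategy: if the admin side (manager and PEM cache) is restarted, the connection supervisors that depend on it are restarted too, but not the other way around. A compact restatement of the resulting tree as a sketch; ssl_admin_sup and ssl_connection_sup are the modules named in the child specs above, and their definitions are not shown in this hunk:

-module(ssl_sup_sketch).
-behaviour(supervisor).
-export([start_link/0, init/1]).

start_link() ->
    supervisor:start_link({local, ?MODULE}, ?MODULE, []).

init([]) ->
    AdminSup = {ssl_admin_sup, {ssl_admin_sup, start_link, []},
                permanent, 4000, supervisor, [ssl_admin_sup]},
    ConnSup  = {ssl_connection_sup, {ssl_connection_sup, start_link, []},
                permanent, 4000, supervisor, [ssl_connection_sup]},
    {ok, {{rest_for_one, 10, 3600}, [AdminSup, ConnSup]}}.
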
diff --git a/lib/ssl/src/tls_connection.erl b/lib/ssl/src/tls_connection.erl
index 32991d3079..c6e530e164 100644
--- a/lib/ssl/src/tls_connection.erl
+++ b/lib/ssl/src/tls_connection.erl
@@ -48,7 +48,7 @@
-export([encode_data/3, encode_alert/3]).
%% State transition handling
--export([next_record/1, next_event/3]).
+-export([next_record/1, next_event/3, next_event/4]).
%% Handshake handling
-export([renegotiate/2, send_handshake/2,
@@ -59,7 +59,8 @@
-export([send_alert/2, close/5]).
%% Data handling
--export([passive_receive/2, next_record_if_active/1, handle_common_event/4, send/3]).
+-export([passive_receive/2, next_record_if_active/1, handle_common_event/4, send/3,
+ socket/5]).
%% gen_statem state functions
-export([init/3, error/3, downgrade/3, %% Initiation and take down states
@@ -117,7 +118,7 @@ send_handshake_flight(#state{socket = Socket,
transport_cb = Transport,
flight_buffer = Flight} = State0) ->
send(Transport, Socket, Flight),
- State0#state{flight_buffer = []}.
+ {State0#state{flight_buffer = []}, []}.
queue_change_cipher(Msg, #state{negotiated_version = Version,
flight_buffer = Flight0,
@@ -191,6 +192,10 @@ init([Role, Host, Port, Socket, Options, User, CbInfo]) ->
callback_mode() ->
state_functions.
+socket(Pid, Transport, Socket, Connection, Tracker) ->
+ tls_socket:socket(Pid, Transport, Socket, Connection, Tracker).
+
+
%%--------------------------------------------------------------------
%% State functions
%%--------------------------------------------------------------------
@@ -340,12 +345,12 @@ connection(internal, #hello_request{},
renegotiation = {Renegotiation, _}} = State0) ->
Hello = tls_handshake:client_hello(Host, Port, ConnectionStates0, SslOpts,
Cache, CacheCb, Renegotiation, Cert),
- State1 = send_handshake(Hello, State0),
+ {State1, Actions} = send_handshake(Hello, State0),
{Record, State} =
next_record(
State1#state{session = Session0#session{session_id
= Hello#client_hello.session_id}}),
- next_event(hello, Record, State);
+ next_event(hello, Record, State, Actions);
connection(internal, #client_hello{} = Hello,
#state{role = server, allow_renegotiate = true} = State0) ->
%% Mitigate Computational DoS attack
@@ -424,18 +429,26 @@ handle_common_event(internal, #ssl_tls{type = ?HANDSHAKE, fragment = Data},
ssl_options = Options} = State0) ->
try
{Packets, Buf} = tls_handshake:get_tls_handshake(Version,Data,Buf0, Options),
- State =
+ State1 =
State0#state{protocol_buffers =
Buffers#protocol_buffers{tls_handshake_buffer = Buf}},
- Events = tls_handshake_events(Packets),
- case StateName of
- connection ->
- ssl_connection:hibernate_after(StateName, State, Events);
- _ ->
- {next_state, StateName, State#state{unprocessed_handshake_events = unprocessed_events(Events)}, Events}
- end
+ case Packets of
+ [] ->
+ assert_buffer_sanity(Buf, Options),
+ {Record, State} = next_record(State1),
+ next_event(StateName, Record, State);
+ _ ->
+ Events = tls_handshake_events(Packets),
+ case StateName of
+ connection ->
+ ssl_connection:hibernate_after(StateName, State1, Events);
+ _ ->
+ {next_state, StateName,
+ State1#state{unprocessed_handshake_events = unprocessed_events(Events)}, Events}
+ end
+ end
catch throw:#alert{} = Alert ->
- ssl_connection:handle_own_alert(Alert, Version, StateName, State0)
+ ssl_connection:handle_own_alert(Alert, Version, StateName, State0)
end;
%%% TLS record protocol level application data messages
handle_common_event(internal, #ssl_tls{type = ?APPLICATION_DATA, fragment = Data}, StateName, State) ->
@@ -615,8 +628,6 @@ next_event(StateName, Record, State, Actions) ->
{next_state, StateName, State, [{next_event, internal, Alert} | Actions]}
end.
-tls_handshake_events([]) ->
- throw(?ALERT_REC(?FATAL, ?HANDSHAKE_FAILURE, malformed_handshake));
tls_handshake_events(Packets) ->
lists:map(fun(Packet) ->
{next_event, internal, {handshake, Packet}}
@@ -735,3 +746,25 @@ unprocessed_events(Events) ->
%% handshake events left to process before we should
%% process more TLS-records received on the socket.
erlang:length(Events)-1.
+
+
+assert_buffer_sanity(<<?BYTE(_Type), ?UINT24(Length), Rest/binary>>, #ssl_options{max_handshake_size = Max}) when
+ Length =< Max ->
+ case size(Rest) of
+ N when N < Length ->
+ true;
+ N when N > Length ->
+ throw(?ALERT_REC(?FATAL, ?HANDSHAKE_FAILURE,
+ too_big_handshake_data));
+ _ ->
+ throw(?ALERT_REC(?FATAL, ?HANDSHAKE_FAILURE,
+ malformed_handshake_data))
+ end;
+assert_buffer_sanity(Bin, _) ->
+ case size(Bin) of
+ N when N < 3 ->
+ true;
+ _ ->
+ throw(?ALERT_REC(?FATAL, ?HANDSHAKE_FAILURE,
+ malformed_handshake_data))
+ end.
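
assert_buffer_sanity/2 covers the case where get_tls_handshake/4 returned no complete message: whatever stays buffered must still look like the prefix of a handshake message whose announced length fits within max_handshake_size. A standalone restatement as a sketch, with the ?BYTE and ?UINT24 macros expanded into plain bit syntax and the handshake_failure alerts reduced to a boolean:

%% true means "plausible prefix, keep buffering"; false stands in for the
%% alerts thrown above.
buffer_plausible(<<_Type:8, Length:24, Rest/binary>>, MaxHandshakeSize)
  when Length =< MaxHandshakeSize ->
    %% Only an incomplete message may legitimately remain in the buffer.
    byte_size(Rest) < Length;
buffer_plausible(Bin, _MaxHandshakeSize) ->
    %% Mirrors the N < 3 check above: very short prefixes are tolerated,
    %% anything longer without a sane header is rejected.
    byte_size(Bin) < 3.
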
diff --git a/lib/ssl/src/tls_handshake.erl b/lib/ssl/src/tls_handshake.erl
index 2800ee6537..5726561865 100644
--- a/lib/ssl/src/tls_handshake.erl
+++ b/lib/ssl/src/tls_handshake.erl
@@ -88,7 +88,7 @@ client_hello(Host, Port, ConnectionStates,
#hello_extensions{}, {ssl_cipher:hash(), ssl_cipher:sign_algo()} | undefined} |
#alert{}.
%%
-%% Description: Handles a recieved hello message
+%% Description: Handles a received hello message
%%--------------------------------------------------------------------
hello(#server_hello{server_version = Version, random = Random,
cipher_suite = CipherSuite,
@@ -192,7 +192,8 @@ handle_client_hello(Version, #client_hello{session_id = SugesstedId,
end.
get_tls_handshake_aux(Version, <<?BYTE(Type), ?UINT24(Length),
- Body:Length/binary,Rest/binary>>, #ssl_options{v2_hello_compatible = V2Hello} = Opts, Acc) ->
+ Body:Length/binary,Rest/binary>>,
+ #ssl_options{v2_hello_compatible = V2Hello} = Opts, Acc) ->
Raw = <<?BYTE(Type), ?UINT24(Length), Body/binary>>,
try decode_handshake(Version, Type, Body, V2Hello) of
Handshake ->
@@ -207,27 +208,17 @@ get_tls_handshake_aux(_Version, Data, _, Acc) ->
decode_handshake(_, ?HELLO_REQUEST, <<>>, _) ->
#hello_request{};
-%% Client hello v2.
-%% The server must be able to receive such messages, from clients that
-%% are willing to use ssl v3 or higher, but have ssl v2 compatibility.
-decode_handshake(_Version, ?CLIENT_HELLO, <<?BYTE(Major), ?BYTE(Minor),
- ?UINT16(CSLength), ?UINT16(0),
- ?UINT16(CDLength),
- CipherSuites:CSLength/binary,
- ChallengeData:CDLength/binary>>, true) ->
- #client_hello{client_version = {Major, Minor},
- random = ssl_v2:client_random(ChallengeData, CDLength),
- session_id = 0,
- cipher_suites = ssl_handshake:decode_suites('3_bytes', CipherSuites),
- compression_methods = [?NULL],
- extensions = #hello_extensions{}
- };
-decode_handshake(_Version, ?CLIENT_HELLO, <<?BYTE(_), ?BYTE(_),
- ?UINT16(CSLength), ?UINT16(0),
- ?UINT16(CDLength),
- _CipherSuites:CSLength/binary,
- _ChallengeData:CDLength/binary>>, false) ->
- throw(?ALERT_REC(?FATAL, ?PROTOCOL_VERSION, ssl_v2_client_hello_no_supported));
+decode_handshake(_Version, ?CLIENT_HELLO, Bin, true) ->
+ try decode_hello(Bin) of
+ Hello ->
+ Hello
+ catch
+ _:_ ->
+ decode_v2_hello(Bin)
+ end;
+decode_handshake(_Version, ?CLIENT_HELLO, Bin, false) ->
+ decode_hello(Bin);
+
decode_handshake(_Version, ?CLIENT_HELLO, <<?BYTE(Major), ?BYTE(Minor), Random:32/binary,
?BYTE(SID_length), Session_ID:SID_length/binary,
?UINT16(Cs_length), CipherSuites:Cs_length/binary,
@@ -244,10 +235,40 @@ decode_handshake(_Version, ?CLIENT_HELLO, <<?BYTE(Major), ?BYTE(Minor), Random:3
compression_methods = Comp_methods,
extensions = DecodedExtensions
};
-
decode_handshake(Version, Tag, Msg, _) ->
ssl_handshake:decode_handshake(Version, Tag, Msg).
+
+decode_hello(<<?BYTE(Major), ?BYTE(Minor), Random:32/binary,
+ ?BYTE(SID_length), Session_ID:SID_length/binary,
+ ?UINT16(Cs_length), CipherSuites:Cs_length/binary,
+ ?BYTE(Cm_length), Comp_methods:Cm_length/binary,
+ Extensions/binary>>) ->
+ DecodedExtensions = ssl_handshake:decode_hello_extensions({client, Extensions}),
+
+ #client_hello{
+ client_version = {Major,Minor},
+ random = Random,
+ session_id = Session_ID,
+ cipher_suites = ssl_handshake:decode_suites('2_bytes', CipherSuites),
+ compression_methods = Comp_methods,
+ extensions = DecodedExtensions
+ }.
+%% The server must be able to receive such messages, from clients that
+%% are willing to use ssl v3 or higher, but have ssl v2 compatibility.
+decode_v2_hello(<<?BYTE(Major), ?BYTE(Minor),
+ ?UINT16(CSLength), ?UINT16(0),
+ ?UINT16(CDLength),
+ CipherSuites:CSLength/binary,
+ ChallengeData:CDLength/binary>>) ->
+ #client_hello{client_version = {Major, Minor},
+ random = ssl_v2:client_random(ChallengeData, CDLength),
+ session_id = 0,
+ cipher_suites = ssl_handshake:decode_suites('3_bytes', CipherSuites),
+ compression_methods = [?NULL],
+ extensions = #hello_extensions{}
+ }.
+
enc_handshake(#hello_request{}, _Version) ->
{?HELLO_REQUEST, <<>>};
enc_handshake(#client_hello{client_version = {Major, Minor},
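
With v2_hello_compatible enabled the decoder is now optimistic: it first tries the ordinary ClientHello layout and falls back to the SSLv2-style layout only when that parse fails, instead of committing to the v2 interpretation whenever the leading bytes happen to fit the v2 pattern (the situation exercised by decode_hello_handshake_version_confusion further down). The general shape, as a sketch with placeholder funs standing in for decode_hello/1 and decode_v2_hello/1:

decode_with_fallback(Bin, Primary, Fallback) ->
    try
        Primary(Bin)
    catch
        _:_ ->
            Fallback(Bin)
    end.
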
diff --git a/lib/ssl/src/tls_v1.erl b/lib/ssl/src/tls_v1.erl
index 7f24ce5192..f52ee06e71 100644
--- a/lib/ssl/src/tls_v1.erl
+++ b/lib/ssl/src/tls_v1.erl
@@ -204,21 +204,21 @@ suites(Minor) when Minor == 1; Minor == 2 ->
?TLS_ECDH_RSA_WITH_AES_256_CBC_SHA,
?TLS_RSA_WITH_AES_256_CBC_SHA,
- ?TLS_ECDHE_ECDSA_WITH_3DES_EDE_CBC_SHA,
- ?TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA,
- ?TLS_DHE_RSA_WITH_3DES_EDE_CBC_SHA,
- ?TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA,
- ?TLS_ECDH_ECDSA_WITH_3DES_EDE_CBC_SHA,
- ?TLS_ECDH_RSA_WITH_3DES_EDE_CBC_SHA,
- ?TLS_RSA_WITH_3DES_EDE_CBC_SHA,
-
?TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
?TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,
?TLS_DHE_RSA_WITH_AES_128_CBC_SHA,
?TLS_DHE_DSS_WITH_AES_128_CBC_SHA,
?TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA,
?TLS_ECDH_RSA_WITH_AES_128_CBC_SHA,
- ?TLS_RSA_WITH_AES_128_CBC_SHA
+ ?TLS_RSA_WITH_AES_128_CBC_SHA,
+
+ ?TLS_ECDHE_ECDSA_WITH_3DES_EDE_CBC_SHA,
+ ?TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA,
+ ?TLS_DHE_RSA_WITH_3DES_EDE_CBC_SHA,
+ ?TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA,
+ ?TLS_ECDH_ECDSA_WITH_3DES_EDE_CBC_SHA,
+ ?TLS_ECDH_RSA_WITH_3DES_EDE_CBC_SHA,
+ ?TLS_RSA_WITH_3DES_EDE_CBC_SHA
];
suites(3) ->
[
@@ -407,7 +407,7 @@ is_pair(Hash, rsa, Hashs) ->
AtLeastMd5 = Hashs -- [md2,md4],
lists:member(Hash, AtLeastMd5).
-%% list ECC curves in prefered order
+%% list ECC curves in preferred order
-spec ecc_curves(1..3 | all) -> [named_curve()].
ecc_curves(all) ->
[sect571r1,sect571k1,secp521r1,brainpoolP512r1,
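
The reordering above moves every 3DES suite below the AES-128 suites in the default preference list, where they previously sat between the AES-256 and AES-128 blocks. Applications that want a different ranking can still pass an explicit {ciphers, ...} option; a sketch that simply drops 3DES from the defaults, assuming the tuple suite representation returned by ssl:cipher_suites/0:

%% Sketch: element 2 of a suite tuple is the bulk cipher.
no_3des_suites() ->
    [Suite || Suite <- ssl:cipher_suites(),
              element(2, Suite) =/= '3des_ede_cbc'].
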
diff --git a/lib/ssl/test/make_certs.erl b/lib/ssl/test/make_certs.erl
index d85be6c69e..e14f7f60c4 100644
--- a/lib/ssl/test/make_certs.erl
+++ b/lib/ssl/test/make_certs.erl
@@ -172,8 +172,8 @@ revoke(Root, CA, User, C) ->
gencrl(Root, CA, C).
gencrl(Root, CA, C) ->
- %% By default, the CRL is valid for 24 hours from now.
- gencrl(Root, CA, C, 24).
+ %% By default, the CRL is valid for a week from now.
+ gencrl(Root, CA, C, 24*7).
gencrl(Root, CA, C, CrlHours) ->
CACnfFile = filename:join([Root, CA, "ca.cnf"]),
diff --git a/lib/ssl/test/ssl_basic_SUITE.erl b/lib/ssl/test/ssl_basic_SUITE.erl
index 52c1af5b4c..bff6d254f1 100644
--- a/lib/ssl/test/ssl_basic_SUITE.erl
+++ b/lib/ssl/test/ssl_basic_SUITE.erl
@@ -53,7 +53,8 @@ all() ->
{group, options_tls},
{group, session},
{group, 'dtlsv1.2'},
- %%{group, 'dtlsv1'},
+     %% {group, 'dtlsv1'}, Breaks DTLS in cert_verify_SUITE; enable later when
+     %% the problem is identified and fixed
{group, 'tlsv1.2'},
{group, 'tlsv1.1'},
{group, 'tlsv1'},
@@ -65,15 +66,15 @@ groups() ->
{basic_tls, [], basic_tests_tls()},
{options, [], options_tests()},
{options_tls, [], options_tests_tls()},
- %%{'dtlsv1.2', [], all_versions_groups()},
- {'dtlsv1.2', [], [connection_information]},
- %%{'dtlsv1', [], all_versions_groups()},
+ {'dtlsv1.2', [], all_versions_groups()},
+ {'dtlsv1', [], all_versions_groups()},
{'tlsv1.2', [], all_versions_groups() ++ tls_versions_groups() ++ [conf_signature_algs, no_common_signature_algs]},
{'tlsv1.1', [], all_versions_groups() ++ tls_versions_groups()},
{'tlsv1', [], all_versions_groups() ++ tls_versions_groups() ++ rizzo_tests()},
{'sslv3', [], all_versions_groups() ++ tls_versions_groups() ++ rizzo_tests() ++ [tls_ciphersuite_vs_version]},
{api,[], api_tests()},
{api_tls,[], api_tests_tls()},
+ {tls_ciphers,[], tls_cipher_tests()},
{session, [], session_tests()},
{renegotiate, [], renegotiate_tests()},
{ciphers, [], cipher_tests()},
@@ -83,12 +84,13 @@ groups() ->
].
tls_versions_groups ()->
- [{group, api_tls},
+    [{group, renegotiate}, %% Should be in all_versions_groups; not fixed for DTLS yet
+ {group, api_tls},
+ {group, tls_ciphers},
{group, error_handling_tests_tls}].
all_versions_groups ()->
[{group, api},
- {group, renegotiate},
{group, ciphers},
{group, ciphers_ec},
{group, error_handling_tests}].
@@ -136,7 +138,8 @@ options_tests() ->
honor_server_cipher_order,
honor_client_cipher_order,
unordered_protocol_versions_server,
- unordered_protocol_versions_client
+ unordered_protocol_versions_client,
+ max_handshake_size
].
options_tests_tls() ->
@@ -146,10 +149,8 @@ options_tests_tls() ->
api_tests() ->
[connection_info,
connection_information,
- peername,
peercert,
peercert_with_client_cert,
- sockname,
versions,
eccs,
controlling_process,
@@ -161,7 +162,6 @@ api_tests() ->
ssl_recv_timeout,
server_name_indication_option,
accept_pool,
- new_options_in_accept,
prf
].
@@ -174,7 +174,10 @@ api_tests_tls() ->
tls_shutdown,
tls_shutdown_write,
tls_shutdown_both,
- tls_shutdown_error
+ tls_shutdown_error,
+ peername,
+ sockname,
+ new_options_in_accept
].
session_tests() ->
@@ -196,6 +199,11 @@ renegotiate_tests() ->
renegotiate_dos_mitigate_passive,
renegotiate_dos_mitigate_absolute].
+tls_cipher_tests() ->
+ [rc4_rsa_cipher_suites,
+ rc4_ecdh_rsa_cipher_suites,
+ rc4_ecdsa_cipher_suites].
+
cipher_tests() ->
[cipher_suites,
cipher_suites_mix,
@@ -211,9 +219,6 @@ cipher_tests() ->
srp_cipher_suites,
srp_anon_cipher_suites,
srp_dsa_cipher_suites,
- rc4_rsa_cipher_suites,
- rc4_ecdh_rsa_cipher_suites,
- rc4_ecdsa_cipher_suites,
des_rsa_cipher_suites,
des_ecdh_rsa_cipher_suites,
default_reject_anonymous].
@@ -225,15 +230,15 @@ cipher_tests_ec() ->
ciphers_ecdh_rsa_signed_certs_openssl_names].
error_handling_tests()->
- [controller_dies,
- close_transport_accept,
+ [close_transport_accept,
recv_active,
recv_active_once,
recv_error_handling
].
error_handling_tests_tls()->
- [tls_client_closes_socket,
+ [controller_dies,
+ tls_client_closes_socket,
tls_tcp_error_propagation_in_active_mode,
tls_tcp_connect,
tls_tcp_connect_big,
@@ -842,8 +847,7 @@ controller_dies(Config) when is_list(Config) ->
Server ! listen,
Tester = self(),
Connect = fun(Pid) ->
- {ok, Socket} = ssl:connect(Hostname, Port,
- [{reuseaddr,true},{ssl_imp,new}]),
+ {ok, Socket} = ssl:connect(Hostname, Port, ClientOpts),
%% Make sure server finishes and verification
%% and is in coonection state before
%% killing client
@@ -960,9 +964,9 @@ clear_pem_cache(Config) when is_list(Config) ->
{status, _, _, StatusInfo} = sys:get_status(whereis(ssl_manager)),
[_, _,_, _, Prop] = StatusInfo,
State = ssl_test_lib:state(Prop),
- [_,FilRefDb |_] = element(6, State),
+ [_,{FilRefDb, _} |_] = element(6, State),
{Server, Client} = basic_verify_test_no_close(Config),
- CountReferencedFiles = fun({_,-1}, Acc) ->
+ CountReferencedFiles = fun({_, -1}, Acc) ->
Acc;
({_, N}, Acc) ->
N + Acc
@@ -2193,8 +2197,9 @@ ciphers_dsa_signed_certs() ->
[{doc,"Test all dsa ssl cipher suites in highest support ssl/tls version"}].
ciphers_dsa_signed_certs(Config) when is_list(Config) ->
+ NVersion = ssl_test_lib:protocol_version(Config, tuple),
Version = ssl_test_lib:protocol_version(Config),
- Ciphers = ssl_test_lib:dsa_suites(tls_record:protocol_version(Version)),
+ Ciphers = ssl_test_lib:dsa_suites(NVersion),
ct:log("~p erlang cipher suites ~p~n", [Version, Ciphers]),
run_suites(Ciphers, Version, Config, dsa).
%%-------------------------------------------------------------------
@@ -2217,29 +2222,33 @@ anonymous_cipher_suites(Config) when is_list(Config) ->
psk_cipher_suites() ->
[{doc, "Test the PSK ciphersuites WITHOUT server supplied identity hint"}].
psk_cipher_suites(Config) when is_list(Config) ->
+ NVersion = tls_record:highest_protocol_version([]),
Version = ssl_test_lib:protocol_version(Config),
- Ciphers = ssl_test_lib:psk_suites(),
+ Ciphers = ssl_test_lib:psk_suites(NVersion),
run_suites(Ciphers, Version, Config, psk).
%%-------------------------------------------------------------------
psk_with_hint_cipher_suites()->
[{doc, "Test the PSK ciphersuites WITH server supplied identity hint"}].
psk_with_hint_cipher_suites(Config) when is_list(Config) ->
+ NVersion = tls_record:highest_protocol_version([]),
Version = ssl_test_lib:protocol_version(Config),
- Ciphers = ssl_test_lib:psk_suites(),
+ Ciphers = ssl_test_lib:psk_suites(NVersion),
run_suites(Ciphers, Version, Config, psk_with_hint).
%%-------------------------------------------------------------------
psk_anon_cipher_suites() ->
[{doc, "Test the anonymous PSK ciphersuites WITHOUT server supplied identity hint"}].
psk_anon_cipher_suites(Config) when is_list(Config) ->
+ NVersion = tls_record:highest_protocol_version([]),
Version = ssl_test_lib:protocol_version(Config),
- Ciphers = ssl_test_lib:psk_anon_suites(),
+ Ciphers = ssl_test_lib:psk_anon_suites(NVersion),
run_suites(Ciphers, Version, Config, psk_anon).
%%-------------------------------------------------------------------
psk_anon_with_hint_cipher_suites()->
[{doc, "Test the anonymous PSK ciphersuites WITH server supplied identity hint"}].
psk_anon_with_hint_cipher_suites(Config) when is_list(Config) ->
+ NVersion = tls_record:highest_protocol_version([]),
Version = ssl_test_lib:protocol_version(Config),
- Ciphers = ssl_test_lib:psk_anon_suites(),
+ Ciphers = ssl_test_lib:psk_anon_suites(NVersion),
run_suites(Ciphers, Version, Config, psk_anon_with_hint).
%%-------------------------------------------------------------------
srp_cipher_suites()->
@@ -2290,18 +2299,17 @@ rc4_ecdsa_cipher_suites(Config) when is_list(Config) ->
%%-------------------------------------------------------------------
des_rsa_cipher_suites()->
- [{doc, "Test the RC4 ciphersuites"}].
+ [{doc, "Test the des_rsa ciphersuites"}].
des_rsa_cipher_suites(Config) when is_list(Config) ->
- NVersion = tls_record:highest_protocol_version([]),
- Version = tls_record:protocol_version(NVersion),
- Ciphers = ssl_test_lib:des_suites(NVersion),
+ Version = ssl_test_lib:protocol_version(Config),
+ Ciphers = ssl_test_lib:des_suites(Config),
run_suites(Ciphers, Version, Config, des_rsa).
%-------------------------------------------------------------------
des_ecdh_rsa_cipher_suites()->
- [{doc, "Test the RC4 ciphersuites"}].
+ [{doc, "Test ECDH rsa signed ciphersuites"}].
des_ecdh_rsa_cipher_suites(Config) when is_list(Config) ->
- NVersion = tls_record:highest_protocol_version([]),
- Version = tls_record:protocol_version(NVersion),
+ NVersion = ssl_test_lib:protocol_version(Config, tuple),
+ Version = ssl_test_lib:protocol_version(Config),
Ciphers = ssl_test_lib:des_suites(NVersion),
run_suites(Ciphers, Version, Config, des_dhe_rsa).
@@ -2312,9 +2320,11 @@ default_reject_anonymous(Config) when is_list(Config) ->
{ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
ClientOpts = ssl_test_lib:ssl_options(client_opts, Config),
ServerOpts = ssl_test_lib:ssl_options(server_opts, Config),
- Version = tls_record:highest_protocol_version(tls_record:supported_protocol_versions()),
- [CipherSuite | _] = ssl_test_lib:anonymous_suites(Version),
-
+ Version = ssl_test_lib:protocol_version(Config),
+ TLSVersion = ssl_test_lib:tls_version(Version),
+
+ [CipherSuite | _] = ssl_test_lib:anonymous_suites(TLSVersion),
+
Server = ssl_test_lib:start_server_error([{node, ServerNode}, {port, 0},
{from, self()},
{options, ServerOpts}]),
@@ -2334,8 +2344,9 @@ ciphers_ecdsa_signed_certs() ->
[{doc, "Test all ecdsa ssl cipher suites in highest support ssl/tls version"}].
ciphers_ecdsa_signed_certs(Config) when is_list(Config) ->
+ NVersion = ssl_test_lib:protocol_version(Config, tuple),
Version = ssl_test_lib:protocol_version(Config),
- Ciphers = ssl_test_lib:ecdsa_suites(tls_record:protocol_version(Version)),
+ Ciphers = ssl_test_lib:ecdsa_suites(NVersion),
ct:log("~p erlang cipher suites ~p~n", [Version, Ciphers]),
run_suites(Ciphers, Version, Config, ecdsa).
%%--------------------------------------------------------------------
@@ -2352,8 +2363,9 @@ ciphers_ecdh_rsa_signed_certs() ->
[{doc, "Test all ecdh_rsa ssl cipher suites in highest support ssl/tls version"}].
ciphers_ecdh_rsa_signed_certs(Config) when is_list(Config) ->
+ NVersion = ssl_test_lib:protocol_version(Config, tuple),
Version = ssl_test_lib:protocol_version(Config),
- Ciphers = ssl_test_lib:ecdh_rsa_suites(tls_record:protocol_version(Version)),
+ Ciphers = ssl_test_lib:ecdh_rsa_suites(NVersion),
ct:log("~p erlang cipher suites ~p~n", [Version, Ciphers]),
run_suites(Ciphers, Version, Config, ecdh_rsa).
%%--------------------------------------------------------------------
@@ -3325,11 +3337,11 @@ hibernate(Config) ->
process_info(Pid, current_function),
ssl_test_lib:check_result(Server, ok, Client, ok),
- timer:sleep(1100),
-
+
+ timer:sleep(1500),
{current_function, {erlang, hibernate, 3}} =
process_info(Pid, current_function),
-
+
ssl_test_lib:close(Server),
ssl_test_lib:close(Client).
@@ -3362,13 +3374,12 @@ hibernate_right_away(Config) ->
[{port, Port1}, {options, [{hibernate_after, 0}|ClientOpts]}]),
ssl_test_lib:check_result(Server1, ok, Client1, ok),
-
- {current_function, {erlang, hibernate, 3}} =
+
+ {current_function, {erlang, hibernate, 3}} =
process_info(Pid1, current_function),
-
ssl_test_lib:close(Server1),
ssl_test_lib:close(Client1),
-
+
Server2 = ssl_test_lib:start_server(StartServerOpts),
Port2 = ssl_test_lib:inet_port(Server2),
{Client2, #sslsocket{pid = Pid2}} = ssl_test_lib:start_client(StartClientOpts ++
@@ -3376,8 +3387,8 @@ hibernate_right_away(Config) ->
ssl_test_lib:check_result(Server2, ok, Client2, ok),
- ct:sleep(100), %% Schedule out
-
+ ct:sleep(1000), %% Schedule out
+
{current_function, {erlang, hibernate, 3}} =
process_info(Pid2, current_function),
@@ -3860,6 +3871,29 @@ unordered_protocol_versions_client(Config) when is_list(Config) ->
ssl_test_lib:check_result(Server, ServerMsg, Client, ClientMsg).
%%--------------------------------------------------------------------
+max_handshake_size() ->
+ [{doc,"Test that we can set max_handshake_size to max value."}].
+
+max_handshake_size(Config) when is_list(Config) ->
+ ClientOpts = ssl_test_lib:ssl_options(client_opts, Config),
+ ServerOpts = ssl_test_lib:ssl_options(server_opts, Config),
+
+ {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
+ Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
+ {from, self()},
+ {mfa, {ssl_test_lib, send_recv_result_active, []}},
+ {options, [{max_handshake_size, 8388607} |ServerOpts]}]),
+ Port = ssl_test_lib:inet_port(Server),
+
+ Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
+ {host, Hostname},
+ {from, self()},
+ {mfa, {ssl_test_lib, send_recv_result_active, []}},
+ {options, [{max_handshake_size, 8388607} | ClientOpts]}]),
+
+ ssl_test_lib:check_result(Server, ok, Client, ok).
+
+%%--------------------------------------------------------------------
server_name_indication_option() ->
[{doc,"Test API server_name_indication option to connect."}].
@@ -4483,16 +4517,21 @@ run_suites(Ciphers, Version, Config, Type) ->
[{reuseaddr, true}, {ciphers, ssl_test_lib:anonymous_suites(Version)}]};
psk ->
{ssl_test_lib:ssl_options(client_psk, Config),
- ssl_test_lib:ssl_options(server_psk, Config)};
+ [{ciphers, ssl_test_lib:psk_suites(Version)} |
+ ssl_test_lib:ssl_options(server_psk, Config)]};
psk_with_hint ->
{ssl_test_lib:ssl_options(client_psk, Config),
- ssl_test_lib:ssl_options(server_psk_hint, Config)};
+ [{ciphers, ssl_test_lib:psk_suites(Version)} |
+ ssl_test_lib:ssl_options(server_psk_hint, Config)
+ ]};
psk_anon ->
{ssl_test_lib:ssl_options(client_psk, Config),
- ssl_test_lib:ssl_options(server_psk_anon, Config)};
+ [{ciphers, ssl_test_lib:psk_anon_suites(Version)} |
+ ssl_test_lib:ssl_options(server_psk_anon, Config)]};
psk_anon_with_hint ->
{ssl_test_lib:ssl_options(client_psk, Config),
- ssl_test_lib:ssl_options(server_psk_anon_hint, Config)};
+ [{ciphers, ssl_test_lib:psk_anon_suites(Version)} |
+ ssl_test_lib:ssl_options(server_psk_anon_hint, Config)]};
srp ->
{ssl_test_lib:ssl_options(client_srp, Config),
ssl_test_lib:ssl_options(server_srp, Config)};
@@ -4532,7 +4571,7 @@ run_suites(Ciphers, Version, Config, Type) ->
Result = lists:map(fun(Cipher) ->
cipher(Cipher, Version, Config, ClientOpts, ServerOpts) end,
- ssl_test_lib:filter_suites(Ciphers)),
+ ssl_test_lib:filter_suites(Ciphers, Version)),
case lists:flatten(Result) of
[] ->
ok;
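
The new max_handshake_size option caps how large a single handshake message the implementation will accept (see assert_buffer_sanity/2 in tls_connection above); the test sets it to 8388607 on both ends, the largest value the option is meant to allow according to its doc string. A minimal client-side sketch; host, port and the chosen cap are placeholders:

%% Sketch only: connect with a non-default handshake size cap (in bytes).
connect_with_cap(Host, Port) ->
    ssl:connect(Host, Port,
                [{max_handshake_size, 131072},
                 {verify, verify_none}]).
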
diff --git a/lib/ssl/test/ssl_bench_SUITE.erl b/lib/ssl/test/ssl_bench_SUITE.erl
index 21989f8d99..70fd0af9b4 100644
--- a/lib/ssl/test/ssl_bench_SUITE.erl
+++ b/lib/ssl/test/ssl_bench_SUITE.erl
@@ -88,7 +88,6 @@ end_per_testcase(_Func, _Conf) ->
-define(FPROF_SERVER, false).
-define(EPROF_CLIENT, false).
-define(EPROF_SERVER, false).
--define(PERCEPT_SERVER, false).
%% Current numbers gives roughly a testcase per minute on todays hardware..
@@ -190,7 +189,6 @@ server_init(ssl, setup_connection, _, _, Server) ->
?FPROF_SERVER andalso start_profile(fprof, [whereis(ssl_manager), new]),
%%?EPROF_SERVER andalso start_profile(eprof, [ssl_connection_sup, ssl_manager]),
?EPROF_SERVER andalso start_profile(eprof, [ssl_manager]),
- ?PERCEPT_SERVER andalso percept:profile("/tmp/ssl_server.percept"),
Server ! {self(), {init, Host, Port}},
Test = fun(TSocket) ->
ok = ssl:ssl_accept(TSocket),
@@ -247,7 +245,6 @@ setup_server_connection(LSocket, Test) ->
receive quit ->
?FPROF_SERVER andalso stop_profile(fprof, "test_server_res.fprof"),
?EPROF_SERVER andalso stop_profile(eprof, "test_server_res.eprof"),
- ?PERCEPT_SERVER andalso stop_profile(percept, "/tmp/ssl_server.percept"),
ok
after 0 ->
case ssl:transport_accept(LSocket, 2000) of
@@ -388,13 +385,6 @@ start_profile(fprof, Procs) ->
fprof:trace([start, {procs, Procs}]),
io:format("(F)Profiling ...",[]).
-stop_profile(percept, File) ->
- percept:stop_profile(),
- percept:analyze(File),
- {started, _Host, Port} = percept:start_webserver(),
- wx:new(),
- wx_misc:launchDefaultBrowser("http://" ++ net_adm:localhost() ++ ":" ++ integer_to_list(Port)),
- ok;
stop_profile(eprof, File) ->
profiling_stopped = eprof:stop_profiling(),
eprof:log(File),
diff --git a/lib/ssl/test/ssl_certificate_verify_SUITE.erl b/lib/ssl/test/ssl_certificate_verify_SUITE.erl
index 5265c87e29..4552a4f57d 100644
--- a/lib/ssl/test/ssl_certificate_verify_SUITE.erl
+++ b/lib/ssl/test/ssl_certificate_verify_SUITE.erl
@@ -39,17 +39,26 @@
%% Common Test interface functions -----------------------------------
%%--------------------------------------------------------------------
all() ->
- [{group, active},
- {group, passive},
- {group, active_once},
- {group, error_handling}].
-
+ [
+ {group, tls},
+ {group, dtls}
+ ].
groups() ->
- [{active, [], tests()},
+ [
+ {tls, [], all_protocol_groups()},
+ {dtls, [], all_protocol_groups()},
+ {active, [], tests()},
{active_once, [], tests()},
{passive, [], tests()},
- {error_handling, [],error_handling_tests()}].
+ {error_handling, [],error_handling_tests()}
+ ].
+
+all_protocol_groups() ->
+ [{group, active},
+ {group, passive},
+ {group, active_once},
+ {group, error_handling}].
tests() ->
[verify_peer,
@@ -85,7 +94,7 @@ init_per_suite(Config0) ->
catch crypto:stop(),
try crypto:start() of
ok ->
- ssl_test_lib:clean_start(),
+ ssl_test_lib:clean_start(),
%% make rsa certs using oppenssl
{ok, _} = make_certs:all(proplists:get_value(data_dir, Config0),
proplists:get_value(priv_dir, Config0)),
@@ -99,6 +108,26 @@ end_per_suite(_Config) ->
ssl:stop(),
application:stop(crypto).
+init_per_group(tls, Config) ->
+ Version = tls_record:protocol_version(tls_record:highest_protocol_version([])),
+ ssl:stop(),
+ application:load(ssl),
+ application:set_env(ssl, protocol_version, Version),
+ application:set_env(ssl, bypass_pem_cache, Version),
+ ssl:start(),
+ NewConfig = proplists:delete(protocol, Config),
+ [{protocol, tls}, {version, tls_record:protocol_version(Version)} | NewConfig];
+
+init_per_group(dtls, Config) ->
+ Version = dtls_record:protocol_version(dtls_record:highest_protocol_version([])),
+ ssl:stop(),
+ application:load(ssl),
+ application:set_env(ssl, protocol_version, Version),
+ application:set_env(ssl, bypass_pem_cache, Version),
+ ssl:start(),
+ NewConfig = proplists:delete(protocol_opts, proplists:delete(protocol, Config)),
+ [{protocol, dtls}, {protocol_opts, [{protocol, dtls}]}, {version, dtls_record:protocol_version(Version)} | NewConfig];
+
init_per_group(active, Config) ->
[{active, true}, {receive_function, send_recv_result_active} | Config];
init_per_group(active_once, Config) ->
@@ -262,7 +291,7 @@ server_require_peer_cert_fail() ->
server_require_peer_cert_fail(Config) when is_list(Config) ->
ServerOpts = [{verify, verify_peer}, {fail_if_no_peer_cert, true}
| ssl_test_lib:ssl_options(server_verification_opts, Config)],
- BadClientOpts = ssl_test_lib:ssl_options(client_opts, []),
+ BadClientOpts = ssl_test_lib:ssl_options(empty_client_opts, Config),
{ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
Server = ssl_test_lib:start_server_error([{node, ServerNode}, {port, 0},
@@ -411,7 +440,7 @@ server_require_peer_cert_partial_chain_fun_fail() ->
server_require_peer_cert_partial_chain_fun_fail(Config) when is_list(Config) ->
ServerOpts = [{verify, verify_peer}, {fail_if_no_peer_cert, true}
| ssl_test_lib:ssl_options(server_verification_opts, Config)],
- ClientOpts = proplists:get_value(client_opts, Config),
+ ClientOpts = ssl_test_lib:ssl_options(client_opts, Config),
{ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
{ok, ServerCAs} = file:read_file(proplists:get_value(cacertfile, ServerOpts)),
@@ -1091,6 +1120,7 @@ client_with_cert_cipher_suites_handshake() ->
client_with_cert_cipher_suites_handshake(Config) when is_list(Config) ->
ClientOpts = ssl_test_lib:ssl_options(client_verification_opts_digital_signature_only, Config),
ServerOpts = ssl_test_lib:ssl_options(server_verification_opts, Config),
+
{ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
{from, self()},
@@ -1098,7 +1128,7 @@ client_with_cert_cipher_suites_handshake(Config) when is_list(Config) ->
send_recv_result_active, []}},
{options, [{active, true},
{ciphers,
- ssl_test_lib:rsa_non_signed_suites(tls_record:highest_protocol_version([]))}
+ ssl_test_lib:rsa_non_signed_suites(proplists:get_value(version, Config))}
| ServerOpts]}]),
Port = ssl_test_lib:inet_port(Server),
Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
@@ -1132,7 +1162,7 @@ server_verify_no_cacerts(Config) when is_list(Config) ->
unknown_server_ca_fail() ->
[{doc,"Test that the client fails if the ca is unknown in verify_peer mode"}].
unknown_server_ca_fail(Config) when is_list(Config) ->
- ClientOpts = ssl_test_lib:ssl_options(client_opts, []),
+ ClientOpts = ssl_test_lib:ssl_options(empty_client_opts, Config),
ServerOpts = ssl_test_lib:ssl_options(server_verification_opts, Config),
{ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
Server = ssl_test_lib:start_server_error([{node, ServerNode}, {port, 0},
@@ -1176,7 +1206,7 @@ unknown_server_ca_fail(Config) when is_list(Config) ->
unknown_server_ca_accept_verify_none() ->
[{doc,"Test that the client succeds if the ca is unknown in verify_none mode"}].
unknown_server_ca_accept_verify_none(Config) when is_list(Config) ->
- ClientOpts = ssl_test_lib:ssl_options(client_opts, []),
+ ClientOpts = ssl_test_lib:ssl_options(empty_client_opts, Config),
ServerOpts = ssl_test_lib:ssl_options(server_verification_opts, Config),
{ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
@@ -1201,8 +1231,8 @@ unknown_server_ca_accept_verify_peer() ->
[{doc, "Test that the client succeds if the ca is unknown in verify_peer mode"
" with a verify_fun that accepts the unknown ca error"}].
unknown_server_ca_accept_verify_peer(Config) when is_list(Config) ->
- ClientOpts =ssl_test_lib:ssl_options(client_opts, []),
- ServerOpts = ssl_test_lib:ssl_options(server_verification_opts, Config),
+ ClientOpts = ssl_test_lib:ssl_options(empty_client_opts, Config),
+ ServerOpts = ssl_test_lib:ssl_options(server_verification_opts, Config),
{ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
{from, self()},
@@ -1240,7 +1270,7 @@ unknown_server_ca_accept_verify_peer(Config) when is_list(Config) ->
unknown_server_ca_accept_backwardscompatibility() ->
[{doc,"Test that old style verify_funs will work"}].
unknown_server_ca_accept_backwardscompatibility(Config) when is_list(Config) ->
- ClientOpts = ssl_test_lib:ssl_options(client_opts, []),
+ ClientOpts = ssl_test_lib:ssl_options(empty_client_opts, Config),
ServerOpts = ssl_test_lib:ssl_options(server_verification_opts, Config),
{ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
diff --git a/lib/ssl/test/ssl_handshake_SUITE.erl b/lib/ssl/test/ssl_handshake_SUITE.erl
index 74b14145dd..0a50c98a28 100644
--- a/lib/ssl/test/ssl_handshake_SUITE.erl
+++ b/lib/ssl/test/ssl_handshake_SUITE.erl
@@ -33,6 +33,7 @@
%% Common Test interface functions -----------------------------------
%%--------------------------------------------------------------------
all() -> [decode_hello_handshake,
+ decode_hello_handshake_version_confusion,
decode_single_hello_extension_correctly,
decode_supported_elliptic_curves_hello_extension_correctly,
decode_unknown_hello_extension_correctly,
@@ -106,6 +107,14 @@ decode_hello_handshake(_Config) ->
#renegotiation_info{renegotiated_connection = <<0>>}
= (Hello#server_hello.extensions)#hello_extensions.renegotiation_info.
+
+decode_hello_handshake_version_confusion(_) ->
+ HelloPacket = <<3,3,0,0,0,0,0,63,210,235,149,6,244,140,108,13,177,74,16,218,33,108,219,41,73,228,3,82,132,123,73,144,118,100,0,0,32,192,4,0,10,192,45,192,38,0,47,192,18,0,163,0,22,0,165,192,29,192,18,192,30,0,103,0,57,192,48,0,47,1,0>>,
+    Version = {3,3},
+    ClientHello = 1,
+    Hello = tls_handshake:decode_handshake(Version, ClientHello, HelloPacket, false),
+    Hello = tls_handshake:decode_handshake(Version, ClientHello, HelloPacket, true).
+
decode_single_hello_extension_correctly(_Config) ->
Renegotiation = <<?UINT16(?RENEGOTIATION_EXT), ?UINT16(1), 0>>,
Extensions = ssl_handshake:decode_hello_extensions(Renegotiation),
diff --git a/lib/ssl/test/ssl_npn_hello_SUITE.erl b/lib/ssl/test/ssl_npn_hello_SUITE.erl
index 69aeea10c5..0b1de1dc1c 100644
--- a/lib/ssl/test/ssl_npn_hello_SUITE.erl
+++ b/lib/ssl/test/ssl_npn_hello_SUITE.erl
@@ -50,6 +50,10 @@ init_per_suite(Config) ->
{skip, "Crypto did not start"}
end.
+end_per_suite(_Config) ->
+ %% This function is required since init_per_suite/1 exists.
+ ok.
+
init_per_testcase(_TestCase, Config) ->
ssl_test_lib:ct_log_supported_protocol_versions(Config),
ct:timetrap({seconds, 5}),
diff --git a/lib/ssl/test/ssl_pem_cache_SUITE.erl b/lib/ssl/test/ssl_pem_cache_SUITE.erl
index f10d27fbc6..96b15d9b51 100644
--- a/lib/ssl/test/ssl_pem_cache_SUITE.erl
+++ b/lib/ssl/test/ssl_pem_cache_SUITE.erl
@@ -82,8 +82,8 @@ pem_cleanup() ->
[{doc, "Test pem cache invalidate mechanism"}].
pem_cleanup(Config)when is_list(Config) ->
process_flag(trap_exit, true),
- ClientOpts = proplists:get_value(client_opts, Config),
- ServerOpts = proplists:get_value(server_opts, Config),
+ ClientOpts = proplists:get_value(client_verification_opts, Config),
+ ServerOpts = proplists:get_value(server_verification_opts, Config),
{ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
Server =
diff --git a/lib/ssl/test/ssl_test_lib.erl b/lib/ssl/test/ssl_test_lib.erl
index 9632103696..7a644968f2 100644
--- a/lib/ssl/test/ssl_test_lib.erl
+++ b/lib/ssl/test/ssl_test_lib.erl
@@ -278,8 +278,11 @@ check_result(Server, ServerMsg, Client, ClientMsg) ->
check_result(Server, ServerMsg);
{Port, {data,Debug}} when is_port(Port) ->
- ct:log("~p:~p~nopenssl ~s~n",[?MODULE,?LINE, Debug]),
+ ct:log("~p:~p~n Openssl ~s~n",[?MODULE,?LINE, Debug]),
check_result(Server, ServerMsg, Client, ClientMsg);
+ {Port,closed} when is_port(Port) ->
+ ct:log("~p:~p~n Openssl port ~n",[?MODULE,?LINE]),
+ check_result(Server, ServerMsg, Client, ClientMsg);
Unexpected ->
Reason = {{expected, {Client, ClientMsg}},
{expected, {Server, ServerMsg}}, {got, Unexpected}},
@@ -291,11 +294,11 @@ check_result(Pid, Msg) ->
{Pid, Msg} ->
ok;
{Port, {data,Debug}} when is_port(Port) ->
- ct:log("~p:~p~nopenssl ~s~n",[?MODULE,?LINE, Debug]),
+ ct:log("~p:~p~n Openssl ~s~n",[?MODULE,?LINE, Debug]),
check_result(Pid,Msg);
- %% {Port, {exit_status, Status}} when is_port(Port) ->
- %% ct:log("~p:~p Exit status: ~p~n",[?MODULE,?LINE, Status]),
- %% check_result(Pid, Msg);
+ {Port,closed} when is_port(Port)->
+ ct:log("~p:~p Openssl port closed ~n",[?MODULE,?LINE]),
+ check_result(Pid, Msg);
Unexpected ->
Reason = {{expected, {Pid, Msg}},
{got, Unexpected}},
@@ -398,27 +401,22 @@ cert_options(Config) ->
{ssl_imp, new}]},
{server_opts, [{ssl_imp, new},{reuseaddr, true}, {cacertfile, ServerCaCertFile},
{certfile, ServerCertFile}, {keyfile, ServerKeyFile}]},
- %%{server_anon, [{ssl_imp, new},{reuseaddr, true}, {ciphers, anonymous_suites()}]},
- {client_psk, [{ssl_imp, new},{reuseaddr, true},
+ {client_psk, [{ssl_imp, new},
{psk_identity, "Test-User"},
{user_lookup_fun, {fun user_lookup/3, PskSharedSecret}}]},
{server_psk, [{ssl_imp, new},{reuseaddr, true},
{certfile, ServerCertFile}, {keyfile, ServerKeyFile},
- {user_lookup_fun, {fun user_lookup/3, PskSharedSecret}},
- {ciphers, psk_suites()}]},
+ {user_lookup_fun, {fun user_lookup/3, PskSharedSecret}}]},
{server_psk_hint, [{ssl_imp, new},{reuseaddr, true},
{certfile, ServerCertFile}, {keyfile, ServerKeyFile},
{psk_identity, "HINT"},
- {user_lookup_fun, {fun user_lookup/3, PskSharedSecret}},
- {ciphers, psk_suites()}]},
+ {user_lookup_fun, {fun user_lookup/3, PskSharedSecret}}]},
{server_psk_anon, [{ssl_imp, new},{reuseaddr, true},
- {user_lookup_fun, {fun user_lookup/3, PskSharedSecret}},
- {ciphers, psk_anon_suites()}]},
+ {user_lookup_fun, {fun user_lookup/3, PskSharedSecret}}]},
{server_psk_anon_hint, [{ssl_imp, new},{reuseaddr, true},
{psk_identity, "HINT"},
- {user_lookup_fun, {fun user_lookup/3, PskSharedSecret}},
- {ciphers, psk_anon_suites()}]},
- {client_srp, [{ssl_imp, new},{reuseaddr, true},
+ {user_lookup_fun, {fun user_lookup/3, PskSharedSecret}}]},
+ {client_srp, [{ssl_imp, new},
{srp_identity, {"Test-User", "secret"}}]},
{server_srp, [{ssl_imp, new},{reuseaddr, true},
{certfile, ServerCertFile}, {keyfile, ServerKeyFile},
@@ -473,7 +471,7 @@ make_dsa_cert(Config) ->
{cacertfile, ClientCaCertFile},
{certfile, ServerCertFile}, {keyfile, ServerKeyFile},
{verify, verify_peer}]},
- {client_dsa_opts, [{ssl_imp, new},{reuseaddr, true},
+ {client_dsa_opts, [{ssl_imp, new},
{cacertfile, ClientCaCertFile},
{certfile, ClientCertFile}, {keyfile, ClientKeyFile}]},
{server_srp_dsa, [{ssl_imp, new},{reuseaddr, true},
@@ -481,7 +479,7 @@ make_dsa_cert(Config) ->
{certfile, ServerCertFile}, {keyfile, ServerKeyFile},
{user_lookup_fun, {fun user_lookup/3, undefined}},
{ciphers, srp_dss_suites()}]},
- {client_srp_dsa, [{ssl_imp, new},{reuseaddr, true},
+ {client_srp_dsa, [{ssl_imp, new},
{srp_identity, {"Test-User", "secret"}},
{cacertfile, ClientCaCertFile},
{certfile, ClientCertFile}, {keyfile, ClientKeyFile}]}
@@ -502,7 +500,7 @@ make_ecdsa_cert(Config) ->
{cacertfile, ClientCaCertFile},
{certfile, ServerCertFile}, {keyfile, ServerKeyFile},
{verify, verify_peer}]},
- {client_ecdsa_opts, [{ssl_imp, new},{reuseaddr, true},
+ {client_ecdsa_opts, [{ssl_imp, new},
{cacertfile, ClientCaCertFile},
{certfile, ClientCertFile}, {keyfile, ClientKeyFile}]}
| Config];
@@ -537,7 +535,7 @@ make_ecdh_rsa_cert(Config) ->
{cacertfile, ClientCaCertFile},
{certfile, ServerCertFile}, {keyfile, ServerKeyFile},
{verify, verify_peer}]},
- {client_ecdh_rsa_opts, [{ssl_imp, new},{reuseaddr, true},
+ {client_ecdh_rsa_opts, [{ssl_imp, new},
{cacertfile, ClientCaCertFile},
{certfile, ClientCertFile}, {keyfile, ClientKeyFile}]}
| Config];
@@ -557,7 +555,7 @@ make_mix_cert(Config) ->
{cacertfile, ClientCaCertFile},
{certfile, ServerCertFile}, {keyfile, ServerKeyFile},
{verify, verify_peer}]},
- {client_mix_opts, [{ssl_imp, new},{reuseaddr, true},
+ {client_mix_opts, [{ssl_imp, new},
{cacertfile, ClientCaCertFile},
{certfile, ClientCertFile}, {keyfile, ClientKeyFile}]}
| Config].
@@ -827,17 +825,17 @@ rsa_suites(CounterPart) ->
({dhe_rsa, des_cbc, sha}) when FIPS == true ->
false;
({rsa, Cipher, _}) ->
- lists:member(Cipher, Ciphers);
+ lists:member(cipher_atom(Cipher), Ciphers);
({dhe_rsa, Cipher, _}) ->
- lists:member(Cipher, Ciphers);
+ lists:member(cipher_atom(Cipher), Ciphers);
({ecdhe_rsa, Cipher, _}) when ECC == true ->
- lists:member(Cipher, Ciphers);
+ lists:member(cipher_atom(Cipher), Ciphers);
({rsa, Cipher, _, _}) ->
- lists:member(Cipher, Ciphers);
+ lists:member(cipher_atom(Cipher), Ciphers);
({dhe_rsa, Cipher, _,_}) ->
- lists:member(Cipher, Ciphers);
+ lists:member(cipher_atom(Cipher), Ciphers);
({ecdhe_rsa, Cipher, _,_}) when ECC == true ->
- lists:member(Cipher, Ciphers);
+ lists:member(cipher_atom(Cipher), Ciphers);
(_) ->
false
end,
@@ -930,44 +928,12 @@ anonymous_suites(Version) ->
Suites = ssl_cipher:anonymous_suites(Version),
ssl_cipher:filter_suites(Suites).
-psk_suites() ->
- Suites =
- [{psk, rc4_128, sha},
- {psk, '3des_ede_cbc', sha},
- {psk, aes_128_cbc, sha},
- {psk, aes_256_cbc, sha},
- {psk, aes_128_cbc, sha256},
- {psk, aes_256_cbc, sha384},
- {dhe_psk, rc4_128, sha},
- {dhe_psk, '3des_ede_cbc', sha},
- {dhe_psk, aes_128_cbc, sha},
- {dhe_psk, aes_256_cbc, sha},
- {dhe_psk, aes_128_cbc, sha256},
- {dhe_psk, aes_256_cbc, sha384},
- {rsa_psk, rc4_128, sha},
- {rsa_psk, '3des_ede_cbc', sha},
- {rsa_psk, aes_128_cbc, sha},
- {rsa_psk, aes_256_cbc, sha},
- {rsa_psk, aes_128_cbc, sha256},
- {rsa_psk, aes_256_cbc, sha384},
- {psk, aes_128_gcm, null, sha256},
- {psk, aes_256_gcm, null, sha384},
- {dhe_psk, aes_128_gcm, null, sha256},
- {dhe_psk, aes_256_gcm, null, sha384},
- {rsa_psk, aes_128_gcm, null, sha256},
- {rsa_psk, aes_256_gcm, null, sha384}],
+psk_suites(Version) ->
+ Suites = ssl_cipher:psk_suites(Version),
ssl_cipher:filter_suites(Suites).
-psk_anon_suites() ->
- Suites =
- [{psk, rc4_128, sha},
- {psk, '3des_ede_cbc', sha},
- {psk, aes_128_cbc, sha},
- {psk, aes_256_cbc, sha},
- {dhe_psk, rc4_128, sha},
- {dhe_psk, '3des_ede_cbc', sha},
- {dhe_psk, aes_128_cbc, sha},
- {dhe_psk, aes_256_cbc, sha}],
+psk_anon_suites(Version) ->
+ Suites = [Suite || Suite <- psk_suites(Version), is_psk_anon_suite(Suite)],
ssl_cipher:filter_suites(Suites).
srp_suites() ->
@@ -1089,14 +1055,16 @@ init_tls_version(Version, Config)
application:load(ssl),
application:set_env(ssl, dtls_protocol_version, Version),
ssl:start(),
- [{protocol, dtls}, {protocol_opts, [{protocol, dtls}]}|Config];
+ NewConfig = proplists:delete(protocol_opts, proplists:delete(protocol, Config)),
+ [{protocol, dtls}, {protocol_opts, [{protocol, dtls}]} | NewConfig];
init_tls_version(Version, Config) ->
ssl:stop(),
application:load(ssl),
application:set_env(ssl, protocol_version, Version),
ssl:start(),
- [{protocol, tls}|Config].
+ NewConfig = proplists:delete(protocol_opts, proplists:delete(protocol, Config)),
+ [{protocol, tls} | NewConfig].
sufficient_crypto_support(Version)
when Version == 'tlsv1.2'; Version == 'dtlsv1.2' ->
@@ -1231,19 +1199,37 @@ check_sane_openssl_version(Version) ->
enough_openssl_crl_support("OpenSSL 0." ++ _) -> false;
enough_openssl_crl_support(_) -> true.
-wait_for_openssl_server(Port) ->
- wait_for_openssl_server(Port, 10).
-wait_for_openssl_server(_, 0) ->
+wait_for_openssl_server(Port, tls) ->
+ do_wait_for_openssl_tls_server(Port, 10);
+wait_for_openssl_server(Port, dtls) ->
+ do_wait_for_openssl_dtls_server(Port, 10).
+
+do_wait_for_openssl_tls_server(_, 0) ->
exit(failed_to_connect_to_openssl);
-wait_for_openssl_server(Port, N) ->
+do_wait_for_openssl_tls_server(Port, N) ->
case gen_tcp:connect("localhost", Port, []) of
{ok, S} ->
gen_tcp:close(S);
_ ->
ct:sleep(?SLEEP),
- wait_for_openssl_server(Port, N-1)
+ do_wait_for_openssl_tls_server(Port, N-1)
end.
+do_wait_for_openssl_dtls_server(_, 0) ->
+ %%exit(failed_to_connect_to_openssl);
+ ok;
+do_wait_for_openssl_dtls_server(Port, N) ->
+ %% case gen_udp:open(0) of
+ %% {ok, S} ->
+ %% gen_udp:connect(S, "localhost", Port),
+ %% gen_udp:close(S);
+ %% _ ->
+ %% ct:sleep(?SLEEP),
+ %% do_wait_for_openssl_dtls_server(Port, N-1)
+ %% end.
+ ct:sleep(500),
+ do_wait_for_openssl_dtls_server(Port, N-1).
+
version_flag(tlsv1) ->
"-tls1";
version_flag('tlsv1.1') ->
@@ -1253,10 +1239,14 @@ version_flag('tlsv1.2') ->
version_flag(sslv3) ->
"-ssl3";
version_flag(sslv2) ->
- "-ssl2".
-
-filter_suites(Ciphers0) ->
- Version = tls_record:highest_protocol_version([]),
+ "-ssl2";
+version_flag('dtlsv1.2') ->
+ "-dtls1_2";
+version_flag('dtlsv1') ->
+ "-dtls1".
+
+filter_suites(Ciphers0, AtomVersion) ->
+ Version = tls_version(AtomVersion),
Supported0 = ssl_cipher:suites(Version)
++ ssl_cipher:anonymous_suites(Version)
++ ssl_cipher:psk_suites(Version)
@@ -1338,7 +1328,7 @@ protocol_version(Config) ->
protocol_version(Config, tuple) ->
case proplists:get_value(protocol, Config) of
dtls ->
- dtls_record:protocol_version(dtls_record:highest_protocol_version([]));
+ dtls_record:highest_protocol_version(dtls_record:supported_protocol_versions());
_ ->
tls_record:highest_protocol_version(tls_record:supported_protocol_versions())
end;
@@ -1372,6 +1362,7 @@ clean_env() ->
application:unset_env(ssl, session_cache_client_max),
application:unset_env(ssl, session_cache_server_max),
application:unset_env(ssl, ssl_pem_cache_clean),
+ application:unset_env(ssl, bypass_pem_cache),
application:unset_env(ssl, alert_timeout).
clean_start() ->
@@ -1379,3 +1370,105 @@ clean_start() ->
application:load(ssl),
clean_env(),
ssl:start().
+
+is_psk_anon_suite({psk, _,_}) ->
+ true;
+is_psk_anon_suite({dhe_psk,_,_}) ->
+ true;
+is_psk_anon_suite({psk, _,_,_}) ->
+ true;
+is_psk_anon_suite({dhe_psk, _,_,_}) ->
+ true;
+is_psk_anon_suite(_) ->
+ false.
+
+cipher_atom(aes_256_cbc) ->
+ aes_cbc256;
+cipher_atom(aes_128_cbc) ->
+ aes_cbc128;
+cipher_atom('3des_ede_cbc') ->
+ des_ede3;
+cipher_atom(Atom) ->
+ Atom.
+tls_version('dtlsv1' = Atom) ->
+ dtls_v1:corresponding_tls_version(dtls_record:protocol_version(Atom));
+tls_version('dtlsv1.2' = Atom) ->
+ dtls_v1:corresponding_tls_version(dtls_record:protocol_version(Atom));
+tls_version(Atom) ->
+ tls_record:protocol_version(Atom).
+
+dtls_hello() ->
+ [1,
+ <<0,1,4>>,
+ <<0,0>>,
+ <<0,0,0>>,
+ <<0,1,4>>,
+ <<254,253,88,
+ 156,129,61,
+ 131,216,15,
+ 131,194,242,
+ 46,154,190,
+ 20,228,234,
+ 234,150,44,
+ 62,96,96,103,
+ 127,95,103,
+ 23,24,42,138,
+ 13,142,32,57,
+ 230,177,32,
+ 210,154,152,
+ 188,121,134,
+ 136,53,105,
+ 118,96,106,
+ 103,231,223,
+ 133,10,165,
+ 50,32,211,
+ 227,193,14,
+ 181,143,48,
+ 66,0,0,100,0,
+ 255,192,44,
+ 192,48,192,
+ 36,192,40,
+ 192,46,192,
+ 50,192,38,
+ 192,42,0,159,
+ 0,163,0,107,
+ 0,106,0,157,
+ 0,61,192,43,
+ 192,47,192,
+ 35,192,39,
+ 192,45,192,
+ 49,192,37,
+ 192,41,0,158,
+ 0,162,0,103,
+ 0,64,0,156,0,
+ 60,192,10,
+ 192,20,0,57,
+ 0,56,192,5,
+ 192,15,0,53,
+ 192,8,192,18,
+ 0,22,0,19,
+ 192,3,192,13,
+ 0,10,192,9,
+ 192,19,0,51,
+ 0,50,192,4,
+ 192,14,0,47,
+ 1,0,0,86,0,0,
+ 0,14,0,12,0,
+ 0,9,108,111,
+ 99,97,108,
+ 104,111,115,
+ 116,0,10,0,
+ 58,0,56,0,14,
+ 0,13,0,25,0,
+ 28,0,11,0,12,
+ 0,27,0,24,0,
+ 9,0,10,0,26,
+ 0,22,0,23,0,
+ 8,0,6,0,7,0,
+ 20,0,21,0,4,
+ 0,5,0,18,0,
+ 19,0,1,0,2,0,
+ 3,0,15,0,16,
+ 0,17,0,11,0,
+ 2,1,0>>].
+
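Example (not part of the patch): a minimal sketch of how a test case could use the version-aware helpers introduced above -- psk_anon_suites/1, protocol_version/2 and wait_for_openssl_server/2 -- assuming they are exported from ssl_test_lib; the helper names and option handling are illustrative only.

    %% Hypothetical helpers, for illustration only.
    psk_anon_ciphers(Config) ->
        %% protocol_version/2 returns the version tuple for the group under test.
        Version = ssl_test_lib:protocol_version(Config, tuple),
        %% psk and dhe_psk carry no certificate, so is_psk_anon_suite/1 keeps
        %% exactly the certificate-less subset of ssl_cipher:psk_suites/1.
        ssl_test_lib:psk_anon_suites(Version).

    wait_for_peer(Port, Config) ->
        %% tls or dtls, matching the new two-argument wait_for_openssl_server/2.
        Protocol = proplists:get_value(protocol, Config, tls),
        ssl_test_lib:wait_for_openssl_server(Port, Protocol).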
diff --git a/lib/ssl/test/ssl_to_openssl_SUITE.erl b/lib/ssl/test/ssl_to_openssl_SUITE.erl
index e99340822d..7a1dce70c2 100644
--- a/lib/ssl/test/ssl_to_openssl_SUITE.erl
+++ b/lib/ssl/test/ssl_to_openssl_SUITE.erl
@@ -42,7 +42,9 @@ all() ->
{group, 'tlsv1.2'},
{group, 'tlsv1.1'},
{group, 'tlsv1'},
- {group, 'sslv3'}
+ {group, 'sslv3'},
+ {group, 'dtlsv1.2'},
+ {group, 'dtlsv1'}
].
groups() ->
@@ -50,7 +52,10 @@ groups() ->
{'tlsv1.2', [], all_versions_tests() ++ alpn_tests() ++ npn_tests() ++ sni_server_tests()},
{'tlsv1.1', [], all_versions_tests() ++ alpn_tests() ++ npn_tests() ++ sni_server_tests()},
{'tlsv1', [], all_versions_tests()++ alpn_tests() ++ npn_tests() ++ sni_server_tests()},
- {'sslv3', [], all_versions_tests()}].
+ {'sslv3', [], all_versions_tests()},
+ {'dtlsv1.2', [], dtls_all_versions_tests()},
+ {'dtlsv1', [], dtls_all_versions_tests()}
+ ].
basic_tests() ->
[basic_erlang_client_openssl_server,
@@ -78,6 +83,25 @@ all_versions_tests() ->
expired_session,
ssl2_erlang_server_openssl_client
].
+dtls_all_versions_tests() ->
+ [
+ %%erlang_client_openssl_server,
+ erlang_server_openssl_client,
+ %%erlang_client_openssl_server_dsa_cert,
+ erlang_server_openssl_client_dsa_cert
+ %% This one works but gets port EXIT first sometimes
+ %%erlang_server_openssl_client_reuse_session
+ %%erlang_client_openssl_server_renegotiate,
+ %%erlang_client_openssl_server_nowrap_seqnum,
+ %%erlang_server_openssl_client_nowrap_seqnum,
+ %%erlang_client_openssl_server_no_server_ca_cert,
+ %%erlang_client_openssl_server_client_cert,
+ %%erlang_server_openssl_client_client_cert
+ %%ciphers_rsa_signed_certs,
+ %%ciphers_dsa_signed_certs,
+ %%erlang_client_bad_openssl_server,
+ %%expired_session
+ ].
alpn_tests() ->
[erlang_client_alpn_openssl_server_alpn,
@@ -284,7 +308,8 @@ basic_erlang_client_openssl_server(Config) when is_list(Config) ->
OpensslPort = ssl_test_lib:portable_open_port(Exe, Args),
- ssl_test_lib:wait_for_openssl_server(Port),
+
+ ssl_test_lib:wait_for_openssl_server(Port, tls),
Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
{host, Hostname},
@@ -357,7 +382,7 @@ erlang_client_openssl_server(Config) when is_list(Config) ->
OpensslPort = ssl_test_lib:portable_open_port(Exe, Args),
- ssl_test_lib:wait_for_openssl_server(Port),
+ ssl_test_lib:wait_for_openssl_server(Port, proplists:get_value(protocol, Config)),
Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
{host, Hostname},
@@ -431,7 +456,7 @@ erlang_client_openssl_server_dsa_cert(Config) when is_list(Config) ->
OpensslPort = ssl_test_lib:portable_open_port(Exe, Args),
- ssl_test_lib:wait_for_openssl_server(Port),
+ ssl_test_lib:wait_for_openssl_server(Port, proplists:get_value(protocol, Config)),
Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
{host, Hostname},
@@ -551,7 +576,7 @@ erlang_client_openssl_server_renegotiate(Config) when is_list(Config) ->
OpensslPort = ssl_test_lib:portable_open_port(Exe, Args),
- ssl_test_lib:wait_for_openssl_server(Port),
+ ssl_test_lib:wait_for_openssl_server(Port, proplists:get_value(protocol, Config)),
Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
{host, Hostname},
@@ -600,7 +625,7 @@ erlang_client_openssl_server_nowrap_seqnum(Config) when is_list(Config) ->
OpensslPort = ssl_test_lib:portable_open_port(Exe, Args),
- ssl_test_lib:wait_for_openssl_server(Port),
+ ssl_test_lib:wait_for_openssl_server(Port, proplists:get_value(protocol, Config)),
Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
{host, Hostname},
@@ -681,7 +706,7 @@ erlang_client_openssl_server_no_server_ca_cert(Config) when is_list(Config) ->
OpensslPort = ssl_test_lib:portable_open_port(Exe, Args),
- ssl_test_lib:wait_for_openssl_server(Port),
+ ssl_test_lib:wait_for_openssl_server(Port, proplists:get_value(protocol, Config)),
Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
{host, Hostname},
@@ -724,7 +749,7 @@ erlang_client_openssl_server_client_cert(Config) when is_list(Config) ->
OpensslPort = ssl_test_lib:portable_open_port(Exe, Args),
- ssl_test_lib:wait_for_openssl_server(Port),
+ ssl_test_lib:wait_for_openssl_server(Port, proplists:get_value(protocol, Config)),
Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
{host, Hostname},
@@ -856,7 +881,7 @@ erlang_client_bad_openssl_server(Config) when is_list(Config) ->
"-cert", CertFile, "-key", KeyFile],
OpensslPort = ssl_test_lib:portable_open_port(Exe, Args),
- ssl_test_lib:wait_for_openssl_server(Port),
+ ssl_test_lib:wait_for_openssl_server(Port, proplists:get_value(protocol, Config)),
Client0 = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
{host, Hostname},
@@ -911,7 +936,7 @@ expired_session(Config) when is_list(Config) ->
OpensslPort = ssl_test_lib:portable_open_port(Exe, Args),
- ssl_test_lib:wait_for_openssl_server(Port),
+ ssl_test_lib:wait_for_openssl_server(Port, tls),
Client0 =
ssl_test_lib:start_client([{node, ClientNode},
@@ -1399,7 +1424,7 @@ cipher(CipherSuite, Version, Config, ClientOpts, ServerOpts) ->
OpenSslPort = ssl_test_lib:portable_open_port(Exe, Args),
- ssl_test_lib:wait_for_openssl_server(Port),
+ ssl_test_lib:wait_for_openssl_server(Port, proplists:get_value(protocol, Config)),
ConnectionInfo = {ok, {Version, CipherSuite}},
@@ -1469,7 +1494,7 @@ start_erlang_client_and_openssl_server_with_opts(Config, ErlangClientOpts, Opens
OpensslPort = ssl_test_lib:portable_open_port(Exe, Args),
- ssl_test_lib:wait_for_openssl_server(Port),
+ ssl_test_lib:wait_for_openssl_server(Port, proplists:get_value(protocol, Config)),
Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
{host, Hostname},
@@ -1505,7 +1530,7 @@ start_erlang_client_and_openssl_server_for_alpn_negotiation(Config, Data, Callba
Args = ["s_server", "-msg", "-alpn", "http/1.1,spdy/2", "-accept", integer_to_list(Port), ssl_test_lib:version_flag(Version),
"-cert", CertFile, "-key", KeyFile],
OpensslPort = ssl_test_lib:portable_open_port(Exe, Args),
- ssl_test_lib:wait_for_openssl_server(Port),
+ ssl_test_lib:wait_for_openssl_server(Port, proplists:get_value(protocol, Config)),
Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
{host, Hostname},
@@ -1574,7 +1599,7 @@ start_erlang_client_and_openssl_server_for_alpn_npn_negotiation(Config, Data, Ca
OpensslPort = ssl_test_lib:portable_open_port(Exe, Args),
- ssl_test_lib:wait_for_openssl_server(Port),
+ ssl_test_lib:wait_for_openssl_server(Port, proplists:get_value(protocol, Config)),
Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
{host, Hostname},
@@ -1639,7 +1664,7 @@ start_erlang_client_and_openssl_server_for_npn_negotiation(Config, Data, Callbac
"-cert", CertFile, "-key", KeyFile],
OpensslPort = ssl_test_lib:portable_open_port(Exe, Args),
- ssl_test_lib:wait_for_openssl_server(Port),
+ ssl_test_lib:wait_for_openssl_server(Port, proplists:get_value(protocol, Config)),
Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
{host, Hostname},
diff --git a/lib/ssl/vsn.mk b/lib/ssl/vsn.mk
index 2cdb825d75..415a47949d 100644
--- a/lib/ssl/vsn.mk
+++ b/lib/ssl/vsn.mk
@@ -1 +1 @@
-SSL_VSN = 8.1
+SSL_VSN = 8.1.1
diff --git a/lib/stdlib/doc/src/c.xml b/lib/stdlib/doc/src/c.xml
index 55a77d1bc5..7666699183 100644
--- a/lib/stdlib/doc/src/c.xml
+++ b/lib/stdlib/doc/src/c.xml
@@ -52,13 +52,27 @@
<func>
<name name="c" arity="1"/>
<name name="c" arity="2"/>
- <fsummary>Compile and load code in a file.</fsummary>
+ <name name="c" arity="3"/>
+ <fsummary>Compile and load a file or module.</fsummary>
<desc>
- <p>Compiles and then purges and loads the code for a file.
- <c><anno>Options</anno></c> defaults to <c>[]</c>. Compilation is
- equivalent to:</p>
- <code type="none">
-compile:file(<anno>File</anno>, <anno>Options</anno> ++ [report_errors, report_warnings])</code>
+ <p>Compiles and then purges and loads the code for a module.
+ <c><anno>Module</anno></c> can be either a module name or a source
+ file path, with or without <c>.erl</c> extension.
+ <c><anno>Options</anno></c> defaults to <c>[]</c>.</p>
+ <p>If <c><anno>Module</anno></c> is an atom and is not the path of a
+ source file, then the code path is searched to locate the object
+ file for the module and extract its original compiler options and
+ source path. If the source file is not found in the original
+ location, <seealso
+ marker="filelib#find_source/1"><c>filelib:find_source/1</c></seealso>
+ is used to search for it relative to the directory of the object
+ file.</p>
+ <p>The source file is compiled with the original
+ options appended to the given <c><anno>Options</anno></c>, the
+ output replacing the old object file if and only if compilation
+ succeeds. A function <c><anno>Filter</anno></c> can be specified
+ for removing elements from the original compiler options
+ before the new options are added.</p>
<p>Notice that purging the code means that any processes
lingering in old code for the module are killed without
warning. For more information, see <c>code/3</c>.</p>
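Example (not part of the patch): a shell sketch of the c/3 behaviour described above; the module name and filter are hypothetical.

    %% Recompile an already-loaded module with its original options plus
    %% debug_info, filtering out any old 'export_all' option.
    1> c(my_mod, [debug_info], fun(Opt) -> Opt =/= export_all end).
    Recompiling .../src/my_mod.erl
    {ok,my_mod}
    %% A source file path still works as before.
    2> c("src/my_mod.erl", [debug_info]).
    {ok,my_mod}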
diff --git a/lib/stdlib/doc/src/erl_tar.xml b/lib/stdlib/doc/src/erl_tar.xml
index 24e7b64b9e..f28d8b425b 100644
--- a/lib/stdlib/doc/src/erl_tar.xml
+++ b/lib/stdlib/doc/src/erl_tar.xml
@@ -37,12 +37,13 @@
</modulesummary>
<description>
<p>This module archives and extract files to and from
- a tar file. This module supports the <c>ustar</c> format
- (IEEE Std 1003.1 and ISO/IEC&nbsp;9945-1). All modern <c>tar</c>
- programs (including GNU tar) can read this format. To ensure that
- that GNU tar produces a tar file that <c>erl_tar</c> can read,
- specify option <c>--format=ustar</c> to GNU tar.</p>
-
+ a tar file. This module supports reading most common tar formats,
+ namely v7, STAR, USTAR, and PAX, as well as some of GNU tar's extensions
+ to the USTAR format (sparse files most notably). It produces tar archives
+ in USTAR format, unless the files being archived require PAX format due to
+ restrictions in USTAR (such as unicode metadata, filename length, and more).
+ As such, <c>erl_tar</c> supports tar archives produced by most modern
+ tar utilities, and produces tarballs which should be similarly portable.</p>
<p>By convention, the name of a tar file is to end in "<c>.tar</c>".
To abide to the convention, add "<c>.tar</c>" to the name.</p>
@@ -83,6 +84,8 @@
<p>If <seealso marker="kernel:file#native_name_encoding/0">
<c>file:native_name_encoding/0</c></seealso>
returns <c>latin1</c>, no translation of path names is done.</p>
+
+ <p>Unicode metadata stored in PAX headers is preserved.</p>
</section>
<section>
@@ -104,21 +107,20 @@
<title>Limitations</title>
<list type="bulleted">
<item>
- <p>For maximum compatibility, it is safe to archive files with names
- up to 100 characters in length. Such tar files can generally be
- extracted by any <c>tar</c> program.</p>
- </item>
- <item>
- <p>For filenames exceeding 100 characters in length, the resulting tar
- file can only be correctly extracted by a POSIX-compatible <c>tar</c>
- program (such as Solaris <c>tar</c> or a modern GNU <c>tar</c>).</p>
- </item>
- <item>
- <p>Files with longer names than 256 bytes cannot be stored.</p>
+ <p>If you must remain compatible with the USTAR tar format, you must ensure file paths being
+ stored are less than 255 bytes in total, with a maximum filename component
+ length of 100 bytes. USTAR uses a header field (prefix) in addition to the name field, and
+ splits file paths longer than 100 bytes into two parts. This split is done on a directory boundary,
+ and is done in such a way as to make the best use of the space available in those two fields, but in practice
+ this will often mean that you have less than 255 bytes for a path. <c>erl_tar</c> will
+ automatically upgrade the format to PAX to handle longer filenames, so this is only an issue if you
+ need to extract the archive with an older implementation of <c>erl_tar</c> or <c>tar</c> which does
+ not support PAX. In this case, the PAX headers will be extracted as regular files, and you will need to
+ apply them manually.</p>
</item>
<item>
- <p>The file name a symbolic link points is always limited
- to 100 characters.</p>
+ <p>Like the above, if you must remain USTAR compatible, you must also ensure that paths for
+ symbolic/hard links are no more than 100 bytes, otherwise PAX headers will be used.</p>
</item>
</list>
</section>
@@ -129,7 +131,9 @@
<fsummary>Add a file to an open tar file.</fsummary>
<type>
<v>TarDescriptor = term()</v>
- <v>Filename = filename()</v>
+ <v>FilenameOrBin = filename()|binary()</v>
+ <v>NameInArchive = filename()</v>
+ <v>Filename = filename()|{NameInArchive,FilenameOrBin}</v>
<v>Options = [Option]</v>
<v>Option = dereference|verbose|{chunks,ChunkSize}</v>
<v>ChunkSize = positive_integer()</v>
@@ -139,6 +143,9 @@
<desc>
<p>Adds a file to a tar file that has been opened for writing by
<seealso marker="#open/2"><c>open/1</c></seealso>.</p>
+ <p><c>NameInArchive</c> is the name under which the file becomes
+ stored in the tar file. The file gets this name when it is
+ extracted from the tar file.</p>
<p>Options:</p>
<taglist>
<tag><c>dereference</c></tag>
@@ -183,9 +190,6 @@
<seealso marker="#open/2"><c>open/2</c></seealso>. This function
accepts the same options as
<seealso marker="#add/3"><c>add/3</c></seealso>.</p>
- <p><c>NameInArchive</c> is the name under which the file becomes
- stored in the tar file. The file gets this name when it is
- extracted from the tar file.</p>
</desc>
</func>
@@ -206,8 +210,8 @@
<fsummary>Create a tar archive.</fsummary>
<type>
<v>Name = filename()</v>
- <v>FileList = [Filename|{NameInArchive, binary()},{NameInArchive,
- Filename}]</v>
+ <v>FileList = [Filename|{NameInArchive, FilenameOrBin}]</v>
+ <v>FilenameOrBin = filename()|binary()</v>
<v>Filename = filename()</v>
<v>NameInArchive = filename()</v>
<v>RetValue = ok|{error,{Name,Reason}}</v>
@@ -225,8 +229,8 @@
<fsummary>Create a tar archive with options.</fsummary>
<type>
<v>Name = filename()</v>
- <v>FileList = [Filename|{NameInArchive, binary()},{NameInArchive,
- Filename}]</v>
+ <v>FileList = [Filename|{NameInArchive, FilenameOrBin}]</v>
+ <v>FilenameOrBin = filename()|binary()</v>
<v>Filename = filename()</v>
<v>NameInArchive = filename()</v>
<v>OptionList = [Option]</v>
@@ -275,7 +279,8 @@
<name>extract(Name) -> RetValue</name>
<fsummary>Extract all files from a tar file.</fsummary>
<type>
- <v>Name = filename()</v>
+ <v>Name = filename() | {binary,binary()} | {file,Fd}</v>
+ <v>Fd = file_descriptor()</v>
<v>RetValue = ok|{error,{Name,Reason}}</v>
<v>Reason = term()</v>
</type>
@@ -294,8 +299,7 @@
<name>extract(Name, OptionList)</name>
<fsummary>Extract files from a tar file.</fsummary>
<type>
- <v>Name = filename() | {binary,Binary} | {file,Fd}</v>
- <v>Binary = binary()</v>
+ <v>Name = filename() | {binary,binary()} | {file,Fd}</v>
<v>Fd = file_descriptor()</v>
<v>OptionList = [Option]</v>
<v>Option = {cwd,Cwd}|{files,FileList}|keep_old_files|verbose|memory</v>
@@ -521,7 +525,7 @@ erl_tar:close(TarDesc)</code>
<name>table(Name) -> RetValue</name>
<fsummary>Retrieve the name of all files in a tar file.</fsummary>
<type>
- <v>Name = filename()</v>
+ <v>Name = filename()|{binary,binary()}|{file,file_descriptor()}</v>
<v>RetValue = {ok,[string()]}|{error,{Name,Reason}}</v>
<v>Reason = term()</v>
</type>
@@ -535,7 +539,7 @@ erl_tar:close(TarDesc)</code>
<fsummary>Retrieve name and information of all files in a tar file.
</fsummary>
<type>
- <v>Name = filename()</v>
+ <v>Name = filename()|{binary,binary()}|{file,file_descriptor()}</v>
</type>
<desc>
<p>Retrieves the names of all files in the tar file <c>Name</c>.</p>
@@ -546,7 +550,7 @@ erl_tar:close(TarDesc)</code>
<name>t(Name)</name>
<fsummary>Print the name of each file in a tar file.</fsummary>
<type>
- <v>Name = filename()</v>
+ <v>Name = filename()|{binary,binary()}|{file,file_descriptor()}</v>
</type>
<desc>
<p>Prints the names of all files in the tar file <c>Name</c> to the
@@ -559,7 +563,7 @@ erl_tar:close(TarDesc)</code>
<fsummary>Print name and information for each file in a tar file.
</fsummary>
<type>
- <v>Name = filename()</v>
+ <v>Name = filename()|{binary,binary()}|{file,file_descriptor()}</v>
</type>
<desc>
<p>Prints names and information about all files in the tar file
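Example (not part of the patch): a sketch of the {NameInArchive, FilenameOrBin} entries and the {binary, Bin} source documented above; all file names are illustrative.

    %% One entry taken from disk, one from an in-memory binary.
    Payload = <<"hello from a binary">>,
    ok = erl_tar:create("demo.tar",
                        [{"README", "/tmp/README"},      %% {NameInArchive, Filename}
                         {"data/hello.txt", Payload}]),  %% {NameInArchive, binary()}
    {ok, Names} = erl_tar:table("demo.tar"),
    %% The archive can also be read back without touching the file system.
    {ok, TarBin} = file:read_file("demo.tar"),
    {ok, Files} = erl_tar:extract({binary, TarBin}, [memory]).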
diff --git a/lib/stdlib/doc/src/ets.xml b/lib/stdlib/doc/src/ets.xml
index 5f5d2b7f36..05401a2d40 100644
--- a/lib/stdlib/doc/src/ets.xml
+++ b/lib/stdlib/doc/src/ets.xml
@@ -541,10 +541,6 @@ Error: fun containing local Erlang function calls
<c><anno>Tab</anno></c> is
not of the correct type, or if <c><anno>Item</anno></c> is not
one of the allowed values, a <c>badarg</c> exception is raised.</p>
- <warning>
- <p>In Erlang/OTP R11B and earlier, this function would not fail but
- return <c>undefined</c> for invalid values for <c>Item</c>.</p>
- </warning>
<p>In addition to the <c>{<anno>Item</anno>,<anno>Value</anno>}</c>
pairs defined for <seealso marker="#info/1"><c>info/1</c></seealso>,
the following items are allowed:</p>
diff --git a/lib/stdlib/doc/src/filelib.xml b/lib/stdlib/doc/src/filelib.xml
index 7c6380ce28..ad73fc254a 100644
--- a/lib/stdlib/doc/src/filelib.xml
+++ b/lib/stdlib/doc/src/filelib.xml
@@ -60,6 +60,12 @@
<datatype>
<name name="filename_all"/>
</datatype>
+ <datatype>
+ <name name="find_file_rule"/>
+ </datatype>
+ <datatype>
+ <name name="find_source_rule"/>
+ </datatype>
</datatypes>
<funcs>
@@ -226,7 +232,51 @@ filelib:wildcard("lib/**/*.{erl,hrl}")</code>
directory.</p>
</desc>
</func>
+
+ <func>
+ <name name="find_file" arity="2"/>
+ <name name="find_file" arity="3"/>
+ <fsummary>Find a file relative to a given directory.</fsummary>
+ <desc>
+ <p>Looks for a file of the given name by applying suffix rules to
+ the given directory path. For example, a rule <c>{"ebin", "src"}</c>
+ means that if the directory path ends with <c>"ebin"</c>, the
+ corresponding path ending in <c>"src"</c> should be searched.</p>
+ <p>If <c><anno>Rules</anno></c> is left out or is an empty list, the
+ default system rules are used. See also the Kernel application
+ parameter <seealso
+ marker="kernel:kernel_app#source_search_rules"><c>source_search_rules</c></seealso>.</p>
+ </desc>
+ </func>
+ <func>
+ <name name="find_source" arity="1"/>
+ <fsummary>Find the source file for a given object file.</fsummary>
+ <desc>
+ <p>Equivalent to <c>find_source(Base, Dir)</c>, where <c>Dir</c> is
+ <c>filename:dirname(<anno>FilePath</anno>)</c> and <c>Base</c> is
+ <c>filename:basename(<anno>FilePath</anno>)</c>.</p>
+ </desc>
+ </func>
+ <func>
+ <name name="find_source" arity="2"/>
+ <name name="find_source" arity="3"/>
+ <fsummary>Find a source file relative to a given directory.</fsummary>
+ <desc>
+ <p>Applies file extension specific rules to find the source file for
+ a given object file relative to the object directory. For example,
+ for a file with the extension <c>.beam</c>, the default rule is to
+ look for a file with a corresponding extension <c>.erl</c> by
+ replacing the suffix <c>"ebin"</c> of the object directory path with
+ <c>"src"</c>.
+ The file search is done through <seealso
+ marker="#find_file/3"><c>find_file/3</c></seealso>. The directory of
+ the object file is always tried before any other directory specified
+ by the rules.</p>
+ <p>If <c><anno>Rules</anno></c> is left out or is an empty list, the
+ default system rules are used. See also the Kernel application
+ parameter <seealso
+ marker="kernel:kernel_app#source_search_rules"><c>source_search_rules</c></seealso>.</p>
+ </desc>
+ </func>
</funcs>
</erlref>
-
-
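Example (not part of the patch): a sketch of the new lookup functions; the paths and rules are hypothetical.

    %% Default rules: e.g. for ".beam" look for ".erl", trying "src" when the
    %% object directory ends in "ebin".
    case filelib:find_source("/opt/app/ebin/my_mod.beam") of
        {ok, SrcFile} -> SrcFile;   %% e.g. "/opt/app/src/my_mod.erl"
        _ -> no_source_found
    end,
    %% find_file/3 with an explicit suffix rule.
    filelib:find_file("my_mod.hrl", "/opt/app/ebin", [{"ebin", "include"}]).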
diff --git a/lib/stdlib/doc/src/filename.xml b/lib/stdlib/doc/src/filename.xml
index 2a413835d0..0ccca37a9d 100644
--- a/lib/stdlib/doc/src/filename.xml
+++ b/lib/stdlib/doc/src/filename.xml
@@ -356,10 +356,12 @@ true
<p>Finds the source filename and compiler options for a module.
The result can be fed to <seealso marker="compiler:compile#file/2">
<c>compile:file/2</c></seealso> to compile the file again.</p>
- <warning><p>It is not recommended to use this function. If possible,
- use the <seealso marker="beam_lib"><c>beam_lib(3)</c></seealso>
- module to extract the abstract code format from the Beam file and
- compile that instead.</p></warning>
+ <warning>
+ <p>This function is deprecated. Use <seealso marker="filelib#find_source/1">
+ <c>filelib:find_source/1</c></seealso> instead for finding source files.</p>
+ <p>If possible, use the <seealso marker="beam_lib"><c>beam_lib(3)</c></seealso>
+ module to extract the compiler options and the abstract code
+ format from the Beam file and compile that instead.</p></warning>
<p>Argument <c><anno>Beam</anno></c>, which can be a string or an atom,
specifies either the module name or the path to the source
code, with or without extension <c>".erl"</c>. In either
@@ -511,6 +513,33 @@ true
</func>
<func>
+ <name name="safe_relative_path" arity="1"/>
+ <fsummary>Sanitize a relative path to avoid directory traversal attacks.</fsummary>
+ <desc>
+ <p>Sanitizes the relative path by eliminating ".." and "."
+ components to protect against directory traversal attacks.
+ Either returns the sanitized path name, or the atom
+ <c>unsafe</c> if the path is unsafe.
+ The path is considered unsafe in the following circumstances:</p>
+ <list type="bulleted">
+ <item><p>The path is not relative.</p></item>
+ <item><p>A ".." component would climb up above the root of
+ the relative path.</p></item>
+ </list>
+ <p><em>Examples:</em></p>
+ <pre>
+1> <input>filename:safe_relative_path("dir/sub_dir/..").</input>
+"dir"
+2> <input>filename:safe_relative_path("dir/..").</input>
+[]
+3> <input>filename:safe_relative_path("dir/../..").</input>
+unsafe
+4> <input>filename:safe_relative_path("/abs/path").</input>
+unsafe</pre>
+ </desc>
+ </func>
+
+ <func>
<name name="split" arity="1"/>
<fsummary>Split a filename into its path components.</fsummary>
<desc>
diff --git a/lib/stdlib/doc/src/gen_fsm.xml b/lib/stdlib/doc/src/gen_fsm.xml
index de06987d38..719ab2b558 100644
--- a/lib/stdlib/doc/src/gen_fsm.xml
+++ b/lib/stdlib/doc/src/gen_fsm.xml
@@ -534,11 +534,6 @@ gen_fsm:sync_send_all_state_event -----> Module:handle_sync_event/4
the function call fails.</p>
<p>Return value <c>Reply</c> is defined in the return value
of <c>Module:StateName/3</c>.</p>
- <note>
- <p>The ancient behavior of sometimes consuming the server
- exit message if the server died during the call while
- linked to the client was removed in Erlang 5.6/OTP R12B.</p>
- </note>
</desc>
</func>
</funcs>
diff --git a/lib/stdlib/doc/src/gen_server.xml b/lib/stdlib/doc/src/gen_server.xml
index 4a7dd60858..662076b5f0 100644
--- a/lib/stdlib/doc/src/gen_server.xml
+++ b/lib/stdlib/doc/src/gen_server.xml
@@ -162,11 +162,6 @@ gen_server:abcast -----> Module:handle_cast/2
of <c>Module:handle_call/3</c>.</p>
<p>The call can fail for many reasons, including time-out and the
called <c>gen_server</c> process dying before or during the call.</p>
- <note>
- <p>The ancient behavior of sometimes consuming the server
- exit message if the server died during the call while
- linked to the client was removed in Erlang 5.6/OTP R12B.</p>
- </note>
</desc>
</func>
diff --git a/lib/stdlib/doc/src/gen_statem.xml b/lib/stdlib/doc/src/gen_statem.xml
index fd498ee82e..5eb13db1aa 100644
--- a/lib/stdlib/doc/src/gen_statem.xml
+++ b/lib/stdlib/doc/src/gen_statem.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>2016</year>
+ <year>2016-2017</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -587,8 +587,8 @@ handle_event(_, _, State, Data) ->
<name name="state_enter"/>
<desc>
<p>
- If the state machine should use <em>state enter calls</em>
- is selected when starting the <c>gen_statem</c>
+ Whether the state machine should use <em>state enter calls</em>
+ or not is selected when starting the <c>gen_statem</c>
and after code change using the return value from
<seealso marker="#Module:callback_mode/0"><c>Module:callback_mode/0</c></seealso>.
</p>
@@ -606,7 +606,16 @@ handle_event(_, _, State, Data) ->
See
<seealso marker="#Module:StateName/3"><c>Module:StateName/3</c></seealso>
and
- <seealso marker="#Module:handle_event/4"><c>Module:handle_event/4</c></seealso>.
+ <seealso marker="#Module:handle_event/4"><c>Module:handle_event/4</c></seealso>.
+ Such a call can be repeated by returning a
+ <seealso marker="#type-state_callback_result">
+ <c>repeat_state</c>
+ </seealso>
+ or
+ <seealso marker="#type-state_callback_result">
+ <c>repeat_state_and_data</c>
+ </seealso>
+ tuple from the state callback.
</p>
<p>
If
@@ -625,7 +634,8 @@ handle_event(_, _, State, Data) ->
right before entering the initial state even though this
formally is not a state change.
In this case <c>OldState</c> will be the same as <c>State</c>,
- which can not happen for a subsequent state change.
+ which can not happen for a subsequent state change,
+ but will happen when repeating the state enter call.
</p>
</desc>
</datatype>
@@ -640,7 +650,15 @@ handle_event(_, _, State, Data) ->
<list type="ordered">
<item>
<p>
- If the state changes or is the initial state, and
+ If the state changes, is the initial state, or if
+ <seealso marker="#type-state_callback_result">
+ <c>repeat_state</c>
+ </seealso>
+ or
+ <seealso marker="#type-state_callback_result">
+ <c>repeat_state_and_data</c>
+ </seealso>
+ is used, and also
<seealso marker="#type-state_enter"><em>state enter calls</em></seealso>
are used, the <c>gen_statem</c> calls
the new state callback with arguments
@@ -983,6 +1001,33 @@ handle_event(_, _, State, Data) ->
</desc>
</datatype>
<datatype>
+ <name name="init_result"/>
+ <desc>
+ <p>
+ For a successful initialization,
+ <c><anno>State</anno></c> is the initial
+ <seealso marker="#type-state"><c>state()</c></seealso>
+ and <c><anno>Data</anno></c> the initial server
+ <seealso marker="#type-data"><c>data()</c></seealso>
+ of the <c>gen_statem</c>.
+ </p>
+ <p>
+ The <seealso marker="#type-action"><c>Actions</c></seealso>
+ are executed when entering the first
+ <seealso marker="#type-state">state</seealso> just as for a
+ <seealso marker="#state callback">state callback</seealso>,
+ except that the action <c>postpone</c> is forced to
+ <c>false</c> since there is no event to postpone.
+ </p>
+ <p>
+ For an unsuccessful initialization,
+ <c>{stop,<anno>Reason</anno>}</c>
+ or <c>ignore</c> should be used; see
+ <seealso marker="#start_link/3"><c>start_link/3,4</c></seealso>.
+ </p>
+ </desc>
+ </datatype>
+ <datatype>
<name name="state_enter_result"/>
<desc>
<p>
@@ -1068,6 +1113,37 @@ handle_event(_, _, State, Data) ->
<c>{next_state,CurrentState,CurrentData,<anno>Actions</anno>}</c>.
</p>
</item>
+ <tag><c>repeat_state</c></tag>
+ <item>
+ <p>
+ The <c>gen_statem</c> keeps the current state, or
+ does a state transition to the current state if you like,
+ sets <c><anno>NewData</anno></c>,
+ and executes all <c><anno>Actions</anno></c>.
+ If the <c>gen_statem</c> runs with
+ <seealso marker="#type-state_enter"><em>state enter calls</em></seealso>,
+ the state enter call is repeated, see type
+ <seealso marker="#type-transition_option"><c>transition_option()</c></seealso>,
+ otherwise <c>repeat_state</c> is the same as
+ <c>keep_state</c>.
+ </p>
+ </item>
+ <tag><c>repeat_state_and_data</c></tag>
+ <item>
+ <p>
+ The <c>gen_statem</c> keeps the current state and data, or
+ does a state transition to the current state if you like,
+ and executes all <c><anno>Actions</anno></c>.
+ This is the same as
+ <c>{repeat_state,CurrentData,<anno>Actions</anno>}</c>.
+ If the <c>gen_statem</c> runs with
+ <seealso marker="#type-state_enter"><em>state enter calls</em></seealso>,
+ the state enter call is repeated, see type
+ <seealso marker="#type-transition_option"><c>transition_option()</c></seealso>,
+ otherwise <c>repeat_state_and_data</c> is the same as
+ <c>keep_state_and_data</c>.
+ </p>
+ </item>
<tag><c>stop</c></tag>
<item>
<p>
@@ -1609,29 +1685,33 @@ handle_event(_, _, State, Data) ->
It is recommended to use an atom as <c>Reason</c> since
it will be wrapped in an <c>{error,Reason}</c> tuple.
</p>
+ <p>
+ Also note that when upgrading a <c>gen_statem</c>,
+ this function and hence
+ the <c>Change={advanced,Extra}</c> parameter in the
+ <seealso marker="sasl:appup"><c>appup</c></seealso> file
+ is not only needed to update the internal state
+ or to act on the <c>Extra</c> argument.
+ It is also needed if an upgrade or downgrade should change
+ <seealso marker="#type-callback_mode"><em>callback mode</em></seealso>,
+ or else the callback mode after the code change
+ will not be honoured,
+ most probably causing a server crash.
+ </p>
</desc>
</func>
<func>
- <name>Module:init(Args) -> Result</name>
+ <name>Module:init(Args) -> Result(StateType)</name>
<fsummary>
Optional function for initializing process and internal state.
</fsummary>
<type>
<v>Args = term()</v>
- <v>Result = {ok,State,Data}</v>
- <v>&nbsp;| {ok,State,Data,Actions}</v>
- <v>&nbsp;| {stop,Reason} | ignore</v>
- <v>State = <seealso marker="#type-state">state()</seealso></v>
- <v>
- Data = <seealso marker="#type-data">data()</seealso>
- </v>
<v>
- Actions =
- [<seealso marker="#type-action">action()</seealso>] |
- <seealso marker="#type-action">action()</seealso>
+ Result(StateType) =
+ <seealso marker="#type-init_result">init_result(StateType)</seealso>
</v>
- <v>Reason = term()</v>
</type>
<desc>
<marker id="Module:init-1"/>
@@ -1644,30 +1724,9 @@ handle_event(_, _, State, Data) ->
the implementation state and server data.
</p>
<p>
- <c>Args</c> is the <c>Args</c> argument provided to the start
+ <c>Args</c> is the <c>Args</c> argument provided to that start
function.
</p>
- <p>
- If the initialization is successful, the function is to
- return <c>{ok,State,Data}</c> or
- <c>{ok,State,Data,Actions}</c>.
- <c>State</c> is the initial
- <seealso marker="#type-state"><c>state()</c></seealso>
- and <c>Data</c> the initial server
- <seealso marker="#type-data"><c>data()</c></seealso>.
- </p>
- <p>
- The <seealso marker="#type-action"><c>Actions</c></seealso>
- are executed when entering the first
- <seealso marker="#type-state">state</seealso> just as for a
- <seealso marker="#state callback">state callback</seealso>.
- </p>
- <p>
- If the initialization fails,
- the function is to return <c>{stop,Reason}</c>
- or <c>ignore</c>; see
- <seealso marker="#start_link/3"><c>start_link/3,4</c></seealso>.
- </p>
<note>
<p>
This callback is optional, so a callback module does not need
@@ -1873,22 +1932,33 @@ handle_event(_, _, State, Data) ->
<seealso marker="#type-enter_action">actions</seealso>
that may be returned:
<seealso marker="#type-postpone"><c>postpone()</c></seealso>
- and
+ is not allowed since a <em>state enter call</em> is not
+ an event so there is no event to postpone, and
<seealso marker="#type-action"><c>{next_event,_,_}</c></seealso>
- are not allowed.
+ is not allowed since using <em>state enter calls</em>
+ should not affect how events are consumed and produced.
You may also not change states from this call.
Should you return <c>{next_state,NextState, ...}</c>
with <c>NextState =/= State</c> the <c>gen_statem</c> crashes.
- You are advised to use <c>{keep_state,...}</c> or
- <c>keep_state_and_data</c>.
+ It is possible to use <c>{repeat_state, ...}</c>,
+ <c>{repeat_state_and_data,_}</c> or
+ <c>repeat_state_and_data</c>, but all of them make little
+ sense since you will immediately be called again with a new
+ <em>state enter call</em>, making this just a weird way
+ of looping, and there are better ways to loop in Erlang.
+ You are advised to use <c>{keep_state,...}</c>,
+ <c>{keep_state_and_data,_}</c> or
+ <c>keep_state_and_data</c> since you can not change states
+ from a <em>state enter call</em> anyway.
</p>
<p>
Note the fact that you can use
<seealso marker="erts:erlang#throw/1"><c>throw</c></seealso>
to return the result, which can be useful.
For example to bail out with <c>throw(keep_state_and_data)</c>
- from deep within complex code that is in no position to
- return <c>{next_state,State,Data}</c>.
+ from deep within complex code that can not
+ return <c>{next_state,State,Data}</c> because
+ <c>State</c> or <c>Data</c> is no longer in scope.
</p>
</desc>
</func>
@@ -1903,6 +1973,11 @@ handle_event(_, _, State, Data) ->
<v>Ignored = term()</v>
</type>
<desc>
+ <note>
+ <p>This callback is optional, so callback modules need not
+ export it. The <c>gen_statem</c> module provides a default
+ implementation without cleanup.</p>
+ </note>
<p>
This function is called by a <c>gen_statem</c>
when it is about to terminate. It is to be the opposite of
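Example (not part of the patch): a compact handle_event_function sketch of repeat_state together with state enter calls, as described above; the state and message names are made up.

    callback_mode() ->
        [handle_event_function, state_enter].

    %% The enter call runs on every entry into 'waiting', including when the
    %% state is repeated via repeat_state below.
    handle_event(enter, _OldState, waiting, Data) ->
        TRef = erlang:start_timer(5000, self(), waiting_timeout),
        {keep_state, Data#{timer => TRef}};
    %% repeat_state keeps the state but re-runs the enter call, re-arming the timer.
    handle_event(cast, rearm, waiting, Data) ->
        {repeat_state, Data};
    handle_event(_EventType, _Event, _State, _Data) ->
        keep_state_and_data.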
diff --git a/lib/stdlib/doc/src/notes.xml b/lib/stdlib/doc/src/notes.xml
index 0143686bb2..0e8bf3d27c 100644
--- a/lib/stdlib/doc/src/notes.xml
+++ b/lib/stdlib/doc/src/notes.xml
@@ -3163,7 +3163,7 @@
<p>
Two bugs in io:format for ~F.~Ps has been corrected. When
length(S) >= abs(F) > P, the precision P was incorrectly
- ignored. When F == P > lenght(S) the result was
+ ignored. When F == P > length(S) the result was
incorrectly left adjusted. Bug found by Ali Yakout who
also provided a fix.</p>
<p>
diff --git a/lib/stdlib/doc/src/shell.xml b/lib/stdlib/doc/src/shell.xml
index d6e8036d4e..f52bc39deb 100644
--- a/lib/stdlib/doc/src/shell.xml
+++ b/lib/stdlib/doc/src/shell.xml
@@ -165,12 +165,12 @@
<item>
<p>Evaluates <c>shell_default:help()</c>.</p>
</item>
- <tag><c>c(File)</c></tag>
+ <tag><c>c(Mod)</c></tag>
<item>
- <p>Evaluates <c>shell_default:c(File)</c>. This compiles
- and loads code in <c>File</c> and purges old versions of
- code, if necessary. Assumes that the file and module names
- are the same.</p>
+ <p>Evaluates <c>shell_default:c(Mod)</c>. This compiles and
+ loads the module <c>Mod</c> and purges old versions of the
+ code, if necessary. <c>Mod</c> can be either a module name or
+ a source file path, with or without <c>.erl</c> extension.</p>
</item>
<tag><c>catch_exception(Bool)</c></tag>
<item>
diff --git a/lib/stdlib/doc/src/sys.xml b/lib/stdlib/doc/src/sys.xml
index 9091a46df9..45171f814d 100644
--- a/lib/stdlib/doc/src/sys.xml
+++ b/lib/stdlib/doc/src/sys.xml
@@ -83,8 +83,8 @@
<p>If the modules used to implement the process change dynamically
during runtime, the process must understand one more message. An
example is the <seealso marker="gen_event"><c>gen_event</c></seealso>
- processes. The message is <c>{get_modules, From}</c>.
- The reply to this message is <c>From ! {modules, Modules}</c>, where
+ processes. The message is <c>{_Label, {From, Ref}, get_modules}</c>.
+ The reply to this message is <c>From ! {Ref, Modules}</c>, where
<c>Modules</c> is a list of the currently active modules in the
process.</p>
<p>This message is used by the release handler to find which
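Example (not part of the patch): the receive clause a hand-rolled special process would need for the message format described above; the rest of the loop is elided.

    loop(State) ->
        receive
            {_Label, {From, Ref}, get_modules} ->
                %% Reply with the modules currently implementing this process.
                From ! {Ref, [?MODULE]},
                loop(State);
            _Other ->
                loop(State)
        end.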
diff --git a/lib/stdlib/src/Makefile b/lib/stdlib/src/Makefile
index d6c0ff8d8d..ed3dfb342c 100644
--- a/lib/stdlib/src/Makefile
+++ b/lib/stdlib/src/Makefile
@@ -130,7 +130,7 @@ HRL_FILES= \
../include/qlc.hrl \
../include/zip.hrl
-INTERNAL_HRL_FILES= dets.hrl
+INTERNAL_HRL_FILES= dets.hrl erl_tar.hrl
ERL_FILES= $(MODULES:%=%.erl)
@@ -228,7 +228,7 @@ $(EBIN)/dets_v9.beam: dets.hrl
$(EBIN)/erl_bits.beam: ../include/erl_bits.hrl
$(EBIN)/erl_compile.beam: ../include/erl_compile.hrl ../../kernel/include/file.hrl
$(EBIN)/erl_lint.beam: ../include/erl_bits.hrl
-$(EBIN)/erl_tar.beam: ../../kernel/include/file.hrl
+$(EBIN)/erl_tar.beam: ../../kernel/include/file.hrl erl_tar.hrl
$(EBIN)/file_sorter.beam: ../../kernel/include/file.hrl
$(EBIN)/filelib.beam: ../../kernel/include/file.hrl
$(EBIN)/filename.beam: ../../kernel/include/file.hrl
diff --git a/lib/stdlib/src/base64.erl b/lib/stdlib/src/base64.erl
index bf259e6691..0c8d817910 100644
--- a/lib/stdlib/src/base64.erl
+++ b/lib/stdlib/src/base64.erl
@@ -219,38 +219,49 @@ mime_decode_binary(Result, <<0:8,T/bits>>) ->
mime_decode_binary(Result, T);
mime_decode_binary(Result0, <<C:8,T/bits>>) ->
case element(C, ?DECODE_MAP) of
- Bits when is_integer(Bits) ->
- mime_decode_binary(<<Result0/bits,Bits:6>>, T);
- eq ->
- case tail_contains_more(T, false) of
- {<<>>, Eq} ->
- %% No more valid data.
- case bit_size(Result0) rem 8 of
- 0 ->
- %% '====' is not uncommon.
- Result0;
- 4 when Eq ->
- %% enforce at least one more '=' only ignoring illegals and spacing
- Split = byte_size(Result0) - 1,
- <<Result:Split/bytes,_:4>> = Result0,
- Result;
- 2 ->
- %% remove 2 bits
- Split = byte_size(Result0) - 1,
- <<Result:Split/bytes,_:2>> = Result0,
- Result
- end;
- {More, _} ->
- %% More valid data, skip the eq as invalid
- mime_decode_binary(Result0, More)
- end;
- _ ->
- mime_decode_binary(Result0, T)
+ Bits when is_integer(Bits) ->
+ mime_decode_binary(<<Result0/bits,Bits:6>>, T);
+ eq ->
+ mime_decode_binary_after_eq(Result0, T, false);
+ _ ->
+ mime_decode_binary(Result0, T)
end;
-mime_decode_binary(Result, <<>>) ->
+mime_decode_binary(Result, _) ->
true = is_binary(Result),
Result.
+mime_decode_binary_after_eq(Result, <<0:8,T/bits>>, Eq) ->
+ mime_decode_binary_after_eq(Result, T, Eq);
+mime_decode_binary_after_eq(Result0, <<C:8,T/bits>>, Eq) ->
+ case element(C, ?DECODE_MAP) of
+ bad ->
+ mime_decode_binary_after_eq(Result0, T, Eq);
+ ws ->
+ mime_decode_binary_after_eq(Result0, T, Eq);
+ eq ->
+ mime_decode_binary_after_eq(Result0, T, true);
+ Bits when is_integer(Bits) ->
+ %% More valid data, skip the eq as invalid
+ mime_decode_binary(<<Result0/bits,Bits:6>>, T)
+ end;
+mime_decode_binary_after_eq(Result0, <<>>, Eq) ->
+ %% No more valid data.
+ case bit_size(Result0) rem 8 of
+ 0 ->
+ %% '====' is not uncommon.
+ Result0;
+ 4 when Eq ->
+ %% enforce at least one more '=' only ignoring illegals and spacing
+ Split = byte_size(Result0) - 1,
+ <<Result:Split/bytes,_:4>> = Result0,
+ Result;
+ 2 ->
+ %% remove 2 bits
+ Split = byte_size(Result0) - 1,
+ <<Result:Split/bytes,_:2>> = Result0,
+ Result
+ end.
+
decode([], A) -> A;
decode([$=,$=,C2,C1|Cs], A) ->
Bits2x6 = (b64d(C1) bsl 18) bor (b64d(C2) bsl 12),
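Example (not part of the patch): the refactoring above moves the handling of '=' into mime_decode_binary_after_eq/3 without changing the observable behaviour; non-alphabet bytes are still skipped and '=' still terminates the valid data, e.g.:

    <<"ABCD">> = base64:mime_decode(<<"QUJD\r\nRA==">>),
    <<"A">>    = base64:mime_decode(<<"QQ==">>).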
diff --git a/lib/stdlib/src/beam_lib.erl b/lib/stdlib/src/beam_lib.erl
index d7ee5c1f5d..461acf03be 100644
--- a/lib/stdlib/src/beam_lib.erl
+++ b/lib/stdlib/src/beam_lib.erl
@@ -63,7 +63,7 @@
-type label() :: integer().
-type chunkid() :: nonempty_string(). % approximation of the strings below
-%% "Abst" | "Attr" | "CInf" | "ExpT" | "ImpT" | "LocT" | "Atom".
+%% "Abst" | "Attr" | "CInf" | "ExpT" | "ImpT" | "LocT" | "Atom" | "AtU8".
-type chunkname() :: 'abstract_code' | 'attributes' | 'compile_info'
| 'exports' | 'labeled_exports'
| 'imports' | 'indexed_imports'
@@ -520,6 +520,8 @@ read_chunk_data(File0, ChunkNames0, Options)
end.
%% -> {ok, list()} | throw(Error)
+check_chunks([atoms | Ids], File, IL, L) ->
+ check_chunks(Ids, File, ["Atom", "AtU8" | IL], [{atom_chunk, atoms} | L]);
check_chunks([ChunkName | Ids], File, IL, L) when is_atom(ChunkName) ->
ChunkId = chunk_name_to_id(ChunkName, File),
check_chunks(Ids, File, [ChunkId | IL], [{ChunkId, ChunkName} | L]);
@@ -537,6 +539,10 @@ scan_beam(File, What0, AllowMissingChunks) ->
case scan_beam1(File, What0) of
{missing, _FD, Mod, Data, What} when AllowMissingChunks ->
{ok, Mod, [{Id, missing_chunk} || Id <- What] ++ Data};
+ {missing, _FD, Mod, Data, ["Atom"]} ->
+ {ok, Mod, Data};
+ {missing, _FD, Mod, Data, ["AtU8"]} ->
+ {ok, Mod, Data};
{missing, FD, _Mod, _Data, What} ->
error({missing_chunk, filename(FD), hd(What)});
R ->
@@ -581,18 +587,23 @@ scan_beam(FD, Pos, What, Mod, Data) ->
error({invalid_beam_file, filename(FD), Pos})
end.
-get_data(Cs, "Atom"=Id, FD, Size, Pos, Pos2, _Mod, Data) ->
+get_atom_data(Cs, Id, FD, Size, Pos, Pos2, Data, Encoding) ->
NewCs = del_chunk(Id, Cs),
{NFD, Chunk} = get_chunk(Id, Pos, Size, FD),
<<_Num:32, Chunk2/binary>> = Chunk,
- {Module, _} = extract_atom(Chunk2),
+ {Module, _} = extract_atom(Chunk2, Encoding),
C = case Cs of
info ->
{Id, Pos, Size};
_ ->
{Id, Chunk}
end,
- scan_beam(NFD, Pos2, NewCs, Module, [C | Data]);
+ scan_beam(NFD, Pos2, NewCs, Module, [C | Data]).
+
+get_data(Cs, "Atom" = Id, FD, Size, Pos, Pos2, _Mod, Data) ->
+ get_atom_data(Cs, Id, FD, Size, Pos, Pos2, Data, latin1);
+get_data(Cs, "AtU8" = Id, FD, Size, Pos, Pos2, _Mod, Data) ->
+ get_atom_data(Cs, Id, FD, Size, Pos, Pos2, Data, utf8);
get_data(info, Id, FD, Size, Pos, Pos2, Mod, Data) ->
scan_beam(FD, Pos2, info, Mod, [{Id, Pos, Size} | Data]);
get_data(Chunks, Id, FD, Size, Pos, Pos2, Mod, Data) ->
@@ -624,6 +635,9 @@ get_chunk(Id, Pos, Size, FD) ->
{NFD, Chunk}
end.
+chunks_to_data([{atom_chunk, Name} | CNs], Chunks, File, Cs, Module, Atoms, L) ->
+ {NewAtoms, Ret} = chunk_to_data(Name, <<"">>, File, Cs, Atoms, Module),
+ chunks_to_data(CNs, Chunks, File, Cs, Module, NewAtoms, [Ret | L]);
chunks_to_data([{Id, Name} | CNs], Chunks, File, Cs, Module, Atoms, L) ->
{_Id, Chunk} = lists:keyfind(Id, 1, Chunks),
{NewAtoms, Ret} = chunk_to_data(Name, Chunk, File, Cs, Atoms, Module),
@@ -651,7 +665,7 @@ chunk_to_data(abstract_code=Id, Chunk, File, _Cs, AtomTable, Mod) ->
<<>> ->
{AtomTable, {Id, no_abstract_code}};
<<0:8,N:8,Mode0:N/binary,Rest/binary>> ->
- Mode = list_to_atom(binary_to_list(Mode0)),
+ Mode = binary_to_atom(Mode0, utf8),
decrypt_abst(Mode, Mod, File, Id, AtomTable, Rest);
_ ->
case catch binary_to_term(Chunk) of
@@ -683,7 +697,6 @@ chunk_to_data(ChunkId, Chunk, _File,
_Cs, AtomTable, _Module) when is_list(ChunkId) ->
{AtomTable, {ChunkId, Chunk}}. % Chunk is a binary
-chunk_name_to_id(atoms, _) -> "Atom";
chunk_name_to_id(indexed_imports, _) -> "ImpT";
chunk_name_to_id(imports, _) -> "ImpT";
chunk_name_to_id(exports, _) -> "ExpT";
@@ -738,25 +751,30 @@ atm(AT, N) ->
%% AT is updated.
ensure_atoms({empty, AT}, Cs) ->
- {_Id, AtomChunk} = lists:keyfind("Atom", 1, Cs),
- extract_atoms(AtomChunk, AT),
+ case lists:keyfind("AtU8", 1, Cs) of
+ {_Id, AtomChunk} when is_binary(AtomChunk) ->
+ extract_atoms(AtomChunk, AT, utf8);
+ _ ->
+ {_Id, AtomChunk} = lists:keyfind("Atom", 1, Cs),
+ extract_atoms(AtomChunk, AT, latin1)
+ end,
AT;
ensure_atoms(AT, _Cs) ->
AT.
-extract_atoms(<<_Num:32, B/binary>>, AT) ->
- extract_atoms(B, 1, AT).
+extract_atoms(<<_Num:32, B/binary>>, AT, Encoding) ->
+ extract_atoms(B, 1, AT, Encoding).
-extract_atoms(<<>>, _I, _AT) ->
+extract_atoms(<<>>, _I, _AT, _Encoding) ->
true;
-extract_atoms(B, I, AT) ->
- {Atom, B1} = extract_atom(B),
+extract_atoms(B, I, AT, Encoding) ->
+ {Atom, B1} = extract_atom(B, Encoding),
true = ets:insert(AT, {I, Atom}),
- extract_atoms(B1, I+1, AT).
+ extract_atoms(B1, I+1, AT, Encoding).
-extract_atom(<<Len, B/binary>>) ->
+extract_atom(<<Len, B/binary>>, Encoding) ->
<<SB:Len/binary, Tail/binary>> = B,
- {list_to_atom(binary_to_list(SB)), Tail}.
+ {binary_to_atom(SB, Encoding), Tail}.
%%% Utils.
@@ -856,12 +874,12 @@ significant_chunks() ->
%% for a module. They are listed in the order that they should be MD5:ed.
md5_chunks() ->
- ["Atom", "Code", "StrT", "ImpT", "ExpT", "FunT", "LitT"].
+ ["Atom", "AtU8", "Code", "StrT", "ImpT", "ExpT", "FunT", "LitT"].
%% The following chunks are mandatory in every Beam file.
mandatory_chunks() ->
- ["Code", "ExpT", "ImpT", "StrT", "Atom"].
+ ["Code", "ExpT", "ImpT", "StrT"].
%%% ====================================================================
%%% The rest of the file handles encrypted debug info.
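Example (not part of the patch): with the clauses above, the symbolic 'atoms' chunk can be read regardless of whether the beam file carries the legacy "Atom" chunk or the UTF-8 "AtU8" chunk; the beam file used here is just whatever code:which/1 finds.

    %% Atoms is a list of {Index, Atom} pairs, decoded as latin1 or utf8
    %% depending on which chunk is present in the file.
    {ok, {_Mod, [{atoms, Atoms}]}} =
        beam_lib:chunks(code:which(lists), [atoms]).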
diff --git a/lib/stdlib/src/binary.erl b/lib/stdlib/src/binary.erl
index ccc827ca2d..45666fbcb4 100644
--- a/lib/stdlib/src/binary.erl
+++ b/lib/stdlib/src/binary.erl
@@ -24,7 +24,7 @@
-export_type([cp/0]).
--opaque cp() :: {'am' | 'bm', binary()}.
+-opaque cp() :: {'am' | 'bm', reference()}.
-type part() :: {Start :: non_neg_integer(), Length :: integer()}.
%%% BIFs.
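Example (not part of the patch): the cp() term is opaque either way and is only ever passed back into this module, so the change from a binary payload to a reference is invisible to callers; a minimal usage sketch:

    Pattern = binary:compile_pattern([<<"cat">>, <<"dog">>]),
    {6,3} = binary:match(<<"a big dog">>, Pattern).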
diff --git a/lib/stdlib/src/c.erl b/lib/stdlib/src/c.erl
index d36630214c..52df2319dd 100644
--- a/lib/stdlib/src/c.erl
+++ b/lib/stdlib/src/c.erl
@@ -23,7 +23,7 @@
%% Avoid warning for local function error/2 clashing with autoimported BIF.
-compile({no_auto_import,[error/2]}).
--export([help/0,lc/1,c/1,c/2,nc/1,nc/2, nl/1,l/1,i/0,i/1,ni/0,
+-export([help/0,lc/1,c/1,c/2,c/3,nc/1,nc/2, nl/1,l/1,i/0,i/1,ni/0,
y/1, y/2,
lc_batch/0, lc_batch/1,
i/3,pid/3,m/0,m/1,mm/0,lm/0,
@@ -35,7 +35,7 @@
-export([appcall/4]).
-import(lists, [reverse/1,flatten/1,sublist/3,sort/1,keysort/2,
- concat/1,max/1,min/1,foreach/2,foldl/3,flatmap/2]).
+ max/1,min/1,foreach/2,foldl/3,flatmap/2]).
-import(io, [format/1, format/2]).
%%-----------------------------------------------------------------------
@@ -44,7 +44,7 @@
help() ->
io:put_chars(<<"bt(Pid) -- stack backtrace for a process\n"
- "c(File) -- compile and load code in <File>\n"
+ "c(Mod) -- compile and load module or file <Mod>\n"
"cd(Dir) -- change working directory\n"
"flush() -- flush any messages sent to the shell\n"
"help() -- help info\n"
@@ -72,32 +72,224 @@ help() ->
"xm(M) -- cross reference check a module\n"
"y(File) -- generate a Yecc parser\n">>).
-%% c(FileName)
-%% Compile a file/module.
-
--spec c(File) -> {'ok', Module} | 'error' when
- File :: file:name(),
- Module :: module().
+%% c(Module)
+%% Compile a module/file.
+
+-spec c(Module) -> {'ok', ModuleName} | 'error' when
+ Module :: file:name(),
+ ModuleName :: module().
+
+c(Module) -> c(Module, []).
+
+-spec c(Module, Options) -> {'ok', ModuleName} | 'error' when
+ Module :: file:name(),
+ Options :: [compile:option()] | compile:option(),
+ ModuleName :: module().
+
+c(Module, SingleOption) when not is_list(SingleOption) ->
+ c(Module, [SingleOption]);
+c(Module, Opts) when is_atom(Module) ->
+ %% either a module name or a source file name (possibly without
+ %% suffix); if such a source file exists, it is used to compile from
+ %% scratch with the given options, otherwise look for an object file
+ Suffix = case filename:extension(Module) of
+ "" -> src_suffix(Opts);
+ S -> S
+ end,
+ SrcFile = filename:rootname(Module, Suffix) ++ Suffix,
+ case filelib:is_file(SrcFile) of
+ true ->
+ compile_and_load(SrcFile, Opts);
+ false ->
+ c(Module, Opts, fun (_) -> true end)
+ end;
+c(Module, Opts) ->
+ %% we never interpret a string as a module name, only as a file
+ compile_and_load(Module, Opts).
-c(File) -> c(File, []).
+%% This tries to find an existing object file and use its compile_info and
+%% source path to recompile the module, overwriting the old object file.
+%% The Filter parameter is applied to the old compile options
--spec c(File, Options) -> {'ok', Module} | 'error' when
- File :: file:name(),
+-spec c(Module, Options, Filter) -> {'ok', ModuleName} | 'error' when
+ Module :: atom(),
Options :: [compile:option()],
- Module :: module().
+ Filter :: fun ((compile:option()) -> boolean()),
+ ModuleName :: module().
+
+c(Module, Options, Filter) when is_atom(Module) ->
+ case find_beam(Module) of
+ BeamFile when is_list(BeamFile) ->
+ c(Module, Options, Filter, BeamFile);
+ Error ->
+ {error, Error}
+ end.
+
+c(Module, Options, Filter, BeamFile) ->
+ case compile_info(Module, BeamFile) of
+ Info when is_list(Info) ->
+ case find_source(BeamFile, Info) of
+ SrcFile when is_list(SrcFile) ->
+ c(SrcFile, Options, Filter, BeamFile, Info);
+ Error ->
+ Error
+ end;
+ Error ->
+ Error
+ end.
+
+c(SrcFile, NewOpts, Filter, BeamFile, Info) ->
+ %% Filter old options; also remove options that will be replaced.
+ %% Write new beam over old beam unless other outdir is specified.
+ F = fun (Opt) -> not is_outdir_opt(Opt) andalso Filter(Opt) end,
+ Options = (NewOpts ++ [{outdir,filename:dirname(BeamFile)}]
+ ++ lists:filter(F, old_options(Info))),
+ format("Recompiling ~s\n", [SrcFile]),
+ safe_recompile(SrcFile, Options, BeamFile).
+
+old_options(Info) ->
+ case lists:keyfind(options, 1, Info) of
+ {options, Opts} -> Opts;
+ false -> []
+ end.
+
+%% prefer the source path in the compile info if the file exists,
+%% otherwise do a standard source search relative to the beam file
+find_source(BeamFile, Info) ->
+ case lists:keyfind(source, 1, Info) of
+ {source, SrcFile} ->
+ case filelib:is_file(SrcFile) of
+ true -> SrcFile;
+ false -> find_source(BeamFile)
+ end;
+ _ ->
+ find_source(BeamFile)
+ end.
+
+find_source(BeamFile) ->
+ case filelib:find_source(BeamFile) of
+ {ok, SrcFile} -> SrcFile;
+ _ -> {error, no_source}
+ end.
-c(File, Opts0) when is_list(Opts0) ->
- Opts = [report_errors,report_warnings|Opts0],
+%% find the beam file for a module, preferring the path reported by code:which()
+%% if it still exists, or otherwise by searching the code path
+find_beam(Module) when is_atom(Module) ->
+ case code:which(Module) of
+ Beam when is_list(Beam), Beam =/= "" ->
+ case erlang:module_loaded(Module) of
+ false ->
+ Beam; % code:which/1 found this in the path
+ true ->
+ case filelib:is_file(Beam) of
+ true -> Beam;
+ false -> find_beam_1(Module) % file moved?
+ end
+ end;
+ Other when Other =:= ""; Other =:= cover_compiled ->
+ %% module is loaded but not compiled directly from source
+ find_beam_1(Module);
+ Error ->
+ Error
+ end.
+
+find_beam_1(Module) ->
+ File = atom_to_list(Module) ++ code:objfile_extension(),
+ case code:where_is_file(File) of
+ Beam when is_list(Beam) ->
+ Beam;
+ Error ->
+ Error
+ end.
+
+%% get the compile_info for a module
+%% -will report the info for the module in memory, if loaded
+%% -will try to find and examine the beam file if not in memory
+%% -will not cause a module to become loaded by accident
+compile_info(Module, Beam) when is_atom(Module) ->
+ case erlang:module_loaded(Module) of
+ true ->
+ %% getting the compile info for a loaded module should normally
+ %% work, but return an empty info list if it fails
+ try erlang:get_module_info(Module, compile)
+ catch _:_ -> []
+ end;
+ false ->
+ case beam_lib:chunks(Beam, [compile_info]) of
+ {ok, {_Module, [{compile_info, Info}]}} ->
+ Info;
+ Error ->
+ Error
+ end
+ end.
+
+%% compile module, backing up any existing target file and restoring the
+%% old version if compilation fails (this should only be used when we have
+%% an old beam file that we want to preserve)
+safe_recompile(File, Options, BeamFile) ->
+ %% Note that it's possible that because of options such as 'to_asm',
+ %% the compiler might not actually write a new beam file at all
+ Backup = BeamFile ++ ".bak",
+ case file:rename(BeamFile, Backup) of
+ Status when Status =:= ok; Status =:= {error,enoent} ->
+ case compile_and_load(File, Options) of
+ {ok, _} = Result ->
+ _ = if Status =:= ok -> file:delete(Backup);
+ true -> ok
+ end,
+ Result;
+ Error ->
+ _ = if Status =:= ok -> file:rename(Backup, BeamFile);
+ true -> ok
+ end,
+ Error
+ end;
+ Error ->
+ Error
+ end.
+
+%% Compile the file and load the resulting object code (if any).
+%% Automatically ensures that there is an outdir option, by default the
+%% directory of File, and that a 'from' option will be passed to match the
+%% actual source suffix if needed (unless already specified).
+compile_and_load(File, Opts0) when is_list(Opts0) ->
+ Opts = [report_errors, report_warnings
+ | ensure_from(filename:extension(File),
+ ensure_outdir(filename:dirname(File), Opts0))],
case compile:file(File, Opts) of
{ok,Mod} -> %Listing file.
- machine_load(Mod, File, Opts);
+ purge_and_load(Mod, File, Opts);
{ok,Mod,_Ws} -> %Warnings maybe turned on.
- machine_load(Mod, File, Opts);
+ purge_and_load(Mod, File, Opts);
Other -> %Errors go here
Other
end;
-c(File, Opt) ->
- c(File, [Opt]).
+compile_and_load(File, Opt) ->
+ compile_and_load(File, [Opt]).
+
+ensure_from(Suffix, Opts0) ->
+ case lists:partition(fun is_from_opt/1, Opts0++from_opt(Suffix)) of
+ {[Opt|_], Opts} -> [Opt | Opts];
+ {[], Opts} -> Opts
+ end.
+
+ensure_outdir(Dir, Opts0) ->
+ {[Opt|_], Opts} = lists:partition(fun is_outdir_opt/1,
+ Opts0++[{outdir,Dir}]),
+ [Opt | Opts].
+
+is_outdir_opt({outdir, _}) -> true;
+is_outdir_opt(_) -> false.
+
+is_from_opt(from_core) -> true;
+is_from_opt(from_asm) -> true;
+is_from_opt(from_beam) -> true;
+is_from_opt(_) -> false.
+
+from_opt(".core") -> [from_core];
+from_opt(".S") -> [from_asm];
+from_opt(".beam") -> [from_beam];
+from_opt(_) -> [].
%%% Obtain the 'outdir' option from the argument. Return "." if no
%%% such option was given.
@@ -113,18 +305,29 @@ outdir([Opt|Rest]) ->
outdir(Rest)
end.
+%% mimic how suffix is selected in compile:file().
+src_suffix([from_core|_]) -> ".core";
+src_suffix([from_asm|_]) -> ".S";
+src_suffix([from_beam|_]) -> ".beam";
+src_suffix([_|Opts]) -> src_suffix(Opts);
+src_suffix([]) -> ".erl".
+
%%% We have compiled File with options Opts. Find out where the
-%%% output file went to, and load it.
-machine_load(Mod, File, Opts) ->
+%%% output file went and load it, purging any old version.
+purge_and_load(Mod, File, Opts) ->
Dir = outdir(Opts),
- File2 = filename:join(Dir, filename:basename(File, ".erl")),
+ Base = filename:basename(File, src_suffix(Opts)),
+ OutFile = filename:join(Dir, Base),
case compile:output_generated(Opts) of
true ->
- Base = atom_to_list(Mod),
- case filename:basename(File, ".erl") of
+ case atom_to_list(Mod) of
Base ->
code:purge(Mod),
- check_load(code:load_abs(File2,Mod), Mod);
+ %% Note that load_abs() adds the object file suffix
+ case code:load_abs(OutFile, Mod) of
+ {error, _R}=Error -> Error;
+ _ -> {ok, Mod}
+ end;
_OtherMod ->
format("** Module name '~p' does not match file name '~tp' **~n",
[Mod,File]),
@@ -135,13 +338,6 @@ machine_load(Mod, File, Opts) ->
ok
end.
-%%% This function previously warned if the loaded module was
-%%% loaded from some other place than current directory.
-%%% Now, loading from other than current directory is supposed to work.
-%%% so this function does nothing special.
-check_load({error, _R} = Error, _) -> Error;
-check_load(_, Mod) -> {ok, Mod}.
-
%% Compile a list of modules
%% enables the nice unix shell cmd
%% erl -s c lc f1 f2 f3 @d c1=v1 @c2 @i IDir @o ODir -s erlang halt
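A hedged illustration of the new c/1,2 fallback introduced in this file (hypothetical module m and source path, not from this patch): when no matching source file is visible from the current directory, the beam file is located via code:which/1 and the module is recompiled from the source path and options recorded in its compile_info.
1> c(m).                  % m.erl in the current directory: compiled as before
{ok,m}
2> c(m, [debug_info]).    % no local m.erl: recompiled from the beam's compile_info
Recompiling /path/to/src/m.erl
{ok,m}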
diff --git a/lib/stdlib/src/dets.erl b/lib/stdlib/src/dets.erl
index 5bc9475fc8..e81383775b 100644
--- a/lib/stdlib/src/dets.erl
+++ b/lib/stdlib/src/dets.erl
@@ -1063,11 +1063,8 @@ foldl_bins([Bin | Bins], MP, Terms) ->
compile_match_spec(select, ?PATTERN_TO_OBJECT_MATCH_SPEC('_') = Spec) ->
{Spec, true};
compile_match_spec(select, Spec) ->
- case catch ets:match_spec_compile(Spec) of
- X when is_binary(X) ->
- {Spec, {match_spec, X}};
- _ ->
- badarg
+ try {Spec, {match_spec, ets:match_spec_compile(Spec)}}
+ catch error:_ -> badarg
end;
compile_match_spec(object, Pat) ->
compile_match_spec(select, ?PATTERN_TO_OBJECT_MATCH_SPEC(Pat));
diff --git a/lib/stdlib/src/edlin_expand.erl b/lib/stdlib/src/edlin_expand.erl
index 5f821caef0..a1a97af4c5 100644
--- a/lib/stdlib/src/edlin_expand.erl
+++ b/lib/stdlib/src/edlin_expand.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2005-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2005-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -101,44 +101,77 @@ match(Prefix, Alts, Extra0) ->
%% Return the list of names L in multiple columns.
format_matches(L) ->
- S = format_col(lists:sort(L), []),
+ {S1, Dots} = format_col(lists:sort(L), []),
+ S = case Dots of
+ true ->
+ {_, Prefix} = longest_common_head(vals(L)),
+ PrefixLen = length(Prefix),
+ case PrefixLen =< 3 of
+ true -> S1; % Do not replace the prefix with "...".
+ false ->
+ LeadingDotsL = leading_dots(L, PrefixLen),
+ {S2, _} = format_col(lists:sort(LeadingDotsL), []),
+ S2
+ end;
+ false -> S1
+ end,
["\n" | S].
format_col([], _) -> [];
-format_col(L, Acc) -> format_col(L, field_width(L), 0, Acc).
-
-format_col(X, Width, Len, Acc) when Width + Len > 79 ->
- format_col(X, Width, 0, ["\n" | Acc]);
-format_col([A|T], Width, Len, Acc0) ->
- H = case A of
- %% If it's a tuple {string(), integer()}, we assume it's an
- %% arity, and meant to be printed.
- {H0, I} when is_integer(I) ->
- H0 ++ "/" ++ integer_to_list(I);
- {H1, _} -> H1;
- H2 -> H2
- end,
- Acc = [io_lib:format("~-*ts", [Width,H]) | Acc0],
- format_col(T, Width, Len+Width, Acc);
-format_col([], _, _, Acc) ->
- lists:reverse(Acc, "\n").
-
-field_width(L) -> field_width(L, 0).
-
-field_width([{H,_}|T], W) ->
+format_col(L, Acc) ->
+ LL = 79,
+ format_col(L, field_width(L, LL), 0, Acc, LL, false).
+
+format_col(X, Width, Len, Acc, LL, Dots) when Width + Len > LL ->
+ format_col(X, Width, 0, ["\n" | Acc], LL, Dots);
+format_col([A|T], Width, Len, Acc0, LL, Dots) ->
+ {H0, R} = format_val(A),
+ Hmax = LL - length(R),
+ {H, NewDots} =
+ case length(H0) > Hmax of
+ true -> {io_lib:format("~-*ts", [Hmax - 3, H0]) ++ "...", true};
+ false -> {H0, Dots}
+ end,
+ Acc = [io_lib:format("~-*ts", [Width, H ++ R]) | Acc0],
+ format_col(T, Width, Len+Width, Acc, LL, NewDots);
+format_col([], _, _, Acc, _LL, Dots) ->
+ {lists:reverse(Acc, "\n"), Dots}.
+
+format_val({H, I}) when is_integer(I) ->
+ %% If it's a tuple {string(), integer()}, we assume it's an
+ %% arity, and meant to be printed.
+ {H, "/" ++ integer_to_list(I)};
+format_val({H, _}) ->
+ {H, ""};
+format_val(H) ->
+ {H, ""}.
+
+field_width(L, LL) -> field_width(L, 0, LL).
+
+field_width([{H,_}|T], W, LL) ->
case length(H) of
- L when L > W -> field_width(T, L);
- _ -> field_width(T, W)
+ L when L > W -> field_width(T, L, LL);
+ _ -> field_width(T, W, LL)
end;
-field_width([H|T], W) ->
+field_width([H|T], W, LL) ->
case length(H) of
- L when L > W -> field_width(T, L);
- _ -> field_width(T, W)
+ L when L > W -> field_width(T, L, LL);
+ _ -> field_width(T, W, LL)
end;
-field_width([], W) when W < 40 ->
+field_width([], W, LL) when W < LL - 3 ->
W + 4;
-field_width([], _) ->
- 40.
+field_width([], _, LL) ->
+ LL.
+
+vals([]) -> [];
+vals([{S, _}|L]) -> [S|vals(L)];
+vals([S|L]) -> [S|vals(L)].
+
+leading_dots([], _Len) -> [];
+leading_dots([{H, I}|L], Len) ->
+ [{"..." ++ nthtail(Len, H), I}|leading_dots(L, Len)];
+leading_dots([H|L], Len) ->
+ ["..." ++ nthtail(Len, H)|leading_dots(L, Len)].
longest_common_head([]) ->
no;
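A rough, hedged sketch of the dots handling added above: if any expansion candidate exceeds the 79-column limit it is cut to fit and suffixed with "...", and the whole list is then re-rendered with the long common head of the names replaced by leading dots, so the output looks roughly like
...handle_call/3      ...handle_cast/2      ...handle_info/2
(hypothetical names; the exact widths depend on the computed field width).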
diff --git a/lib/stdlib/src/erl_eval.erl b/lib/stdlib/src/erl_eval.erl
index 40a34aa30f..eafee346eb 100644
--- a/lib/stdlib/src/erl_eval.erl
+++ b/lib/stdlib/src/erl_eval.erl
@@ -1306,6 +1306,7 @@ partial_eval(Expr) ->
ev_expr({op,_,Op,L,R}) -> erlang:Op(ev_expr(L), ev_expr(R));
ev_expr({op,_,Op,A}) -> erlang:Op(ev_expr(A));
ev_expr({integer,_,X}) -> X;
+ev_expr({char,_,X}) -> X;
ev_expr({float,_,X}) -> X;
ev_expr({atom,_,X}) -> X;
ev_expr({tuple,_,Es}) ->
diff --git a/lib/stdlib/src/erl_expand_records.erl b/lib/stdlib/src/erl_expand_records.erl
index 2280464bff..16220bceb4 100644
--- a/lib/stdlib/src/erl_expand_records.erl
+++ b/lib/stdlib/src/erl_expand_records.erl
@@ -30,13 +30,13 @@
-import(lists, [map/2,foldl/3,foldr/3,sort/1,reverse/1,duplicate/2]).
--record(exprec, {compile=[], % Compile flags
- vcount=0, % Variable counter
- calltype=#{}, % Call types
- records=dict:new(), % Record definitions
- strict_ra=[], % strict record accesses
- checked_ra=[] % successfully accessed records
- }).
+-record(exprec, {compile=[], % Compile flags
+ vcount=0, % Variable counter
+ calltype=#{}, % Call types
+ records=#{}, % Record definitions
+ strict_ra=[], % strict record accesses
+ checked_ra=[] % successfully accessed records
+ }).
-spec(module(AbsForms, CompileOptions) -> AbsForms2 when
AbsForms :: [erl_parse:abstract_form()],
@@ -72,7 +72,7 @@ init_calltype_imports([], Ctype) -> Ctype.
forms([{attribute,_,record,{Name,Defs}}=Attr | Fs], St0) ->
NDefs = normalise_fields(Defs),
- St = St0#exprec{records=dict:store(Name, NDefs, St0#exprec.records)},
+ St = St0#exprec{records=maps:put(Name, NDefs, St0#exprec.records)},
{Fs1, St1} = forms(Fs, St),
{[Attr | Fs1], St1};
forms([{function,L,N,A,Cs0} | Fs0], St0) ->
@@ -546,7 +546,7 @@ normalise_fields(Fs) ->
%% record_fields(RecordName, State)
%% find_field(FieldName, Fields)
-record_fields(R, St) -> dict:fetch(R, St#exprec.records).
+record_fields(R, St) -> maps:get(R, St#exprec.records).
find_field(F, [{record_field,_,{atom,_,F},Val} | _]) -> {ok,Val};
find_field(F, [_ | Fs]) -> find_field(F, Fs);
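The record-definition store simply moves from dict to the map API; the operations correspond one to one (illustrative, not from this patch):
%% dict:store(Name, NDefs, D)  ->  maps:put(Name, NDefs, M)
%% dict:fetch(R, D)            ->  maps:get(R, M)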
diff --git a/lib/stdlib/src/erl_parse.yrl b/lib/stdlib/src/erl_parse.yrl
index 9cd95705af..922455a6f2 100644
--- a/lib/stdlib/src/erl_parse.yrl
+++ b/lib/stdlib/src/erl_parse.yrl
@@ -2,7 +2,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2016. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -33,7 +33,6 @@ list tail
list_comprehension lc_expr lc_exprs
binary_comprehension
tuple
-%struct
record_expr record_tuple record_field record_fields
map_expr map_tuple map_field map_field_assoc map_field_exact map_fields map_key
if_expr if_clause if_clauses case_expr cr_clause cr_clauses receive_expr
@@ -108,9 +107,8 @@ type_sig -> fun_type 'when' type_guards : {type, ?anno('$1'), bounded_fun,
type_guards -> type_guard : ['$1'].
type_guards -> type_guard ',' type_guards : ['$1'|'$3'].
-type_guard -> atom '(' top_types ')' : {type, ?anno('$1'), constraint,
- ['$1', '$3']}.
-type_guard -> var '::' top_type : build_def('$1', '$3').
+type_guard -> atom '(' top_types ')' : build_compat_constraint('$1', '$3').
+type_guard -> var '::' top_type : build_constraint('$1', '$3').
top_types -> top_type : ['$1'].
top_types -> top_type ',' top_types : ['$1'|'$3'].
@@ -156,6 +154,7 @@ type -> '#' atom '{' field_types '}' : {type, ?anno('$1'),
record, ['$2'|'$4']}.
type -> binary_type : '$1'.
type -> integer : '$1'.
+type -> char : '$1'.
type -> 'fun' '(' ')' : {type, ?anno('$1'), 'fun', []}.
type -> 'fun' '(' fun_type_100 ')' : '$3'.
@@ -268,7 +267,6 @@ expr_max -> binary : '$1'.
expr_max -> list_comprehension : '$1'.
expr_max -> binary_comprehension : '$1'.
expr_max -> tuple : '$1'.
-%%expr_max -> struct : '$1'.
expr_max -> '(' expr ')' : '$2'.
expr_max -> 'begin' exprs 'end' : {block,?anno('$1'),'$2'}.
expr_max -> if_expr : '$1'.
@@ -327,10 +325,6 @@ lc_expr -> binary '<=' expr : {b_generate,?anno('$2'),'$1','$3'}.
tuple -> '{' '}' : {tuple,?anno('$1'),[]}.
tuple -> '{' exprs '}' : {tuple,?anno('$1'),'$2'}.
-
-%%struct -> atom tuple :
-%% {struct,?anno('$1'),element(3, '$1'),element(3, '$2')}.
-
map_expr -> '#' map_tuple :
{map, ?anno('$1'),'$2'}.
map_expr -> expr_max '#' map_tuple :
@@ -1056,13 +1050,13 @@ build_typed_attribute({atom,Aa,Attr},_) ->
end.
build_type_spec({Kind,Aa}, {SpecFun, TypeSpecs})
- when (Kind =:= spec) or (Kind =:= callback) ->
+ when Kind =:= spec ; Kind =:= callback ->
NewSpecFun =
case SpecFun of
{atom, _, Fun} ->
{Fun, find_arity_from_specs(TypeSpecs)};
- {{atom,_, Mod}, {atom,_, Fun}} ->
- {Mod,Fun,find_arity_from_specs(TypeSpecs)}
+ {{atom, _, Mod}, {atom, _, Fun}} ->
+ {Mod, Fun, find_arity_from_specs(TypeSpecs)}
end,
{attribute,Aa,Kind,{NewSpecFun, TypeSpecs}}.
@@ -1076,11 +1070,24 @@ find_arity_from_specs([Spec|_]) ->
{type, _, 'fun', [{type, _, product, Args},_]} = Fun,
length(Args).
-build_def({var, A, '_'}, _Types) ->
+%% The 'is_subtype(V, T)' syntax is not supported as of Erlang/OTP
+%% 19.0, but is kept for backward compatibility.
+build_compat_constraint({atom, _, is_subtype}, [{var, _, _}=LHS, Type]) ->
+ build_constraint(LHS, Type);
+build_compat_constraint({atom, _, is_subtype}, [LHS, _Type]) ->
+ ret_err(?anno(LHS), "bad type variable");
+build_compat_constraint({atom, A, Atom}, _Types) ->
+ ret_err(A, io_lib:format("unsupported constraint ~w", [Atom])).
+
+build_constraint({atom, _, is_subtype}, [{var, _, _}=LHS, Type]) ->
+ build_constraint(LHS, Type);
+build_constraint({atom, A, Atom}, _Foo) ->
+ ret_err(A, io_lib:format("unsupported constraint ~w", [Atom]));
+build_constraint({var, A, '_'}, _Types) ->
ret_err(A, "bad type variable");
-build_def(LHS, Types) ->
+build_constraint(LHS, Type) ->
IsSubType = {atom, ?anno(LHS), is_subtype},
- {type, ?anno(LHS), constraint, [IsSubType, [LHS, Types]]}.
+ {type, ?anno(LHS), constraint, [IsSubType, [LHS, Type]]}.
lift_unions(T1, {type, _Aa, union, List}) ->
{type, ?anno(T1), union, [T1|List]};
@@ -1573,13 +1580,17 @@ new_anno(Term) ->
Abstr :: erl_parse_tree().
anno_to_term(Abstract) ->
- map_anno(fun erl_anno:to_term/1, Abstract).
+ F = fun(Anno, Acc) -> {erl_anno:to_term(Anno), Acc} end,
+ {NewAbstract, []} = modify_anno1(Abstract, [], F),
+ NewAbstract.
-spec anno_from_term(Term) -> erl_parse_tree() when
Term :: term().
anno_from_term(Term) ->
- map_anno(fun erl_anno:from_term/1, Term).
+ F = fun(T, Acc) -> {erl_anno:from_term(T), Acc} end,
+ {NewTerm, []} = modify_anno1(Term, [], F),
+ NewTerm.
%% Forms.
modify_anno1({function,F,A}, Ac, _Mf) ->
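For reference, the two constraint spellings routed through build_constraint/2 and build_compat_constraint/2 above, the second kept only for backward compatibility (illustrative specs, not from this patch):
-spec f(X) -> X when X :: integer().              % current syntax
-spec g(X) -> X when is_subtype(X, integer()).    % deprecated, still parsed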
diff --git a/lib/stdlib/src/erl_tar.erl b/lib/stdlib/src/erl_tar.erl
index a383a0fc67..086e77cd28 100644
--- a/lib/stdlib/src/erl_tar.erl
+++ b/lib/stdlib/src/erl_tar.erl
@@ -1,8 +1,8 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1997-2016. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1997-2017. All Rights Reserved.
+%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
@@ -14,191 +14,245 @@
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-%%
+%%
%% %CopyrightEnd%
%%
+%% This module implements extraction/creation of tar archives.
+%% It supports reading most common tar formats, namely V7, STAR,
+%% USTAR, GNU, BSD/libarchive, and PAX. It produces archives in USTAR
+%% format, unless it must use PAX headers, in which case it produces PAX
+%% format.
+%%
+%% The following references were used:
+%% http://www.freebsd.org/cgi/man.cgi?query=tar&sektion=5
+%% http://www.gnu.org/software/tar/manual/html_node/Standard.html
+%% http://pubs.opengroup.org/onlinepubs/9699919799/utilities/pax.html
-module(erl_tar).
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% Purpose: Unix tar (tape archive) utility.
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
--export([init/3, create/2, create/3, extract/1, extract/2, table/1, table/2,
- open/2, close/1, add/3, add/4,
- t/1, tt/1, format_error/1]).
+-export([init/3,
+ create/2, create/3,
+ extract/1, extract/2,
+ table/1, table/2, t/1, tt/1,
+ open/2, close/1,
+ add/3, add/4,
+ format_error/1]).
-include_lib("kernel/include/file.hrl").
+-include_lib("erl_tar.hrl").
--record(add_opts,
- {read_info, % Fun to use for read file/link info.
- chunk_size = 0, % For file reading when sending to sftp. 0=do not chunk
- verbose = false :: boolean()}). % Verbose on/off.
-
-%% Opens a tar archive.
-
-init(UsrHandle, AccessMode, Fun) when is_function(Fun,2) ->
- {ok, {AccessMode,{tar_descriptor,UsrHandle,Fun}}}.
-
-%%%================================================================
-%%% The open function with friends is to keep the file and binary api of this module
-open(Name, Mode) ->
- case open_mode(Mode) of
- {ok, Access, Raw, Opts} ->
- open1(Name, Access, Raw, Opts);
- {error, Reason} ->
- {error, {Name, Reason}}
- end.
-
-open1({binary,Bin}, read, _Raw, Opts) ->
- case file:open(Bin, [ram,binary,read]) of
- {ok,File} ->
- _ = [ram_file:uncompress(File) || Opts =:= [compressed]],
- init(File,read,file_fun());
- Error ->
- Error
- end;
-open1({file, Fd}, read, _Raw, _Opts) ->
- init(Fd, read, file_fun());
-open1(Name, Access, Raw, Opts) ->
- case file:open(Name, Raw ++ [binary, Access|Opts]) of
- {ok, File} ->
- init(File, Access, file_fun());
- {error, Reason} ->
- {error, {Name, Reason}}
- end.
-
-file_fun() ->
- fun(write, {Fd,Data}) -> file:write(Fd, Data);
- (position, {Fd,Pos}) -> file:position(Fd, Pos);
- (read2, {Fd,Size}) -> file:read(Fd,Size);
- (close, Fd) -> file:close(Fd)
- end.
-
-%%% End of file and binary api (except for open_mode/1 downwards
-%%%================================================================
-
-%% Closes a tar archive.
-
-close({read, File}) ->
- ok = do_close(File);
-close({write, File}) ->
- PadResult = pad_file(File),
- ok = do_close(File),
- PadResult;
-close(_) ->
- {error, einval}.
-
-%% Adds a file to a tape archive.
-
-add(File, Name, Options) ->
- add(File, Name, Name, Options).
-add({write, File}, Name, NameInArchive, Options) ->
- Opts = #add_opts{read_info=fun(F) -> file:read_link_info(F) end},
- add1(File, Name, NameInArchive, add_opts(Options, Opts));
-add({read, _File}, _, _, _) ->
- {error, eacces};
-add(_, _, _, _) ->
- {error, einval}.
-
-add_opts([dereference|T], Opts) ->
- add_opts(T, Opts#add_opts{read_info=fun(F) -> file:read_file_info(F) end});
-add_opts([verbose|T], Opts) ->
- add_opts(T, Opts#add_opts{verbose=true});
-add_opts([{chunks,N}|T], Opts) ->
- add_opts(T, Opts#add_opts{chunk_size=N});
-add_opts([_|T], Opts) ->
- add_opts(T, Opts);
-add_opts([], Opts) ->
- Opts.
-
-%% Creates a tar file Name containing the given files.
-
-create(Name, Filenames) ->
- create(Name, Filenames, []).
-
-%% Creates a tar archive Name containing the given files.
-%% Accepted options: verbose, compressed, cooked
+%% Converts the short error reason to a descriptive string.
+-spec format_error(term()) -> string().
+format_error(invalid_tar_checksum) ->
+ "Checksum failed";
+format_error(bad_header) ->
+ "Unrecognized tar header format";
+format_error({bad_header, Reason}) ->
+ lists:flatten(io_lib:format("Unrecognized tar header format: ~p", [Reason]));
+format_error({invalid_header, negative_size}) ->
+ "Invalid header: negative size";
+format_error(invalid_sparse_header_size) ->
+ "Invalid sparse header: negative size";
+format_error(invalid_sparse_map_entry) ->
+ "Invalid sparse map entry";
+format_error({invalid_sparse_map_entry, Reason}) ->
+ lists:flatten(io_lib:format("Invalid sparse map entry: ~p", [Reason]));
+format_error(invalid_end_of_archive) ->
+ "Invalid end of archive";
+format_error(eof) ->
+ "Unexpected end of file";
+format_error(integer_overflow) ->
+ "Failed to parse numeric: integer overflow";
+format_error({misaligned_read, Pos}) ->
+ lists:flatten(io_lib:format("Read a block which was misaligned: block_size=~p pos=~p",
+ [?BLOCK_SIZE, Pos]));
+format_error(invalid_gnu_1_0_sparsemap) ->
+ "Invalid GNU sparse map (version 1.0)";
+format_error({invalid_gnu_0_1_sparsemap, Format}) ->
+ lists:flatten(io_lib:format("Invalid GNU sparse map (version ~s)", [Format]));
+format_error({Name,Reason}) ->
+ lists:flatten(io_lib:format("~ts: ~ts", [Name,format_error(Reason)]));
+format_error(Atom) when is_atom(Atom) ->
+ file:format_error(Atom);
+format_error(Term) ->
+ lists:flatten(io_lib:format("~tp", [Term])).
-create(Name, FileList, Options) ->
- Mode = lists:filter(fun(X) -> (X=:=compressed) or (X=:=cooked)
- end, Options),
- case open(Name, [write|Mode]) of
- {ok, TarFile} ->
- Add = fun({NmInA, NmOrBin}) ->
- add(TarFile, NmOrBin, NmInA, Options);
- (Nm) ->
- add(TarFile, Nm, Nm, Options)
- end,
- Result = foreach_while_ok(Add, FileList),
- case {Result, close(TarFile)} of
- {ok, Res} -> Res;
- {Res, _} -> Res
- end;
- Reason ->
- Reason
- end.
+%% Initializes a new reader given a custom file handle and I/O wrappers
+-spec init(handle(), write | read, file_op()) -> {ok, reader()} | {error, badarg}.
+init(Handle, AccessMode, Fun) when is_function(Fun, 2) ->
+ Reader = #reader{handle=Handle,access=AccessMode,func=Fun},
+ {ok, Pos, Reader2} = do_position(Reader, {cur, 0}),
+ {ok, Reader2#reader{pos=Pos}};
+init(_Handle, _AccessMode, _Fun) ->
+ {error, badarg}.
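%% Illustrative sketch (not part of this hunk): the I/O callback passed to
%% init/3 has the same shape as the file_op/2 helper further down in this
%% diff. FileHandle is assumed to come from file:open/2 with raw/binary mode.
CustomFun = fun(write, {Fd, Data}) -> file:write(Fd, Data);
               (position, {Fd, Pos}) -> file:position(Fd, Pos);
               (read2, {Fd, Size}) -> file:read(Fd, Size);
               (close, Fd) -> file:close(Fd)
            end,
{ok, Writer} = erl_tar:init(FileHandle, write, CustomFun).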
+%%%================================================================
%% Extracts all files from the tar file Name.
-
+-spec extract(open_handle()) -> ok | {error, term()}.
extract(Name) ->
extract(Name, []).
%% Extracts (all) files from the tar file Name.
-%% Options accepted: keep_old_files, {files, ListOfFilesToExtract}, verbose,
-%% {cwd, AbsoluteDirectory}
+%% Options accepted:
+%% - cooked: Opens the tar file without mode `raw`
+%% - compressed: Uncompresses the tar file when reading
+%% - memory: Returns the tar contents as a list of tuples {Name, Bin}
+%% - keep_old_files: Extracted files will not overwrite the destination
+%% - {files, ListOfFilesToExtract}: Only extract ListOfFilesToExtract
+%% - verbose: Prints verbose information about the extraction
+%% - {cwd, AbsoluteDir}: Sets the current working directory for the extraction
+-spec extract(open_handle(), [extract_opt()]) ->
+ ok
+ | {ok, [{string(), binary()}]}
+ | {error, term()}.
+extract({binary, Bin}, Opts) when is_list(Opts) ->
+ do_extract({binary, Bin}, Opts);
+extract({file, Fd}, Opts) when is_list(Opts) ->
+ do_extract({file, Fd}, Opts);
+extract(#reader{}=Reader, Opts) when is_list(Opts) ->
+ do_extract(Reader, Opts);
+extract(Name, Opts) when is_list(Name); is_binary(Name), is_list(Opts) ->
+ do_extract(Name, Opts).
+
+do_extract(Handle, Opts) when is_list(Opts) ->
+ Opts2 = extract_opts(Opts),
+ Acc = if Opts2#read_opts.output =:= memory -> []; true -> ok end,
+ foldl_read(Handle, fun extract1/4, Acc, Opts2).
+
+extract1(eof, Reader, _, Acc) when is_list(Acc) ->
+ {ok, {ok, lists:reverse(Acc)}, Reader};
+extract1(eof, Reader, _, Acc) ->
+ {ok, Acc, Reader};
+extract1(#tar_header{name=Name,size=Size}=Header, Reader, Opts, Acc) ->
+ case check_extract(Name, Opts) of
+ true ->
+ case do_read(Reader, Size) of
+ {ok, Bin, Reader2} ->
+ case write_extracted_element(Header, Bin, Opts) of
+ ok ->
+ {ok, Acc, Reader2};
+ {ok, NameBin} when is_list(Acc) ->
+ {ok, [NameBin | Acc], Reader2};
+ {error, _} = Err ->
+ throw(Err)
+ end;
+ {error, _} = Err ->
+ throw(Err)
+ end;
+ false ->
+ {ok, Acc, skip_file(Reader)}
+ end.
-extract(Name, Opts) ->
- foldl_read(Name, fun extract1/4, ok, extract_opts(Opts)).
+%% Checks if the file Name should be extracted.
+check_extract(_, #read_opts{files=all}) ->
+ true;
+check_extract(Name, #read_opts{files=Files}) ->
+ ordsets:is_element(Name, Files).
-%% Returns a list of names of the files in the tar file Name.
-%% Options accepted: verbose
+%%%================================================================
+%% The following table functions produce a list of information about
+%% the files contained in the archive.
+-type filename() :: string().
+-type typeflag() :: regular | link | symlink |
+ char | block | directory |
+ fifo | reserved | unknown.
+-type mode() :: non_neg_integer().
+-type uid() :: non_neg_integer().
+-type gid() :: non_neg_integer().
+
+-type tar_entry() :: {filename(),
+ typeflag(),
+ non_neg_integer(),
+ calendar:datetime(),
+ mode(),
+ uid(),
+ gid()}.
+%% Returns a list of names of the files in the tar file Name.
+-spec table(open_handle()) -> {ok, [string()]} | {error, term()}.
table(Name) ->
table(Name, []).
%% Returns a list of names of the files in the tar file Name.
%% Options accepted: compressed, verbose, cooked.
-
-table(Name, Opts) ->
+-spec table(open_handle(), [compressed | verbose | cooked]) ->
+ {ok, [tar_entry()]} | {error, term()}.
+table(Name, Opts) when is_list(Opts) ->
foldl_read(Name, fun table1/4, [], table_opts(Opts)).
+table1(eof, Reader, _, Result) ->
+ {ok, {ok, lists:reverse(Result)}, Reader};
+table1(#tar_header{}=Header, Reader, #read_opts{verbose=Verbose}, Result) ->
+ Attrs = table1_attrs(Header, Verbose),
+ Reader2 = skip_file(Reader),
+ {ok, [Attrs|Result], Reader2}.
+
+%% Extracts attributes relevant to table1's output
+table1_attrs(#tar_header{typeflag=Typeflag,mode=Mode}=Header, true) ->
+ Type = typeflag(Typeflag),
+ Name = Header#tar_header.name,
+ Mtime = Header#tar_header.mtime,
+ Uid = Header#tar_header.uid,
+ Gid = Header#tar_header.gid,
+ Size = Header#tar_header.size,
+ {Name, Type, Size, Mtime, Mode, Uid, Gid};
+table1_attrs(#tar_header{name=Name}, _Verbose) ->
+ Name.
+
+typeflag(?TYPE_REGULAR) -> regular;
+typeflag(?TYPE_REGULAR_A) -> regular;
+typeflag(?TYPE_GNU_SPARSE) -> regular;
+typeflag(?TYPE_CONT) -> regular;
+typeflag(?TYPE_LINK) -> link;
+typeflag(?TYPE_SYMLINK) -> symlink;
+typeflag(?TYPE_CHAR) -> char;
+typeflag(?TYPE_BLOCK) -> block;
+typeflag(?TYPE_DIR) -> directory;
+typeflag(?TYPE_FIFO) -> fifo;
+typeflag(_) -> unknown.
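%% Illustrative sketch (hypothetical archive name and values, not part of
%% this hunk): under the verbose option every element of the result is a
%% tar_entry() tuple as assembled by table1_attrs/2 above.
{ok, [{"a.txt", regular, 12, {{2017,1,1},{0,0,0}}, 8#644, 0, 0}]} =
    erl_tar:table("archive.tar", [verbose]).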
+%%%================================================================
%% Comments for printing the contents of a tape archive,
%% meant to be invoked from the shell.
-t(Name) ->
+%% Prints each filename in the archive
+-spec t(file:filename()) -> ok | {error, term()}.
+t(Name) when is_list(Name); is_binary(Name) ->
case table(Name) of
- {ok, List} ->
- lists:foreach(fun(N) -> ok = io:format("~ts\n", [N]) end, List);
- Error ->
- Error
+ {ok, List} ->
+ lists:foreach(fun(N) -> ok = io:format("~ts\n", [N]) end, List);
+ Error ->
+ Error
end.
+%% Prints verbose information about each file in the archive
+-spec tt(open_handle()) -> ok | {error, term()}.
tt(Name) ->
case table(Name, [verbose]) of
- {ok, List} ->
- lists:foreach(fun print_header/1, List);
- Error ->
- Error
+ {ok, List} ->
+ lists:foreach(fun print_header/1, List);
+ Error ->
+ Error
end.
+%% Used by tt/1 to print a tar_entry tuple
+-spec print_header(tar_entry()) -> ok.
print_header({Name, Type, Size, Mtime, Mode, Uid, Gid}) ->
io:format("~s~s ~4w/~-4w ~7w ~s ~s\n",
- [type_to_string(Type), mode_to_string(Mode),
- Uid, Gid, Size, time_to_string(Mtime), Name]).
+ [type_to_string(Type), mode_to_string(Mode),
+ Uid, Gid, Size, time_to_string(Mtime), Name]).
-type_to_string(regular) -> "-";
+type_to_string(regular) -> "-";
type_to_string(directory) -> "d";
-type_to_string(link) -> "l";
-type_to_string(symlink) -> "s";
-type_to_string(char) -> "c";
-type_to_string(block) -> "b";
-type_to_string(fifo) -> "f";
-type_to_string(_) -> "?".
-
+type_to_string(link) -> "l";
+type_to_string(symlink) -> "s";
+type_to_string(char) -> "c";
+type_to_string(block) -> "b";
+type_to_string(fifo) -> "f";
+type_to_string(unknown) -> "?".
+
+%% Converts a numeric mode to its human-readable representation
mode_to_string(Mode) ->
mode_to_string(Mode, "xwrxwrxwr", []).
-
mode_to_string(Mode, [C|T], Acc) when Mode band 1 =:= 1 ->
mode_to_string(Mode bsr 1, T, [C|Acc]);
mode_to_string(Mode, [_|T], Acc) ->
@@ -206,6 +260,7 @@ mode_to_string(Mode, [_|T], Acc) ->
mode_to_string(_, [], Acc) ->
Acc.
+%% Converts a datetime tuple to a readable string
time_to_string({{Y, Mon, Day}, {H, Min, _}}) ->
io_lib:format("~s ~2w ~s:~s ~w", [month(Mon), Day, two_d(H), two_d(Min), Y]).
@@ -225,809 +280,1608 @@ month(10) -> "Oct";
month(11) -> "Nov";
month(12) -> "Dec".
-%% Converts the short error reason to a descriptive string.
+%%%================================================================
+%% The open function and friends keep the file and binary APIs of this module
+-type open_handle() :: file:filename()
+ | {binary, binary()}
+ | {file, term()}.
+-spec open(open_handle(), [write | compressed | cooked]) ->
+ {ok, reader()} | {error, term()}.
+open({binary, Bin}, Mode) when is_binary(Bin) ->
+ do_open({binary, Bin}, Mode);
+open({file, Fd}, Mode) ->
+ do_open({file, Fd}, Mode);
+open(Name, Mode) when is_list(Name); is_binary(Name) ->
+ do_open(Name, Mode).
+
+do_open(Name, Mode) when is_list(Mode) ->
+ case open_mode(Mode) of
+ {ok, Access, Raw, Opts} ->
+ open1(Name, Access, Raw, Opts);
+ {error, Reason} ->
+ {error, {Name, Reason}}
+ end.
-format_error(bad_header) -> "Bad directory header";
-format_error(eof) -> "Unexpected end of file";
-format_error(symbolic_link_too_long) -> "Symbolic link too long";
-format_error({Name,Reason}) ->
- lists:flatten(io_lib:format("~ts: ~ts", [Name,format_error(Reason)]));
-format_error(Atom) when is_atom(Atom) ->
- file:format_error(Atom);
-format_error(Term) ->
- lists:flatten(io_lib:format("~tp", [Term])).
+open1({binary,Bin}, read, _Raw, Opts) when is_binary(Bin) ->
+ case file:open(Bin, [ram,binary,read]) of
+ {ok,File} ->
+ _ = [ram_file:uncompress(File) || Opts =:= [compressed]],
+ {ok, #reader{handle=File,access=read,func=fun file_op/2}};
+ Error ->
+ Error
+ end;
+open1({file, Fd}, read, _Raw, _Opts) ->
+ Reader = #reader{handle=Fd,access=read,func=fun file_op/2},
+ case do_position(Reader, {cur, 0}) of
+ {ok, Pos, Reader2} ->
+ {ok, Reader2#reader{pos=Pos}};
+ {error, _} = Err ->
+ Err
+ end;
+open1(Name, Access, Raw, Opts) when is_list(Name) or is_binary(Name) ->
+ case file:open(Name, Raw ++ [binary, Access|Opts]) of
+ {ok, File} ->
+ {ok, #reader{handle=File,access=Access,func=fun file_op/2}};
+ {error, Reason} ->
+ {error, {Name, Reason}}
+ end.
+open_mode(Mode) ->
+ open_mode(Mode, false, [raw], []).
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%%%
-%%% Useful definitions (also start of implementation).
-%%%
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-%% Offset for fields in the tar header.
-%% Note that these offsets are ZERO-based as in the POSIX standard
-%% document, while binaries use ONE-base offset. Caveat Programmer.
-
--define(th_name, 0).
--define(th_mode, 100).
--define(th_uid, 108).
--define(th_gid, 116).
--define(th_size, 124).
--define(th_mtime, 136).
--define(th_chksum, 148).
--define(th_typeflag, 156).
--define(th_linkname, 157).
--define(th_magic, 257).
--define(th_version, 263).
--define(th_prefix, 345).
-
-%% Length of these fields.
-
--define(th_name_len, 100).
--define(th_mode_len, 8).
--define(th_uid_len, 8).
--define(th_gid_len, 8).
--define(th_size_len, 12).
--define(th_mtime_len, 12).
--define(th_chksum_len, 8).
--define(th_linkname_len, 100).
--define(th_magic_len, 6).
--define(th_version_len, 2).
--define(th_prefix_len, 167).
-
--record(tar_header,
- {name, % Name of file.
- mode, % Mode bits.
- uid, % User id.
- gid, % Group id.
- size, % Size of file
- mtime, % Last modified (seconds since
- % Jan 1, 1970).
- chksum, % Checksum of header.
- typeflag = [], % Type of file.
- linkname = [], % Name of link.
- filler = [],
- prefix}). % Filename prefix.
-
--define(record_size, 512).
--define(block_size, (512*20)).
-
-
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%%%
-%%% Adding members to a tar archive.
-%%%
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-add1(TarFile, Bin, NameInArchive, Opts) when is_binary(Bin) ->
- Now = calendar:now_to_local_time(erlang:timestamp()),
- Info = #file_info{size = byte_size(Bin),
- type = regular,
- access = read_write,
- atime = Now,
- mtime = Now,
- ctime = Now,
- mode = 8#100644,
- links = 1,
- major_device = 0,
- minor_device = 0,
- inode = 0,
- uid = 0,
- gid = 0},
- Header = create_header(NameInArchive, Info),
- add1(TarFile, NameInArchive, Header, Bin, Opts);
-add1(TarFile, Name, NameInArchive, Opts) ->
- case read_file_and_info(Name, Opts) of
- {ok, Bin, Info} when Info#file_info.type =:= regular ->
- Header = create_header(NameInArchive, Info),
- add1(TarFile, Name, Header, Bin, Opts);
- {ok, PointsTo, Info} when Info#file_info.type =:= symlink ->
- if
- length(PointsTo) > 100 ->
- {error,{PointsTo,symbolic_link_too_long}};
- true ->
- Info2 = Info#file_info{size=0},
- Header = create_header(NameInArchive, Info2, PointsTo),
- add1(TarFile, Name, Header, list_to_binary([]), Opts)
- end;
- {ok, _, Info} when Info#file_info.type =:= directory ->
- add_directory(TarFile, Name, NameInArchive, Info, Opts);
- {ok, _, #file_info{type=Type}} ->
- {error, {bad_file_type, Name, Type}};
- {error, Reason} ->
- {error, {Name, Reason}}
+open_mode(read, _, Raw, _) ->
+ {ok, read, Raw, []};
+open_mode(write, _, Raw, _) ->
+ {ok, write, Raw, []};
+open_mode([read|Rest], false, Raw, Opts) ->
+ open_mode(Rest, read, Raw, Opts);
+open_mode([write|Rest], false, Raw, Opts) ->
+ open_mode(Rest, write, Raw, Opts);
+open_mode([compressed|Rest], Access, Raw, Opts) ->
+ open_mode(Rest, Access, Raw, [compressed|Opts]);
+open_mode([cooked|Rest], Access, _Raw, Opts) ->
+ open_mode(Rest, Access, [], Opts);
+open_mode([], Access, Raw, Opts) ->
+ {ok, Access, Raw, Opts};
+open_mode(_, _, _, _) ->
+ {error, einval}.
+
+file_op(write, {Fd, Data}) ->
+ file:write(Fd, Data);
+file_op(position, {Fd, Pos}) ->
+ file:position(Fd, Pos);
+file_op(read2, {Fd, Size}) ->
+ file:read(Fd, Size);
+file_op(close, Fd) ->
+ file:close(Fd).
+
+%% Closes a tar archive.
+-spec close(reader()) -> ok | {error, term()}.
+close(#reader{access=read}=Reader) ->
+ ok = do_close(Reader);
+close(#reader{access=write}=Reader) ->
+ {ok, Reader2} = pad_file(Reader),
+ ok = do_close(Reader2),
+ ok;
+close(_) ->
+ {error, einval}.
+
+pad_file(#reader{pos=Pos}=Reader) ->
+ %% There must be at least two zero blocks at the end.
+ PadCurrent = skip_padding(Pos+?BLOCK_SIZE),
+ Padding = <<0:PadCurrent/unit:8>>,
+ do_write(Reader, [Padding, ?ZERO_BLOCK, ?ZERO_BLOCK]).
+
+
+%%%================================================================
+%% Creation/modification of tar archives
+
+%% Creates a tar file Name containing the given files.
+-spec create(file:filename(), filelist()) -> ok | {error, {string(), term()}}.
+create(Name, FileList) when is_list(Name); is_binary(Name) ->
+ create(Name, FileList, []).
+
+%% Creates a tar archive Name containing the given files.
+%% Accepted options: verbose, compressed, cooked
+-spec create(file:filename(), filelist(), [create_opt()]) ->
+ ok | {error, term()} | {error, {string(), term()}}.
+create(Name, FileList, Options) when is_list(Name); is_binary(Name) ->
+ Mode = lists:filter(fun(X) -> (X=:=compressed) or (X=:=cooked)
+ end, Options),
+ case open(Name, [write|Mode]) of
+ {ok, TarFile} ->
+ do_create(TarFile, FileList, Options);
+ {error, _} = Err ->
+ Err
end.
-add1(Tar, Name, Header, chunked, Options) ->
- add_verbose(Options, "a ~ts [chunked ", [Name]),
- try
- ok = do_write(Tar, Header),
- {ok,D} = file:open(Name, [read,binary]),
- {ok,NumBytes} = add_read_write_chunks(D, Tar, Options#add_opts.chunk_size, 0, Options),
- _ = file:close(D),
- ok = do_write(Tar, padding(NumBytes,?record_size))
- of
- ok ->
- add_verbose(Options, "~n", []),
- ok
- catch
- error:{badmatch,{error,Error}} ->
- add_verbose(Options, "~n", []),
- {error,{Name,Error}}
+do_create(TarFile, [], _Opts) ->
+ close(TarFile);
+do_create(TarFile, [{NameInArchive, NameOrBin}|Rest], Opts) ->
+ case add(TarFile, NameOrBin, NameInArchive, Opts) of
+ ok ->
+ do_create(TarFile, Rest, Opts);
+ {error, _} = Err ->
+ _ = close(TarFile),
+ Err
end;
-add1(Tar, Name, Header, Bin, Options) ->
- add_verbose(Options, "a ~ts~n", [Name]),
- do_write(Tar, [Header, Bin, padding(byte_size(Bin), ?record_size)]).
-
-add_read_write_chunks(D, Tar, ChunkSize, SumNumBytes, Options) ->
- case file:read(D, ChunkSize) of
- {ok,Bin} ->
- ok = do_write(Tar, Bin),
- add_verbose(Options, ".", []),
- add_read_write_chunks(D, Tar, ChunkSize, SumNumBytes+byte_size(Bin), Options);
- eof ->
- add_verbose(Options, "]", []),
- {ok,SumNumBytes};
- Other ->
- Other
+do_create(TarFile, [Name|Rest], Opts) ->
+ case add(TarFile, Name, Name, Opts) of
+ ok ->
+ do_create(TarFile, Rest, Opts);
+ {error, _} = Err ->
+ _ = close(TarFile),
+ Err
end.
-add_directory(TarFile, DirName, NameInArchive, Info, Options) ->
+%% Adds a file to a tape archive.
+-type add_type() :: string()
+ | {string(), string()}
+ | {string(), binary()}.
+-spec add(reader(), add_type(), [add_opt()]) -> ok | {error, term()}.
+add(Reader, {NameInArchive, Name}, Opts)
+ when is_list(NameInArchive), is_list(Name) ->
+ do_add(Reader, Name, NameInArchive, Opts);
+add(Reader, {NameInArchive, Bin}, Opts)
+ when is_list(NameInArchive), is_binary(Bin) ->
+ do_add(Reader, Bin, NameInArchive, Opts);
+add(Reader, Name, Opts) when is_list(Name) ->
+ do_add(Reader, Name, Name, Opts).
+
+
+-spec add(reader(), string() | binary(), string(), [add_opt()]) ->
+ ok | {error, term()}.
+add(Reader, NameOrBin, NameInArchive, Options)
+ when is_list(NameOrBin); is_binary(NameOrBin),
+ is_list(NameInArchive), is_list(Options) ->
+ do_add(Reader, NameOrBin, NameInArchive, Options).
+
+do_add(#reader{access=write}=Reader, Name, NameInArchive, Options)
+ when is_list(NameInArchive), is_list(Options) ->
+ Opts = #add_opts{read_info=fun(F) -> file:read_link_info(F) end},
+ add1(Reader, Name, NameInArchive, add_opts(Options, Opts));
+do_add(#reader{access=read},_,_,_) ->
+ {error, eacces};
+do_add(Reader,_,_,_) ->
+ {error, {badarg, Reader}}.
+
+add_opts([dereference|T], Opts) ->
+ add_opts(T, Opts#add_opts{read_info=fun(F) -> file:read_file_info(F) end});
+add_opts([verbose|T], Opts) ->
+ add_opts(T, Opts#add_opts{verbose=true});
+add_opts([{chunks,N}|T], Opts) ->
+ add_opts(T, Opts#add_opts{chunk_size=N});
+add_opts([_|T], Opts) ->
+ add_opts(T, Opts);
+add_opts([], Opts) ->
+ Opts.
+
+add1(#reader{}=Reader, Name, NameInArchive, #add_opts{read_info=ReadInfo}=Opts)
+ when is_list(Name) ->
+ Res = case ReadInfo(Name) of
+ {error, Reason0} ->
+ {error, {Name, Reason0}};
+ {ok, #file_info{type=symlink}=Fi} ->
+ add_verbose(Opts, "a ~ts~n", [NameInArchive]),
+ {ok, Linkname} = file:read_link(Name),
+ Header = fileinfo_to_header(NameInArchive, Fi, Linkname),
+ add_header(Reader, Header, Opts);
+ {ok, #file_info{type=regular}=Fi} ->
+ add_verbose(Opts, "a ~ts~n", [NameInArchive]),
+ Header = fileinfo_to_header(NameInArchive, Fi, false),
+ {ok, Reader2} = add_header(Reader, Header, Opts),
+ FileSize = Header#tar_header.size,
+ {ok, FileSize, Reader3} = do_copy(Reader2, Name, Opts),
+ Padding = skip_padding(FileSize),
+ Pad = <<0:Padding/unit:8>>,
+ do_write(Reader3, Pad);
+ {ok, #file_info{type=directory}=Fi} ->
+ add_directory(Reader, Name, NameInArchive, Fi, Opts);
+ {ok, #file_info{}=Fi} ->
+ add_verbose(Opts, "a ~ts~n", [NameInArchive]),
+ Header = fileinfo_to_header(NameInArchive, Fi, false),
+ add_header(Reader, Header, Opts)
+ end,
+ case Res of
+ ok -> ok;
+ {ok, _Reader} -> ok;
+ {error, _Reason} = Err -> Err
+ end;
+add1(Reader, Bin, NameInArchive, Opts) when is_binary(Bin) ->
+ add_verbose(Opts, "a ~ts~n", [NameInArchive]),
+ Now = calendar:now_to_local_time(erlang:timestamp()),
+ Header = #tar_header{
+ name = NameInArchive,
+ size = byte_size(Bin),
+ typeflag = ?TYPE_REGULAR,
+ atime = Now,
+ mtime = Now,
+ ctime = Now,
+ mode = 8#100644},
+ {ok, Reader2} = add_header(Reader, Header, Opts),
+ Padding = skip_padding(byte_size(Bin)),
+ Data = [Bin, <<0:Padding/unit:8>>],
+ case do_write(Reader2, Data) of
+ {ok, _Reader3} -> ok;
+ {error, Reason} -> {error, {NameInArchive, Reason}}
+ end.
+
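%% Illustrative sketch (not part of this hunk): the binary clause of add1/4
%% above backs adding in-memory data under a name in the archive, given a
%% writer Tar obtained from erl_tar:open(Name, [write]):
ok = erl_tar:add(Tar, {"docs/readme.txt", <<"hello\n">>}, []),
ok = erl_tar:add(Tar, <<"hello\n">>, "docs/readme.txt", [verbose]).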
+add_directory(Reader, DirName, NameInArchive, Info, Opts) ->
case file:list_dir(DirName) of
- {ok, []} ->
- add_verbose(Options, "a ~ts~n", [DirName]),
- Header = create_header(NameInArchive, Info),
- do_write(TarFile, Header);
- {ok, Files} ->
- Add = fun (File) ->
- add1(TarFile,
- filename:join(DirName, File),
- filename:join(NameInArchive, File),
- Options) end,
- foreach_while_ok(Add, Files);
- {error, Reason} ->
- {error, {DirName, Reason}}
+ {ok, []} ->
+ add_verbose(Opts, "a ~ts~n", [NameInArchive]),
+ Header = fileinfo_to_header(NameInArchive, Info, false),
+ add_header(Reader, Header, Opts);
+ {ok, Files} ->
+ add_verbose(Opts, "a ~ts~n", [NameInArchive]),
+ try add_files(Reader, Files, DirName, NameInArchive, Opts) of
+ ok -> ok;
+ {error, _} = Err -> Err
+ catch
+ throw:{error, {_Name, _Reason}} = Err -> Err;
+ throw:{error, Reason} -> {error, {DirName, Reason}}
+ end;
+ {error, Reason} ->
+ {error, {DirName, Reason}}
end.
-
-%% Creates a header for file in a tar file.
-
-create_header(Name, Info) ->
- create_header(Name, Info, []).
-create_header(Name, #file_info {mode=Mode, uid=Uid, gid=Gid,
- size=Size, mtime=Mtime0, type=Type}, Linkname) ->
- Mtime = posix_time(erlang:localtime_to_universaltime(Mtime0)),
- {Prefix,Suffix} = split_filename(Name),
- H0 = [to_string(Suffix, 100),
- to_octal(Mode, 8),
- to_octal(Uid, 8),
- to_octal(Gid, 8),
- to_octal(Size, ?th_size_len),
- to_octal(Mtime, ?th_mtime_len),
- <<" ">>,
- file_type(Type),
- to_string(Linkname, ?th_linkname_len),
- "ustar",0,
- "00",
- zeroes(?th_prefix-?th_version-?th_version_len),
- to_string(Prefix, ?th_prefix_len)],
- H = list_to_binary(H0),
- 512 = byte_size(H), %Assertion.
- ChksumString = to_octal(checksum(H), 6, [0,$\s]),
- <<Before:?th_chksum/binary,_:?th_chksum_len/binary,After/binary>> = H,
- [Before,ChksumString,After].
-
-file_type(regular) -> $0;
-file_type(symlink) -> $2;
-file_type(directory) -> $5.
-
-to_octal(Int, Count) when Count > 1 ->
- to_octal(Int, Count-1, [0]).
-
-to_octal(_, 0, Result) -> Result;
-to_octal(Int, Count, Result) ->
- to_octal(Int div 8, Count-1, [Int rem 8 + $0|Result]).
-
-to_string(Str0, Count) ->
- Str = case file:native_name_encoding() of
- utf8 ->
- unicode:characters_to_binary(Str0);
- latin1 ->
- list_to_binary(Str0)
- end,
- case byte_size(Str) of
- Size when Size < Count ->
- [Str|zeroes(Count-Size)];
- _ -> Str
+
+add_files(_Reader, [], _Dir, _DirInArchive, _Opts) ->
+ ok;
+add_files(Reader, [Name|Rest], Dir, DirInArchive, #add_opts{read_info=Info}=Opts) ->
+ FullName = filename:join(Dir, Name),
+ NameInArchive = filename:join(DirInArchive, Name),
+ Res = case Info(FullName) of
+ {error, Reason} ->
+ {error, {FullName, Reason}};
+ {ok, #file_info{type=directory}=Fi} ->
+ add_directory(Reader, FullName, NameInArchive, Fi, Opts);
+ {ok, #file_info{type=symlink}=Fi} ->
+ add_verbose(Opts, "a ~ts~n", [NameInArchive]),
+ {ok, Linkname} = file:read_link(FullName),
+ Header = fileinfo_to_header(NameInArchive, Fi, Linkname),
+ add_header(Reader, Header, Opts);
+ {ok, #file_info{type=regular}=Fi} ->
+ add_verbose(Opts, "a ~ts~n", [NameInArchive]),
+ Header = fileinfo_to_header(NameInArchive, Fi, false),
+ {ok, Reader2} = add_header(Reader, Header, Opts),
+ FileSize = Header#tar_header.size,
+ {ok, FileSize, Reader3} = do_copy(Reader2, FullName, Opts),
+ Padding = skip_padding(FileSize),
+ Pad = <<0:Padding/unit:8>>,
+ do_write(Reader3, Pad);
+ {ok, #file_info{}=Fi} ->
+ add_verbose(Opts, "a ~ts~n", [NameInArchive]),
+ Header = fileinfo_to_header(NameInArchive, Fi, false),
+ add_header(Reader, Header, Opts)
+ end,
+ case Res of
+ ok -> add_files(Reader, Rest, Dir, DirInArchive, Opts);
+ {ok, ReaderNext} -> add_files(ReaderNext, Rest, Dir, DirInArchive, Opts);
+ {error, _} = Err -> Err
end.
-%% Pads out end of file.
-
-pad_file(File) ->
- {ok,Position} = do_position(File, {cur,0}),
- %% There must be at least two zero records at the end.
- Fill = case ?block_size - (Position rem ?block_size) of
- Fill0 when Fill0 < 2*?record_size ->
- %% We need to another block here to ensure that there
- %% are at least two zero records at the end.
- Fill0 + ?block_size;
- Fill0 ->
- %% Large enough.
- Fill0
- end,
- do_write(File, zeroes(Fill)).
-
-split_filename(Name) when length(Name) =< ?th_name_len ->
- {"", Name};
-split_filename(Name0) ->
- split_filename(lists:reverse(filename:split(Name0)), [], [], 0).
-
-split_filename([Comp|Rest], Prefix, Suffix, Len)
- when Len+length(Comp) < ?th_name_len ->
- split_filename(Rest, Prefix, [Comp|Suffix], Len+length(Comp)+1);
-split_filename([Comp|Rest], Prefix, Suffix, Len) ->
- split_filename(Rest, [Comp|Prefix], Suffix, Len+length(Comp)+1);
-split_filename([], Prefix, Suffix, _) ->
- {filename:join(Prefix),filename:join(Suffix)}.
-
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%%%
-%%% Retrieving files from a tape archive.
-%%%
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-%% Options used when reading a tar archive.
-
--record(read_opts,
- {cwd :: string(), % Current working directory.
- keep_old_files = false :: boolean(), % Owerwrite or not.
- files = all, % Set of files to extract
- % (or all).
- output = file :: 'file' | 'memory',
- open_mode = [], % Open mode options.
- verbose = false :: boolean()}). % Verbose on/off.
+format_string(String, Size) when length(String) > Size ->
+ throw({error, {write_string, field_too_long}});
+format_string(String, Size) ->
+ Ascii = to_ascii(String),
+ if byte_size(Ascii) < Size ->
+ [Ascii, 0];
+ true ->
+ Ascii
+ end.
-extract_opts(List) ->
- extract_opts(List, default_options()).
+format_octal(Octal) ->
+ iolist_to_binary(io_lib:fwrite("~.8B", [Octal])).
+
+add_header(#reader{}=Reader, #tar_header{}=Header, Opts) ->
+ {ok, Iodata} = build_header(Header, Opts),
+ do_write(Reader, Iodata).
+
+write_to_block(Block, IoData, Start) when is_list(IoData) ->
+ write_to_block(Block, iolist_to_binary(IoData), Start);
+write_to_block(Block, Bin, Start) when is_binary(Bin) ->
+ Size = byte_size(Bin),
+ <<Head:Start/unit:8, _:Size/unit:8, Rest/binary>> = Block,
+ <<Head:Start/unit:8, Bin/binary, Rest/binary>>.
+
+build_header(#tar_header{}=Header, Opts) ->
+ #tar_header{
+ name=Name,
+ mode=Mode,
+ uid=Uid,
+ gid=Gid,
+ size=Size,
+ typeflag=Type,
+ linkname=Linkname,
+ uname=Uname,
+ gname=Gname,
+ devmajor=Devmaj,
+ devminor=Devmin
+ } = Header,
+ Mtime = datetime_to_posix(Header#tar_header.mtime),
+
+ Block0 = ?ZERO_BLOCK,
+ {Block1, Pax0} = write_string(Block0, ?V7_NAME, ?V7_NAME_LEN, Name, ?PAX_PATH, #{}),
+ Block2 = write_octal(Block1, ?V7_MODE, ?V7_MODE_LEN, Mode),
+ {Block3, Pax1} = write_numeric(Block2, ?V7_UID, ?V7_UID_LEN, Uid, ?PAX_UID, Pax0),
+ {Block4, Pax2} = write_numeric(Block3, ?V7_GID, ?V7_GID_LEN, Gid, ?PAX_GID, Pax1),
+ {Block5, Pax3} = write_numeric(Block4, ?V7_SIZE, ?V7_SIZE_LEN, Size, ?PAX_SIZE, Pax2),
+ {Block6, Pax4} = write_numeric(Block5, ?V7_MTIME, ?V7_MTIME_LEN, Mtime, ?PAX_NONE, Pax3),
+ {Block7, Pax5} = write_string(Block6, ?V7_TYPE, ?V7_TYPE_LEN, <<Type>>, ?PAX_NONE, Pax4),
+ {Block8, Pax6} = write_string(Block7, ?V7_LINKNAME, ?V7_LINKNAME_LEN,
+ Linkname, ?PAX_LINKPATH, Pax5),
+ {Block9, Pax7} = write_string(Block8, ?USTAR_UNAME, ?USTAR_UNAME_LEN,
+ Uname, ?PAX_UNAME, Pax6),
+ {Block10, Pax8} = write_string(Block9, ?USTAR_GNAME, ?USTAR_GNAME_LEN,
+ Gname, ?PAX_GNAME, Pax7),
+ {Block11, Pax9} = write_numeric(Block10, ?USTAR_DEVMAJ, ?USTAR_DEVMAJ_LEN,
+ Devmaj, ?PAX_NONE, Pax8),
+ {Block12, Pax10} = write_numeric(Block11, ?USTAR_DEVMIN, ?USTAR_DEVMIN_LEN,
+ Devmin, ?PAX_NONE, Pax9),
+ {Block13, Pax11} = set_path(Block12, Pax10),
+ PaxEntry = case maps:size(Pax11) of
+ 0 -> [];
+ _ -> build_pax_entry(Header, Pax11, Opts)
+ end,
+ Block14 = set_format(Block13, ?FORMAT_USTAR),
+ Block15 = set_checksum(Block14),
+ {ok, [PaxEntry, Block15]}.
+
+set_path(Block0, Pax) ->
+ %% only use ustar header when name is too long
+ case maps:get(?PAX_PATH, Pax, nil) of
+ nil ->
+ {Block0, Pax};
+ PaxPath ->
+ case split_ustar_path(PaxPath) of
+ {ok, UstarName, UstarPrefix} ->
+ {Block1, _} = write_string(Block0, ?V7_NAME, ?V7_NAME_LEN,
+ UstarName, ?PAX_NONE, #{}),
+ {Block2, _} = write_string(Block1, ?USTAR_PREFIX, ?USTAR_PREFIX_LEN,
+ UstarPrefix, ?PAX_NONE, #{}),
+ {Block2, maps:remove(?PAX_PATH, Pax)};
+ false ->
+ {Block0, Pax}
+ end
+ end.
-table_opts(List) ->
- read_opts(List, default_options()).
+set_format(Block0, Format)
+ when Format =:= ?FORMAT_USTAR; Format =:= ?FORMAT_PAX ->
+ Block1 = write_to_block(Block0, ?MAGIC_USTAR, ?USTAR_MAGIC),
+ write_to_block(Block1, ?VERSION_USTAR, ?USTAR_VERSION);
+set_format(_Block, Format) ->
+ throw({error, {invalid_format, Format}}).
+
+set_checksum(Block) ->
+ Checksum = compute_checksum(Block),
+ write_octal(Block, ?V7_CHKSUM, ?V7_CHKSUM_LEN, Checksum).
+
+build_pax_entry(Header, PaxAttrs, Opts) ->
+ Path = Header#tar_header.name,
+ Filename = filename:basename(Path),
+ Dir = filename:dirname(Path),
+ Path2 = filename:join([Dir, "PaxHeaders.0", Filename]),
+ AsciiPath = to_ascii(Path2),
+ Path3 = if byte_size(AsciiPath) > ?V7_NAME_LEN ->
+ binary_part(AsciiPath, 0, ?V7_NAME_LEN - 1);
+ true ->
+ AsciiPath
+ end,
+ Keys = maps:keys(PaxAttrs),
+ SortedKeys = lists:sort(Keys),
+ PaxFile = build_pax_file(SortedKeys, PaxAttrs),
+ Size = byte_size(PaxFile),
+ Padding = (?BLOCK_SIZE -
+ (byte_size(PaxFile) rem ?BLOCK_SIZE)) rem ?BLOCK_SIZE,
+ Pad = <<0:Padding/unit:8>>,
+ PaxHeader = #tar_header{
+ name=unicode:characters_to_list(Path3),
+ size=Size,
+ mtime=Header#tar_header.mtime,
+ atime=Header#tar_header.atime,
+ ctime=Header#tar_header.ctime,
+ typeflag=?TYPE_X_HEADER
+ },
+ {ok, PaxHeaderData} = build_header(PaxHeader, Opts),
+ [PaxHeaderData, PaxFile, Pad].
+
+build_pax_file(Keys, PaxAttrs) ->
+ build_pax_file(Keys, PaxAttrs, []).
+build_pax_file([], _, Acc) ->
+ unicode:characters_to_binary(Acc);
+build_pax_file([K|Rest], Attrs, Acc) ->
+ V = maps:get(K, Attrs),
+ Size = sizeof(K) + sizeof(V) + 3,
+ Size2 = sizeof(Size) + Size,
+ Key = to_string(K),
+ Value = to_string(V),
+ Record = unicode:characters_to_binary(io_lib:format("~B ~ts=~ts\n", [Size2, Key, Value])),
+ if byte_size(Record) =/= Size2 ->
+ Size3 = byte_size(Record),
+ Record2 = io_lib:format("~B ~ts=~ts\n", [Size3, Key, Value]),
+ build_pax_file(Rest, Attrs, [Acc, Record2]);
+ true ->
+ build_pax_file(Rest, Attrs, [Acc, Record])
+ end.
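%% Worked example (hypothetical key/value, not part of this hunk): each PAX
%% record has the form "<len> <key>=<value>\n", where <len> is the byte
%% length of the whole record including its own digits. For key "path" and
%% value "some/very/long/name": 4 + 19 + 3 = 26 covers key, value, the
%% space, '=' and the newline; adding the two digits of the length itself
%% gives 28, so build_pax_file/3 emits
%% "28 path=some/very/long/name\n" (exactly 28 bytes).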
-default_options() ->
- {ok, Cwd} = file:get_cwd(),
- #read_opts{cwd=Cwd}.
+sizeof(Bin) when is_binary(Bin) ->
+ byte_size(Bin);
+sizeof(List) when is_list(List) ->
+ length(List);
+sizeof(N) when is_integer(N) ->
+ byte_size(integer_to_binary(N));
+sizeof(N) when is_float(N) ->
+ byte_size(float_to_binary(N)).
+
+to_string(Bin) when is_binary(Bin) ->
+ unicode:characters_to_list(Bin);
+to_string(List) when is_list(List) ->
+ List;
+to_string(N) when is_integer(N) ->
+ integer_to_list(N);
+to_string(N) when is_float(N) ->
+ float_to_list(N).
+
+split_ustar_path(Path) ->
+ Len = length(Path),
+ NotAscii = not is_ascii(Path),
+ if Len =< ?V7_NAME_LEN; NotAscii ->
+ false;
+ true ->
+ PathBin = binary:list_to_bin(Path),
+ case binary:split(PathBin, [<<$/>>], [global, trim_all]) of
+ [Part] when byte_size(Part) >= ?V7_NAME_LEN ->
+ false;
+ Parts ->
+ case lists:last(Parts) of
+ Name when byte_size(Name) >= ?V7_NAME_LEN ->
+ false;
+ Name ->
+ Parts2 = lists:sublist(Parts, length(Parts) - 1),
+ join_split_ustar_path(Parts2, {ok, Name, nil})
+ end
+ end
+ end.
-%% Parse options for extract.
+join_split_ustar_path([], Acc) ->
+ Acc;
+join_split_ustar_path([Part|_], {ok, _, nil})
+ when byte_size(Part) > ?USTAR_PREFIX_LEN ->
+ false;
+join_split_ustar_path([Part|_], {ok, _Name, Acc})
+ when (byte_size(Part)+byte_size(Acc)) > ?USTAR_PREFIX_LEN ->
+ false;
+join_split_ustar_path([Part|Rest], {ok, Name, nil}) ->
+ join_split_ustar_path(Rest, {ok, Name, Part});
+join_split_ustar_path([Part|Rest], {ok, Name, Acc}) ->
+ join_split_ustar_path(Rest, {ok, Name, <<Acc/binary,$/,Part/binary>>}).
+
+datetime_to_posix(DateTime) ->
+ Epoch = calendar:datetime_to_gregorian_seconds(?EPOCH),
+ Secs = calendar:datetime_to_gregorian_seconds(DateTime),
+ case Secs - Epoch of
+ N when N < 0 -> 0;
+ N -> N
+ end.
-extract_opts([keep_old_files|Rest], Opts) ->
- extract_opts(Rest, Opts#read_opts{keep_old_files=true});
-extract_opts([{cwd, Cwd}|Rest], Opts) ->
- extract_opts(Rest, Opts#read_opts{cwd=Cwd});
-extract_opts([{files, Files}|Rest], Opts) ->
- Set = ordsets:from_list(Files),
- extract_opts(Rest, Opts#read_opts{files=Set});
-extract_opts([memory|Rest], Opts) ->
- extract_opts(Rest, Opts#read_opts{output=memory});
-extract_opts([compressed|Rest], Opts=#read_opts{open_mode=OpenMode}) ->
- extract_opts(Rest, Opts#read_opts{open_mode=[compressed|OpenMode]});
-extract_opts([cooked|Rest], Opts=#read_opts{open_mode=OpenMode}) ->
- extract_opts(Rest, Opts#read_opts{open_mode=[cooked|OpenMode]});
-extract_opts([verbose|Rest], Opts) ->
- extract_opts(Rest, Opts#read_opts{verbose=true});
-extract_opts([Other|Rest], Opts) ->
- extract_opts(Rest, read_opts([Other], Opts));
-extract_opts([], Opts) ->
- Opts.
+write_octal(Block, Pos, Size, X) ->
+ Octal = zero_pad(format_octal(X), Size-1),
+ if byte_size(Octal) < Size ->
+ write_to_block(Block, Octal, Pos);
+ true ->
+ throw({error, {write_failed, octal_field_too_long}})
+ end.
-%% Common options for all read operations.
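+%% Writes Str into the fixed-size field at Pos; if a PAX attribute key
+%% is given and the string is too long or not ASCII, it is recorded in
+%% the PAX attribute map instead.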
+write_string(Block, Pos, Size, Str, PaxAttr, Pax0) ->
+ NotAscii = not is_ascii(Str),
+ if PaxAttr =/= ?PAX_NONE andalso (length(Str) > Size orelse NotAscii) ->
+ Pax1 = maps:put(PaxAttr, Str, Pax0),
+ {Block, Pax1};
+ true ->
+ Formatted = format_string(Str, Size),
+ {write_to_block(Block, Formatted, Pos), Pax0}
+ end.
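+%% Writes X as an octal string if it fits the field; otherwise falls
+%% back to a PAX attribute, or fails if no PAX key is available.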
+write_numeric(Block, Pos, Size, X, PaxAttr, Pax0) ->
+ %% attempt octal
+ Octal = zero_pad(format_octal(X), Size-1),
+ if byte_size(Octal) < Size ->
+ {write_to_block(Block, [Octal, 0], Pos), Pax0};
+ PaxAttr =/= ?PAX_NONE ->
+ Pax1 = maps:put(PaxAttr, X, Pax0),
+ {Block, Pax1};
+ true ->
+ throw({error, {write_failed, numeric_field_too_long}})
+ end.
-read_opts([compressed|Rest], Opts=#read_opts{open_mode=OpenMode}) ->
- read_opts(Rest, Opts#read_opts{open_mode=[compressed|OpenMode]});
-read_opts([cooked|Rest], Opts=#read_opts{open_mode=OpenMode}) ->
- read_opts(Rest, Opts#read_opts{open_mode=[cooked|OpenMode]});
-read_opts([verbose|Rest], Opts) ->
- read_opts(Rest, Opts#read_opts{verbose=true});
-read_opts([_|Rest], Opts) ->
- read_opts(Rest, Opts);
-read_opts([], Opts) ->
- Opts.
+zero_pad(Str, Size) when byte_size(Str) >= Size ->
+ Str;
+zero_pad(Str, Size) ->
+ Padding = Size - byte_size(Str),
+ Pad = binary:copy(<<$0>>, Padding),
+ <<Pad/binary, Str/binary>>.
-foldl_read({AccessMode,TD={tar_descriptor,_UsrHandle,_AccessFun}}, Fun, Accu, Opts) ->
- case AccessMode of
- read ->
- foldl_read0(TD, Fun, Accu, Opts);
- _ ->
- {error,{read_mode_expected,AccessMode}}
- end;
-foldl_read(TarName, Fun, Accu, Opts) ->
- case open(TarName, [read|Opts#read_opts.open_mode]) of
- {ok, {read, File}} ->
- Result = foldl_read0(File, Fun, Accu, Opts),
- ok = do_close(File),
- Result;
- Error ->
- Error
+
+%%%================================================================
+%% Functions for reading tar archives
+
+read_block(Reader) ->
+ case do_read(Reader, ?BLOCK_SIZE) of
+ eof ->
+ throw({error, eof});
+ %% Two zero blocks mark the end of the archive
+ {ok, ?ZERO_BLOCK, Reader1} ->
+ case do_read(Reader1, ?BLOCK_SIZE) of
+ eof ->
+ % This is technically a malformed end-of-archive marker,
+ % as two ZERO_BLOCKs are expected as the marker,
+ % but if we've already made it this far, we should just ignore it
+ eof;
+ {ok, ?ZERO_BLOCK, _Reader2} ->
+ eof;
+ {ok, _Block, _Reader2} ->
+ throw({error, invalid_end_of_archive});
+ {error,_} = Err ->
+ throw(Err)
+ end;
+ {ok, Block, Reader1} when is_binary(Block) ->
+ {ok, Block, Reader1};
+ {error, _} = Err ->
+ throw(Err)
end.
-foldl_read0(File, Fun, Accu, Opts) ->
- case catch foldl_read1(Fun, Accu, File, Opts) of
- {'EXIT', Reason} ->
- exit(Reason);
- {error, {Reason, Format, Args}} ->
- read_verbose(Opts, Format, Args),
- {error, Reason};
- {error, Reason} ->
- {error, Reason};
- Ok ->
- Ok
+get_header(#reader{}=Reader) ->
+ case read_block(Reader) of
+ eof ->
+ eof;
+ {ok, Block, Reader1} ->
+ convert_header(Block, Reader1)
end.
-foldl_read1(Fun, Accu0, File, Opts) ->
- case get_header(File) of
- eof ->
- Fun(eof, File, Opts, Accu0);
- Header ->
- {ok, NewAccu} = Fun(Header, File, Opts, Accu0),
- foldl_read1(Fun, NewAccu, File, Opts)
+%% Converts the tar header to a record.
+to_v7(Bin) when is_binary(Bin), byte_size(Bin) =:= ?BLOCK_SIZE ->
+ #header_v7{
+ name=binary_part(Bin, ?V7_NAME, ?V7_NAME_LEN),
+ mode=binary_part(Bin, ?V7_MODE, ?V7_MODE_LEN),
+ uid=binary_part(Bin, ?V7_UID, ?V7_UID_LEN),
+ gid=binary_part(Bin, ?V7_GID, ?V7_GID_LEN),
+ size=binary_part(Bin, ?V7_SIZE, ?V7_SIZE_LEN),
+ mtime=binary_part(Bin, ?V7_MTIME, ?V7_MTIME_LEN),
+ checksum=binary_part(Bin, ?V7_CHKSUM, ?V7_CHKSUM_LEN),
+ typeflag=binary:at(Bin, ?V7_TYPE),
+ linkname=binary_part(Bin, ?V7_LINKNAME, ?V7_LINKNAME_LEN)
+ };
+to_v7(_) ->
+ {error, header_block_too_small}.
+
+to_gnu(#header_v7{}=V7, Bin)
+ when is_binary(Bin), byte_size(Bin) =:= ?BLOCK_SIZE ->
+ #header_gnu{
+ header_v7=V7,
+ magic=binary_part(Bin, ?GNU_MAGIC, ?GNU_MAGIC_LEN),
+ version=binary_part(Bin, ?GNU_VERSION, ?GNU_VERSION_LEN),
+ uname=binary_part(Bin, 265, 32),
+ gname=binary_part(Bin, 297, 32),
+ devmajor=binary_part(Bin, 329, 8),
+ devminor=binary_part(Bin, 337, 8),
+ atime=binary_part(Bin, 345, 12),
+ ctime=binary_part(Bin, 357, 12),
+ sparse=to_sparse_array(binary_part(Bin, 386, 24*4+1)),
+ real_size=binary_part(Bin, 483, 12)
+ }.
+
+to_star(#header_v7{}=V7, Bin)
+ when is_binary(Bin), byte_size(Bin) =:= ?BLOCK_SIZE ->
+ #header_star{
+ header_v7=V7,
+ magic=binary_part(Bin, ?USTAR_MAGIC, ?USTAR_MAGIC_LEN),
+ version=binary_part(Bin, ?USTAR_VERSION, ?USTAR_VERSION_LEN),
+ uname=binary_part(Bin, ?USTAR_UNAME, ?USTAR_UNAME_LEN),
+ gname=binary_part(Bin, ?USTAR_GNAME, ?USTAR_GNAME_LEN),
+ devmajor=binary_part(Bin, ?USTAR_DEVMAJ, ?USTAR_DEVMAJ_LEN),
+ devminor=binary_part(Bin, ?USTAR_DEVMIN, ?USTAR_DEVMIN_LEN),
+ prefix=binary_part(Bin, 345, 131),
+ atime=binary_part(Bin, 476, 12),
+ ctime=binary_part(Bin, 488, 12),
+ trailer=binary_part(Bin, ?STAR_TRAILER, ?STAR_TRAILER_LEN)
+ }.
+
+to_ustar(#header_v7{}=V7, Bin)
+ when is_binary(Bin), byte_size(Bin) =:= ?BLOCK_SIZE ->
+ #header_ustar{
+ header_v7=V7,
+ magic=binary_part(Bin, ?USTAR_MAGIC, ?USTAR_MAGIC_LEN),
+ version=binary_part(Bin, ?USTAR_VERSION, ?USTAR_VERSION_LEN),
+ uname=binary_part(Bin, ?USTAR_UNAME, ?USTAR_UNAME_LEN),
+ gname=binary_part(Bin, ?USTAR_GNAME, ?USTAR_GNAME_LEN),
+ devmajor=binary_part(Bin, ?USTAR_DEVMAJ, ?USTAR_DEVMAJ_LEN),
+ devminor=binary_part(Bin, ?USTAR_DEVMIN, ?USTAR_DEVMIN_LEN),
+ prefix=binary_part(Bin, 345, 155)
+ }.
+
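+%% The GNU sparse region consists of 24-byte entries (a 12-byte offset
+%% followed by a 12-byte length); the byte after the last entry is a
+%% flag indicating whether an extended sparse header block follows.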
+to_sparse_array(Bin) when is_binary(Bin) ->
+ MaxEntries = byte_size(Bin) div 24,
+ IsExtended = 1 =:= binary:at(Bin, 24*MaxEntries),
+ Entries = parse_sparse_entries(Bin, MaxEntries-1, []),
+ #sparse_array{
+ entries=Entries,
+ max_entries=MaxEntries,
+ is_extended=IsExtended
+ }.
+
+parse_sparse_entries(<<>>, _, Acc) ->
+ Acc;
+parse_sparse_entries(_, -1, Acc) ->
+ Acc;
+parse_sparse_entries(Bin, N, Acc) ->
+ case to_sparse_entry(binary_part(Bin, N*24, 24)) of
+ nil ->
+ parse_sparse_entries(Bin, N-1, Acc);
+ Entry = #sparse_entry{} ->
+ parse_sparse_entries(Bin, N-1, [Entry|Acc])
end.
-table1(eof, _, _, Result) ->
- {ok, lists:reverse(Result)};
-table1(Header = #tar_header{}, File, #read_opts{verbose=true}, Result) ->
- #tar_header{name=Name, size=Size, mtime=Mtime, typeflag=Type,
- mode=Mode, uid=Uid, gid=Gid} = Header,
- skip(File, Size),
- {ok, [{Name, Type, Size, posix_to_erlang_time(Mtime), Mode, Uid, Gid}|Result]};
-table1(#tar_header{name=Name, size=Size}, File, _, Result) ->
- skip(File, Size),
- {ok, [Name|Result]}.
-
-extract1(eof, _, _, Acc) ->
- if
- is_list(Acc) ->
- {ok, lists:reverse(Acc)};
- true ->
- Acc
- end;
-extract1(Header, File, Opts, Acc) ->
- Name = Header#tar_header.name,
- case check_extract(Name, Opts) of
- true ->
- {ok, Bin} = get_element(File, Header),
- case write_extracted_element(Header, Bin, Opts) of
- ok ->
- {ok, Acc};
- {ok, NameBin} when is_list(Acc) ->
- {ok, [NameBin | Acc]};
- {ok, NameBin} when Acc =:= ok ->
- {ok, [NameBin]}
- end;
- false ->
- ok = skip(File, Header#tar_header.size),
- {ok, Acc}
+-define(EMPTY_ENTRY, <<0,0,0,0,0,0,0,0,0,0,0,0>>).
+to_sparse_entry(Bin) when is_binary(Bin), byte_size(Bin) =:= 24 ->
+ OffsetBin = binary_part(Bin, 0, 12),
+ NumBytesBin = binary_part(Bin, 12, 12),
+ case {OffsetBin, NumBytesBin} of
+ {?EMPTY_ENTRY, ?EMPTY_ENTRY} ->
+ nil;
+ _ ->
+ #sparse_entry{
+ offset=parse_numeric(OffsetBin),
+ num_bytes=parse_numeric(NumBytesBin)}
end.
-%% Checks if the file Name should be extracted.
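+%% Determines the tar format of a header block by verifying its
+%% checksum and then inspecting the magic, version and trailer fields.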
+-spec get_format(binary()) -> {ok, pos_integer(), header_v7()}
+ | ?FORMAT_UNKNOWN
+ | {error, term()}.
+get_format(Bin) when is_binary(Bin), byte_size(Bin) =:= ?BLOCK_SIZE ->
+ do_get_format(to_v7(Bin), Bin).
+
+do_get_format({error, _} = Err, _Bin) ->
+ Err;
+do_get_format(#header_v7{}=V7, Bin)
+ when is_binary(Bin), byte_size(Bin) =:= ?BLOCK_SIZE ->
+ Checksum = parse_octal(V7#header_v7.checksum),
+ Chk1 = compute_checksum(Bin),
+ Chk2 = compute_signed_checksum(Bin),
+ if Checksum =/= Chk1 andalso Checksum =/= Chk2 ->
+ ?FORMAT_UNKNOWN;
+ true ->
+ %% guess magic
+ Ustar = to_ustar(V7, Bin),
+ Star = to_star(V7, Bin),
+ Magic = Ustar#header_ustar.magic,
+ Version = Ustar#header_ustar.version,
+ Trailer = Star#header_star.trailer,
+ Format = if
+ Magic =:= ?MAGIC_USTAR, Trailer =:= ?TRAILER_STAR ->
+ ?FORMAT_STAR;
+ Magic =:= ?MAGIC_USTAR ->
+ ?FORMAT_USTAR;
+ Magic =:= ?MAGIC_GNU, Version =:= ?VERSION_GNU ->
+ ?FORMAT_GNU;
+ true ->
+ ?FORMAT_V7
+ end,
+ {ok, Format, V7}
+ end.
-check_extract(_, #read_opts{files=all}) ->
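+%% Converts a raw header block into a #tar_header{} record according to
+%% the detected format, and returns it together with a reader for the
+%% file data that follows (a sparse-file reader for GNU sparse entries).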
+unpack_format(Format, #header_v7{}=V7, Bin, Reader)
+ when is_binary(Bin), byte_size(Bin) =:= ?BLOCK_SIZE ->
+ Mtime = posix_to_erlang_time(parse_numeric(V7#header_v7.mtime)),
+ Header0 = #tar_header{
+ name=parse_string(V7#header_v7.name),
+ mode=parse_numeric(V7#header_v7.mode),
+ uid=parse_numeric(V7#header_v7.uid),
+ gid=parse_numeric(V7#header_v7.gid),
+ size=parse_numeric(V7#header_v7.size),
+ mtime=Mtime,
+ atime=Mtime,
+ ctime=Mtime,
+ typeflag=V7#header_v7.typeflag,
+ linkname=parse_string(V7#header_v7.linkname)
+ },
+ Typeflag = Header0#tar_header.typeflag,
+ Header1 = if Format > ?FORMAT_V7 ->
+ unpack_modern(Format, V7, Bin, Header0);
+ true ->
+ Name = Header0#tar_header.name,
+ Header0#tar_header{name=safe_join_path("", Name)}
+ end,
+ HeaderOnly = is_header_only_type(Typeflag),
+ Header2 = if HeaderOnly ->
+ Header1#tar_header{size=0};
+ true ->
+ Header1
+ end,
+ if Typeflag =:= ?TYPE_GNU_SPARSE ->
+ Gnu = to_gnu(V7, Bin),
+ RealSize = parse_numeric(Gnu#header_gnu.real_size),
+ {Sparsemap, Reader2} = parse_sparse_map(Gnu, Reader),
+ Header3 = Header2#tar_header{size=RealSize},
+ {Header3, new_sparse_file_reader(Reader2, Sparsemap, RealSize)};
+ true ->
+ FileReader = #reg_file_reader{
+ handle=Reader,
+ num_bytes=Header2#tar_header.size,
+ size=Header2#tar_header.size,
+ pos = 0
+ },
+ {Header2, FileReader}
+ end.
+
+unpack_modern(Format, #header_v7{}=V7, Bin, #tar_header{}=Header0)
+ when is_binary(Bin) ->
+ Typeflag = Header0#tar_header.typeflag,
+ Ustar = to_ustar(V7, Bin),
+ H0 = Header0#tar_header{
+ uname=parse_string(Ustar#header_ustar.uname),
+ gname=parse_string(Ustar#header_ustar.gname)},
+ H1 = if Typeflag =:= ?TYPE_CHAR
+ orelse Typeflag =:= ?TYPE_BLOCK ->
+ Ma = parse_numeric(Ustar#header_ustar.devmajor),
+ Mi = parse_numeric(Ustar#header_ustar.devminor),
+ H0#tar_header{
+ devmajor=Ma,
+ devminor=Mi
+ };
+ true ->
+ H0
+ end,
+ {Prefix, H2} = case Format of
+ ?FORMAT_USTAR ->
+ {parse_string(Ustar#header_ustar.prefix), H1};
+ ?FORMAT_STAR ->
+ Star = to_star(V7, Bin),
+ Prefix0 = parse_string(Star#header_star.prefix),
+ Atime0 = Star#header_star.atime,
+ Atime = posix_to_erlang_time(parse_numeric(Atime0)),
+ Ctime0 = Star#header_star.ctime,
+ Ctime = posix_to_erlang_time(parse_numeric(Ctime0)),
+ {Prefix0, H1#tar_header{
+ atime=Atime,
+ ctime=Ctime
+ }};
+ _ ->
+ {"", H1}
+ end,
+ Name = H2#tar_header.name,
+ H2#tar_header{name=safe_join_path(Prefix, Name)}.
+
+
+safe_join_path([], Name) ->
+ strip_slashes(Name, both);
+safe_join_path(Prefix, []) ->
+ strip_slashes(Prefix, right);
+safe_join_path(Prefix, Name) ->
+ filename:join(strip_slashes(Prefix, right), strip_slashes(Name, both)).
+
+strip_slashes(Str, Direction) ->
+ string:strip(Str, Direction, $/).
+
+new_sparse_file_reader(Reader, Sparsemap, RealSize) ->
+ true = validate_sparse_entries(Sparsemap, RealSize),
+ #sparse_file_reader{
+ handle = Reader,
+ num_bytes = RealSize,
+ pos = 0,
+ size = RealSize,
+ sparse_map = Sparsemap}.
+
+validate_sparse_entries(Entries, RealSize) ->
+ validate_sparse_entries(Entries, RealSize, 0, 0).
+validate_sparse_entries([], _RealSize, _I, _LastOffset) ->
true;
-check_extract(Name, #read_opts{files=Files}) ->
- ordsets:is_element(Name, Files).
+validate_sparse_entries([#sparse_entry{}=Entry|Rest], RealSize, I, LastOffset) ->
+ Offset = Entry#sparse_entry.offset,
+ NumBytes = Entry#sparse_entry.num_bytes,
+ if
+ Offset > ?MAX_INT64-NumBytes ->
+ throw({error, {invalid_sparse_map_entry, offset_too_large}});
+ Offset+NumBytes > RealSize ->
+ throw({error, {invalid_sparse_map_entry, offset_too_large}});
+ I > 0 andalso LastOffset > Offset ->
+ throw({error, {invalid_sparse_map_entry, overlapping_offsets}});
+ true ->
+ ok
+ end,
+ validate_sparse_entries(Rest, RealSize, I+1, Offset+NumBytes).
+
+
+-spec parse_sparse_map(header_gnu(), reader_type()) ->
+ {[sparse_entry()], reader_type()}.
+parse_sparse_map(#header_gnu{sparse=Sparse}, Reader)
+ when Sparse#sparse_array.is_extended ->
+ parse_sparse_map(Sparse, Reader, []);
+parse_sparse_map(#header_gnu{sparse=Sparse}, Reader) ->
+ {Sparse#sparse_array.entries, Reader}.
+parse_sparse_map(#sparse_array{is_extended=true,entries=Entries}, Reader, Acc) ->
+ case read_block(Reader) of
+ eof ->
+ throw({error, eof});
+ {ok, Block, Reader2} ->
+ Sparse2 = to_sparse_array(Block),
+ parse_sparse_map(Sparse2, Reader2, Entries++Acc)
+ end;
+parse_sparse_map(#sparse_array{entries=Entries}, Reader, Acc) ->
+ Sorted = lists:sort(fun (#sparse_entry{offset=A},#sparse_entry{offset=B}) ->
+ A =< B
+ end, Entries++Acc),
+ {Sorted, Reader}.
+
+%% Defined by taking the sum of the unsigned byte values of the
+%% entire header record, treating the checksum bytes as if they were ASCII spaces
+compute_checksum(<<H1:?V7_CHKSUM/binary,
+ H2:?V7_CHKSUM_LEN/binary,
+ Rest:(?BLOCK_SIZE - ?V7_CHKSUM - ?V7_CHKSUM_LEN)/binary,
+ _/binary>>) ->
+ C0 = checksum(H1) + (byte_size(H2) * $\s),
+ C1 = checksum(Rest),
+ C0 + C1.
+
+compute_signed_checksum(<<H1:?V7_CHKSUM/binary,
+ H2:?V7_CHKSUM_LEN/binary,
+ Rest:(?BLOCK_SIZE - ?V7_CHKSUM - ?V7_CHKSUM_LEN)/binary,
+ _/binary>>) ->
+ C0 = signed_checksum(H1) + (byte_size(H2) * $\s),
+ C1 = signed_checksum(Rest),
+ C0 + C1.
-get_header(File) ->
- case do_read(File, ?record_size) of
- eof ->
- throw({error,eof});
- {ok, Bin} when is_binary(Bin) ->
- convert_header(Bin);
- {ok, List} ->
- convert_header(list_to_binary(List));
- {error, Reason} ->
- throw({error, Reason})
- end.
+%% Returns the checksum of a binary.
+checksum(Bin) -> checksum(Bin, 0).
+checksum(<<A/unsigned,Rest/binary>>, Sum) ->
+ checksum(Rest, Sum+A);
+checksum(<<>>, Sum) -> Sum.
-%% Converts the tar header to a record.
+signed_checksum(Bin) -> signed_checksum(Bin, 0).
+signed_checksum(<<A/signed,Rest/binary>>, Sum) ->
+ signed_checksum(Rest, Sum+A);
+signed_checksum(<<>>, Sum) -> Sum.
+
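+%% Parses a numeric header field, which is either an octal string or,
+%% when the high bit of the first byte is set, a GNU base-256
+%% (two's complement, big-endian) encoded integer.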
+-spec parse_numeric(binary()) -> non_neg_integer().
+parse_numeric(<<>>) ->
+ 0;
+parse_numeric(<<First, _/binary>> = Bin) ->
+ %% check for base-256 format first
+ %% if the high bit of the first byte is set, then the remaining bits
+ %% constitute a two's complement encoded number in big-endian byte order
+ if
+ First band 16#80 =/= 0 ->
+ %% Handling negative numbers relies on the following identity:
+ %% -a-1 == ^a
+ %% If the number is negative (bit 16#40 of the first byte is set), we
+ %% use an inversion mask to invert the data bytes and treat the value
+ %% as an unsigned number
+ Inv = if First band 16#40 =/= 0 -> 16#FF; true -> 16#00 end,
+ Bytes = binary:bin_to_list(Bin),
+ Reducer = fun (C, {I, X}) ->
+ C1 = C bxor Inv,
+ C2 = if I =:= 0 -> C1 band 16#7F; true -> C1 end,
+ if (X bsr 56) > 0 ->
+ throw({error,integer_overflow});
+ true ->
+ {I+1, (X bsl 8) bor C2}
+ end
+ end,
+ {_, N} = lists:foldl(Reducer, {0,0}, Bytes),
+ if (N bsr 63) > 0 ->
+ throw({error, integer_overflow});
+ true ->
+ if Inv =:= 16#FF ->
+ -1 bxor N;
+ true ->
+ N
+ end
+ end;
+ true ->
+ %% normal case is an octal number
+ parse_octal(Bin)
+ end.
-convert_header(Bin) when byte_size(Bin) =:= ?record_size ->
- case verify_checksum(Bin) of
- ok ->
- Hd = #tar_header{name=get_name(Bin),
- mode=from_octal(Bin, ?th_mode, ?th_mode_len),
- uid=from_octal(Bin, ?th_uid, ?th_uid_len),
- gid=from_octal(Bin, ?th_gid, ?th_gid_len),
- size=from_octal(Bin, ?th_size, ?th_size_len),
- mtime=from_octal(Bin, ?th_mtime, ?th_mtime_len),
- linkname=from_string(Bin,
- ?th_linkname, ?th_linkname_len),
- typeflag=typeflag(Bin)},
- convert_header1(Hd);
- eof ->
- eof
+parse_octal(Bin) when is_binary(Bin) ->
+ %% skip leading/trailing zero bytes and spaces
+ do_parse_octal(Bin, <<>>).
+do_parse_octal(<<>>, <<>>) ->
+ 0;
+do_parse_octal(<<>>, Acc) ->
+ case io_lib:fread("~8u", binary:bin_to_list(Acc)) of
+ {error, _} -> throw({error, invalid_tar_checksum});
+ {ok, [Octal], []} -> Octal;
+ {ok, _, _} -> throw({error, invalid_tar_checksum})
end;
-convert_header(Bin) when byte_size(Bin) =:= 0 ->
+do_parse_octal(<<$\s,Rest/binary>>, Acc) ->
+ do_parse_octal(Rest, Acc);
+do_parse_octal(<<0, Rest/binary>>, Acc) ->
+ do_parse_octal(Rest, Acc);
+do_parse_octal(<<C, Rest/binary>>, Acc) ->
+ do_parse_octal(Rest, <<Acc/binary, C>>).
+
+parse_string(Bin) when is_binary(Bin) ->
+ do_parse_string(Bin, <<>>).
+do_parse_string(<<>>, Acc) ->
+ case unicode:characters_to_list(Acc) of
+ Str when is_list(Str) ->
+ Str;
+ {incomplete, _Str, _Rest} ->
+ binary:bin_to_list(Acc);
+ {error, _Str, _Rest} ->
+ throw({error, {bad_header, invalid_string}})
+ end;
+do_parse_string(<<0, _/binary>>, Acc) ->
+ do_parse_string(<<>>, Acc);
+do_parse_string(<<C, Rest/binary>>, Acc) ->
+ do_parse_string(Rest, <<Acc/binary, C>>).
+
+convert_header(Bin, #reader{pos=Pos}=Reader)
+ when byte_size(Bin) =:= ?BLOCK_SIZE, (Pos rem ?BLOCK_SIZE) =:= 0 ->
+ case get_format(Bin) of
+ ?FORMAT_UNKNOWN ->
+ throw({error, bad_header});
+ {ok, Format, V7} ->
+ unpack_format(Format, V7, Bin, Reader);
+ {error, Reason} ->
+ throw({error, {bad_header, Reason}})
+ end;
+convert_header(Bin, #reader{pos=Pos}) when byte_size(Bin) =:= ?BLOCK_SIZE ->
+ throw({error, misaligned_read, Pos});
+convert_header(Bin, _Reader) when byte_size(Bin) =:= 0 ->
eof;
-convert_header(_Bin) ->
+convert_header(_Bin, _Reader) ->
throw({error, eof}).
-%% Basic sanity. Better set the element size to zero here if the type
-%% always is of zero length.
-
-convert_header1(H) when H#tar_header.typeflag =:= symlink, H#tar_header.size =/= 0 ->
- convert_header1(H#tar_header{size=0});
-convert_header1(H) when H#tar_header.typeflag =:= directory, H#tar_header.size =/= 0 ->
- convert_header1(H#tar_header{size=0});
-convert_header1(Header) ->
- Header.
-
-typeflag(Bin) ->
- [T] = binary_to_list(Bin, ?th_typeflag+1, ?th_typeflag+1),
- case T of
- 0 -> regular;
- $0 -> regular;
- $1 -> link;
- $2 -> symlink;
- $3 -> char;
- $4 -> block;
- $5 -> directory;
- $6 -> fifo;
- $7 -> regular;
- _ -> unknown
+%% Creates a partially-populated header record based
+%% on the provided file_info record. If the file is
+%% a symlink, then Link is used as the link target.
+%% If the file is a directory, a slash is appended to the name.
+fileinfo_to_header(Name, #file_info{}=Fi, Link) when is_list(Name) ->
+ BaseHeader = #tar_header{name=Name,
+ mtime=Fi#file_info.mtime,
+ atime=Fi#file_info.atime,
+ ctime=Fi#file_info.ctime,
+ mode=Fi#file_info.mode,
+ uid=Fi#file_info.uid,
+ gid=Fi#file_info.gid,
+ typeflag=?TYPE_REGULAR},
+ do_fileinfo_to_header(BaseHeader, Fi, Link).
+
+do_fileinfo_to_header(Header, #file_info{size=Size,type=regular}, _Link) ->
+ Header#tar_header{size=Size,typeflag=?TYPE_REGULAR};
+do_fileinfo_to_header(#tar_header{name=Name}=Header,
+ #file_info{type=directory}, _Link) ->
+ Header#tar_header{name=Name++"/",typeflag=?TYPE_DIR};
+do_fileinfo_to_header(Header, #file_info{type=symlink}, Link) ->
+ Header#tar_header{typeflag=?TYPE_SYMLINK,linkname=Link};
+do_fileinfo_to_header(Header, #file_info{type=device,mode=Mode}=Fi, _Link)
+ when (Mode band ?S_IFMT) =:= ?S_IFCHR ->
+ Header#tar_header{typeflag=?TYPE_CHAR,
+ devmajor=Fi#file_info.major_device,
+ devminor=Fi#file_info.minor_device};
+do_fileinfo_to_header(Header, #file_info{type=device,mode=Mode}=Fi, _Link)
+ when (Mode band ?S_IFMT) =:= ?S_IFBLK ->
+ Header#tar_header{typeflag=?TYPE_BLOCK,
+ devmajor=Fi#file_info.major_device,
+ devminor=Fi#file_info.minor_device};
+do_fileinfo_to_header(Header, #file_info{type=other,mode=Mode}, _Link)
+ when (Mode band ?S_IFMT) =:= ?S_FIFO ->
+ Header#tar_header{typeflag=?TYPE_FIFO};
+do_fileinfo_to_header(Header, Fi, _Link) ->
+ {error, {invalid_file_type, Header#tar_header.name, Fi}}.
+
+is_ascii(Str) when is_list(Str) ->
+ not lists:any(fun (Char) -> Char >= 16#80 end, Str);
+is_ascii(Bin) when is_binary(Bin) ->
+ is_ascii1(Bin).
+
+is_ascii1(<<>>) ->
+ true;
+is_ascii1(<<C,_Rest/binary>>) when C >= 16#80 ->
+ false;
+is_ascii1(<<_, Rest/binary>>) ->
+ is_ascii1(Rest).
+
+to_ascii(Str) when is_list(Str) ->
+ case is_ascii(Str) of
+ true ->
+ unicode:characters_to_binary(Str);
+ false ->
+ Chars = lists:filter(fun (Char) -> Char < 16#80 end, Str),
+ unicode:characters_to_binary(Chars)
+ end;
+to_ascii(Bin) when is_binary(Bin) ->
+ to_ascii(Bin, <<>>).
+to_ascii(<<>>, Acc) ->
+ Acc;
+to_ascii(<<C, Rest/binary>>, Acc) when C < 16#80 ->
+ to_ascii(Rest, <<Acc/binary,C>>);
+to_ascii(<<_, Rest/binary>>, Acc) ->
+ to_ascii(Rest, Acc).
+
+is_header_only_type(?TYPE_SYMLINK) -> true;
+is_header_only_type(?TYPE_LINK) -> true;
+is_header_only_type(?TYPE_DIR) -> true;
+is_header_only_type(_) -> false.
+
+posix_to_erlang_time(Sec) ->
+ OneMillion = 1000000,
+ Time = calendar:now_to_datetime({Sec div OneMillion, Sec rem OneMillion, 0}),
+ erlang:universaltime_to_localtime(Time).
+
+foldl_read(#reader{access=read}=Reader, Fun, Accu, #read_opts{}=Opts)
+ when is_function(Fun,4) ->
+ case foldl_read0(Reader, Fun, Accu, Opts) of
+ {ok, Result, _Reader2} ->
+ Result;
+ {error, _} = Err ->
+ Err
+ end;
+foldl_read(#reader{access=Access}, _Fun, _Accu, _Opts) ->
+ {error, {read_mode_expected, Access}};
+foldl_read(TarName, Fun, Accu, #read_opts{}=Opts)
+ when is_function(Fun,4) ->
+ try open(TarName, [read|Opts#read_opts.open_mode]) of
+ {ok, #reader{access=read}=Reader} ->
+ foldl_read(Reader, Fun, Accu, Opts);
+ {error, _} = Err ->
+ Err
+ catch
+ throw:Err ->
+ Err
end.
-%% Get the name of the file from the prefix and name fields of the
-%% tar header.
-
-get_name(Bin0) ->
- List0 = get_name_raw(Bin0),
- case file:native_name_encoding() of
- utf8 ->
- Bin = list_to_binary(List0),
- case unicode:characters_to_list(Bin) of
- {error,_,_} ->
- List0;
- List when is_list(List) ->
- List
- end;
- latin1 ->
- List0
+foldl_read0(Reader, Fun, Accu, Opts) ->
+ try foldl_read1(Fun, Accu, Reader, Opts, #{}) of
+ {ok,_,_} = Ok ->
+ Ok
+ catch
+ throw:{error, {Reason, Format, Args}} ->
+ read_verbose(Opts, Format, Args),
+ {error, Reason};
+ throw:Err ->
+ Err
end.
-get_name_raw(Bin) ->
- Name = from_string(Bin, ?th_name, ?th_name_len),
- case binary_to_list(Bin, ?th_prefix+1, ?th_prefix+1) of
- [0] ->
- Name;
- [_] ->
- Prefix = binary_to_list(Bin, ?th_prefix+1, byte_size(Bin)),
- lists:reverse(remove_nulls(Prefix), [$/|Name])
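+%% Iterates over the archive entries. PAX extended headers and GNU long
+%% name/linkname entries are accumulated in ExtraHeaders and merged into
+%% the next regular header before it is passed to Fun.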
+foldl_read1(Fun, Accu0, Reader0, Opts, ExtraHeaders) ->
+ {ok, Reader1} = skip_unread(Reader0),
+ case get_header(Reader1) of
+ eof ->
+ Fun(eof, Reader1, Opts, Accu0);
+ {Header, Reader2} ->
+ case Header#tar_header.typeflag of
+ ?TYPE_X_HEADER ->
+ {ExtraHeaders2, Reader3} = parse_pax(Reader2),
+ ExtraHeaders3 = maps:merge(ExtraHeaders, ExtraHeaders2),
+ foldl_read1(Fun, Accu0, Reader3, Opts, ExtraHeaders3);
+ ?TYPE_GNU_LONGNAME ->
+ {RealName, Reader3} = get_real_name(Reader2),
+ ExtraHeaders2 = maps:put(?PAX_PATH,
+ parse_string(RealName), ExtraHeaders),
+ foldl_read1(Fun, Accu0, Reader3, Opts, ExtraHeaders2);
+ ?TYPE_GNU_LONGLINK ->
+ {RealName, Reader3} = get_real_name(Reader2),
+ ExtraHeaders2 = maps:put(?PAX_LINKPATH,
+ parse_string(RealName), ExtraHeaders),
+ foldl_read1(Fun, Accu0, Reader3, Opts, ExtraHeaders2);
+ _ ->
+ Header1 = merge_pax(Header, ExtraHeaders),
+ {ok, NewAccu, Reader3} = Fun(Header1, Reader2, Opts, Accu0),
+ foldl_read1(Fun, NewAccu, Reader3, Opts, #{})
+ end
end.
-from_string(Bin, Pos, Len) ->
- lists:reverse(remove_nulls(binary_to_list(Bin, Pos+1, Pos+Len))).
-
-%% Returns all characters up to (but not including) the first null
-%% character, in REVERSE order.
-
-remove_nulls(List) ->
- remove_nulls(List, []).
-
-remove_nulls([0|_], Result) ->
- remove_nulls([], Result);
-remove_nulls([C|Rest], Result) ->
- remove_nulls(Rest, [C|Result]);
-remove_nulls([], Result) ->
- Result.
-
-from_octal(Bin, Pos, Len) ->
- from_octal(binary_to_list(Bin, Pos+1, Pos+Len)).
-
-from_octal([$\s|Rest]) ->
- from_octal(Rest);
-from_octal([Digit|Rest]) when $0 =< Digit, Digit =< $7 ->
- from_octal(Rest, Digit-$0);
-from_octal(Bin) when is_binary(Bin) ->
- from_octal(binary_to_list(Bin));
-from_octal(Other) ->
- throw({error, {bad_header, "Bad octal number: ~p", [Other]}}).
-
-from_octal([Digit|Rest], Result) when $0 =< Digit, Digit =< $7 ->
- from_octal(Rest, Result*8+Digit-$0);
-from_octal([$\s|_], Result) ->
- Result;
-from_octal([0|_], Result) ->
- Result;
-from_octal(Other, _) ->
- throw({error, {bad_header, "Bad contents in octal field: ~p", [Other]}}).
-
-%% Retrieves the next element from the archive.
-%% Returns {ok, Bin} | eof | {error, Reason}
-
-get_element(File, #tar_header{size = 0}) ->
- skip_to_next(File),
- {ok,<<>>};
-get_element(File, #tar_header{size = Size}) ->
- case do_read(File, Size) of
- {ok,Bin}=Res when byte_size(Bin) =:= Size ->
- skip_to_next(File),
- Res;
- {ok,List} when length(List) =:= Size ->
- skip_to_next(File),
- {ok,list_to_binary(List)};
- {ok,_} -> throw({error,eof});
- {error, Reason} -> throw({error, Reason});
- eof -> throw({error,eof})
+%% Applies all known PAX attributes to the current tar header
+-spec merge_pax(tar_header(), #{binary() => binary()}) -> tar_header().
+merge_pax(Header, ExtraHeaders) when is_map(ExtraHeaders) ->
+ do_merge_pax(Header, maps:to_list(ExtraHeaders)).
+
+do_merge_pax(Header, []) ->
+ Header;
+do_merge_pax(Header, [{?PAX_PATH, Path}|Rest]) ->
+ do_merge_pax(Header#tar_header{name=unicode:characters_to_list(Path)}, Rest);
+do_merge_pax(Header, [{?PAX_LINKPATH, LinkPath}|Rest]) ->
+ do_merge_pax(Header#tar_header{linkname=unicode:characters_to_list(LinkPath)}, Rest);
+do_merge_pax(Header, [{?PAX_GNAME, Gname}|Rest]) ->
+ do_merge_pax(Header#tar_header{gname=unicode:characters_to_list(Gname)}, Rest);
+do_merge_pax(Header, [{?PAX_UNAME, Uname}|Rest]) ->
+ do_merge_pax(Header#tar_header{uname=unicode:characters_to_list(Uname)}, Rest);
+do_merge_pax(Header, [{?PAX_UID, Uid}|Rest]) ->
+ Uid2 = binary_to_integer(Uid),
+ do_merge_pax(Header#tar_header{uid=Uid2}, Rest);
+do_merge_pax(Header, [{?PAX_GID, Gid}|Rest]) ->
+ Gid2 = binary_to_integer(Gid),
+ do_merge_pax(Header#tar_header{gid=Gid2}, Rest);
+do_merge_pax(Header, [{?PAX_ATIME, Atime}|Rest]) ->
+ Atime2 = parse_pax_time(Atime),
+ do_merge_pax(Header#tar_header{atime=Atime2}, Rest);
+do_merge_pax(Header, [{?PAX_MTIME, Mtime}|Rest]) ->
+ Mtime2 = parse_pax_time(Mtime),
+ do_merge_pax(Header#tar_header{mtime=Mtime2}, Rest);
+do_merge_pax(Header, [{?PAX_CTIME, Ctime}|Rest]) ->
+ Ctime2 = parse_pax_time(Ctime),
+ do_merge_pax(Header#tar_header{ctime=Ctime2}, Rest);
+do_merge_pax(Header, [{?PAX_SIZE, Size}|Rest]) ->
+ Size2 = binary_to_integer(Size),
+ do_merge_pax(Header#tar_header{size=Size2}, Rest);
+do_merge_pax(Header, [{<<?PAX_XATTR_STR, _Key/binary>>, _Value}|Rest]) ->
+ do_merge_pax(Header, Rest);
+do_merge_pax(Header, [_Ignore|Rest]) ->
+ do_merge_pax(Header, Rest).
+
+%% Converts a PAX time value (seconds since the UNIX epoch, with an optional fractional part) to a datetime
+-spec parse_pax_time(binary()) -> calendar:datetime().
+parse_pax_time(Bin) when is_binary(Bin) ->
+ TotalNano = case binary:split(Bin, [<<$.>>]) of
+ [SecondsStr, NanoStr0] ->
+ Seconds = binary_to_integer(SecondsStr),
+ if byte_size(NanoStr0) < ?MAX_NANO_INT_SIZE ->
+ %% right pad
+ PaddingN = ?MAX_NANO_INT_SIZE-byte_size(NanoStr0),
+ Padding = binary:copy(<<$0>>, PaddingN),
+ NanoStr1 = <<NanoStr0/binary,Padding/binary>>,
+ Nano = binary_to_integer(NanoStr1),
+ (Seconds*?BILLION)+Nano;
+ byte_size(NanoStr0) > ?MAX_NANO_INT_SIZE ->
+ %% right truncate
+ NanoStr1 = binary_part(NanoStr0, 0, ?MAX_NANO_INT_SIZE),
+ Nano = binary_to_integer(NanoStr1),
+ (Seconds*?BILLION)+Nano;
+ true ->
+ (Seconds*?BILLION)+binary_to_integer(NanoStr0)
+ end;
+ [SecondsStr] ->
+ binary_to_integer(SecondsStr)*?BILLION
+ end,
+ %% truncate to microseconds
+ Micro = TotalNano div 1000,
+ Mega = Micro div 1000000000000,
+ Secs = Micro div 1000000 - (Mega*1000000),
+ Micro2 = Micro rem 1000000,
+ calendar:now_to_datetime({Mega, Secs, Micro2}).
+
+%% Given a regular file reader, reads the whole file and
+%% parses all extended attributes it contains.
+parse_pax(#reg_file_reader{handle=Handle,num_bytes=0}) ->
+ {#{}, Handle};
+parse_pax(#reg_file_reader{handle=Handle0,num_bytes=NumBytes}) ->
+ case do_read(Handle0, NumBytes) of
+ {ok, Bytes, Handle1} ->
+ do_parse_pax(Handle1, Bytes, #{});
+ {error, _} = Err ->
+ throw(Err)
end.
-%% Verify the checksum in the header. First try an unsigned addition
-%% of all bytes in the header (as it should be according to Posix).
-
-verify_checksum(Bin) ->
- <<H1:?th_chksum/binary,CheckStr:?th_chksum_len/binary,H2/binary>> = Bin,
- case checksum(H1) + checksum(H2) of
- 0 -> eof;
- Checksum0 ->
- Csum = from_octal(CheckStr),
- CsumInit = ?th_chksum_len * $\s,
- case Checksum0 + CsumInit of
- Csum -> ok;
- Unsigned ->
- verify_checksum(H1, H2, CsumInit, Csum, Unsigned)
- end
+do_parse_pax(Reader, <<>>, Headers) ->
+ {Headers, Reader};
+do_parse_pax(Reader, Bin, Headers) ->
+ {Key, Value, Residual} = parse_pax_record(Bin),
+ NewHeaders = maps:put(Key, Value, Headers),
+ do_parse_pax(Reader, Residual, NewHeaders).
+
+%% Parse an extended attribute
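+%% Each record has the form "<length> <key>=<value>\n", where <length>
+%% counts the whole record, e.g. "30 mtime=1350244992.023960108\n".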
+parse_pax_record(Bin) when is_binary(Bin) ->
+ case binary:split(Bin, [<<$\n>>]) of
+ [Record, Residual] ->
+ case binary:split(Record, [<<$\s>>], [trim_all]) of
+ [_Len, Record1] ->
+ case binary:split(Record1, [<<$=>>], [trim_all]) of
+ [AttrName, AttrValue] ->
+ {AttrName, AttrValue, Residual};
+ _Other ->
+ throw({error, malformed_pax_record})
+ end;
+ _Other ->
+ throw({error, malformed_pax_record})
+ end;
+ _Other ->
+ throw({error, malformed_pax_record})
end.
-%% The checksums didn't match. Now try a signed addition.
+get_real_name(#reg_file_reader{handle=Handle,num_bytes=0}) ->
+ {"", Handle};
+get_real_name(#reg_file_reader{handle=Handle0,num_bytes=NumBytes}) ->
+ case do_read(Handle0, NumBytes) of
+ {ok, RealName, Handle1} ->
+ {RealName, Handle1};
+ {error, _} = Err ->
+ throw(Err)
+ end;
+get_real_name(#sparse_file_reader{num_bytes=NumBytes}=Reader0) ->
+ case do_read(Reader0, NumBytes) of
+ {ok, RealName, Reader1} ->
+ {RealName, Reader1};
+ {error, _} = Err ->
+ throw(Err)
+ end.
-verify_checksum(H1, H2, Csum, ShouldBe, Unsigned) ->
- case signed_sum(binary_to_list(H1), signed_sum(binary_to_list(H2), Csum)) of
- ShouldBe -> ok;
- Signed ->
- throw({error,
- {bad_header,
- "Incorrect directory checksum ~w (~w), should be ~w",
- [Signed, Unsigned, ShouldBe]}})
+%% Skip the remaining bytes for the current file entry
+skip_file(#reg_file_reader{handle=Handle0,pos=Pos,size=Size}=Reader) ->
+ Padding = skip_padding(Size),
+ AbsPos = Handle0#reader.pos + (Size-Pos) + Padding,
+ case do_position(Handle0, AbsPos) of
+ {ok, _, Handle1} ->
+ Reader#reg_file_reader{handle=Handle1,num_bytes=0,pos=Size};
+ Err ->
+ throw(Err)
+ end;
+skip_file(#sparse_file_reader{pos=Pos,size=Size}=Reader) ->
+ case do_read(Reader, Size-Pos) of
+ {ok, _, Reader2} ->
+ Reader2;
+ Err ->
+ throw(Err)
end.
-signed_sum([C|Rest], Sum) when C < 128 ->
- signed_sum(Rest, Sum+C);
-signed_sum([C|Rest], Sum) ->
- signed_sum(Rest, Sum+C-256);
-signed_sum([], Sum) -> Sum.
-
-write_extracted_element(Header, Bin, Opts)
- when Opts#read_opts.output =:= memory ->
- case Header#tar_header.typeflag of
- regular ->
- {ok, {Header#tar_header.name, Bin}};
- _ ->
- ok
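+%% Number of padding bytes needed to round Size up to the next
+%% 512-byte block boundary.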
+skip_padding(0) ->
+ 0;
+skip_padding(Size) when (Size rem ?BLOCK_SIZE) =:= 0 ->
+ 0;
+skip_padding(Size) when Size =< ?BLOCK_SIZE ->
+ ?BLOCK_SIZE - Size;
+skip_padding(Size) ->
+ ?BLOCK_SIZE - (Size rem ?BLOCK_SIZE).
+
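+%% Positions the reader at the start of the next header block, skipping
+%% any unread file data and the trailing block padding.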
+skip_unread(#reader{pos=Pos}=Reader0) when (Pos rem ?BLOCK_SIZE) > 0 ->
+ Padding = skip_padding(Pos + ?BLOCK_SIZE),
+ AbsPos = Pos + Padding,
+ case do_position(Reader0, AbsPos) of
+ {ok, _, Reader1} ->
+ {ok, Reader1};
+ Err ->
+ throw(Err)
+ end;
+skip_unread(#reader{}=Reader) ->
+ {ok, Reader};
+skip_unread(#reg_file_reader{handle=Handle,num_bytes=0}) ->
+ skip_unread(Handle);
+skip_unread(#reg_file_reader{}=Reader) ->
+ #reg_file_reader{handle=Handle} = skip_file(Reader),
+ {ok, Handle};
+skip_unread(#sparse_file_reader{handle=Handle,num_bytes=0}) ->
+ skip_unread(Handle);
+skip_unread(#sparse_file_reader{}=Reader) ->
+ #sparse_file_reader{handle=Handle} = skip_file(Reader),
+ {ok, Handle}.
+
+write_extracted_element(#tar_header{name=Name,typeflag=Type},
+ Bin,
+ #read_opts{output=memory}=Opts) ->
+ case typeflag(Type) of
+ regular ->
+ read_verbose(Opts, "x ~ts~n", [Name]),
+ {ok, {Name, Bin}};
+ _ ->
+ ok
end;
-write_extracted_element(Header, Bin, Opts) ->
- Name = filename:absname(Header#tar_header.name, Opts#read_opts.cwd),
- Created =
- case Header#tar_header.typeflag of
- regular ->
- write_extracted_file(Name, Bin, Opts);
- directory ->
- create_extracted_dir(Name, Opts);
- symlink ->
- create_symlink(Name, Header, Opts);
- Other -> % Ignore.
- read_verbose(Opts, "x ~ts - unsupported type ~p~n",
- [Name, Other]),
- not_written
- end,
+write_extracted_element(#tar_header{name=Name0}=Header, Bin, Opts) ->
+ Name1 = filename:absname(Name0, Opts#read_opts.cwd),
+ Created =
+ case typeflag(Header#tar_header.typeflag) of
+ regular ->
+ create_regular(Name1, Name0, Bin, Opts);
+ directory ->
+ read_verbose(Opts, "x ~ts~n", [Name0]),
+ create_extracted_dir(Name1, Opts);
+ symlink ->
+ read_verbose(Opts, "x ~ts~n", [Name0]),
+ create_symlink(Name1, Header#tar_header.linkname, Opts);
+ Device when Device =:= char orelse Device =:= block ->
+ %% char/block devices will be created as empty files
+ %% and then have their major/minor device set later
+ create_regular(Name1, Name0, <<>>, Opts);
+ fifo ->
+ %% fifo devices will be created as empty files
+ create_regular(Name1, Name0, <<>>, Opts);
+ Other -> % Ignore.
+ read_verbose(Opts, "x ~ts - unsupported type ~p~n",
+ [Name0, Other]),
+ not_written
+ end,
case Created of
- ok -> set_extracted_file_info(Name, Header);
- not_written -> ok
+ ok -> set_extracted_file_info(Name1, Header);
+ not_written -> ok
+ end.
+
+create_regular(Name, NameInArchive, Bin, Opts) ->
+ case write_extracted_file(Name, Bin, Opts) of
+ not_written ->
+ read_verbose(Opts, "x ~ts - exists, not created~n", [NameInArchive]),
+ not_written;
+ Ok ->
+ read_verbose(Opts, "x ~ts~n", [NameInArchive]),
+ Ok
end.
create_extracted_dir(Name, _Opts) ->
case file:make_dir(Name) of
- ok -> ok;
- {error,enotsup} -> not_written;
- {error,eexist} -> not_written;
- {error,enoent} -> make_dirs(Name, dir);
- {error,Reason} -> throw({error, Reason})
+ ok -> ok;
+ {error,enotsup} -> not_written;
+ {error,eexist} -> not_written;
+ {error,enoent} -> make_dirs(Name, dir);
+ {error,Reason} -> throw({error, Reason})
end.
-create_symlink(Name, #tar_header{linkname=Linkname}=Header, Opts) ->
+create_symlink(Name, Linkname, Opts) ->
case file:make_symlink(Linkname, Name) of
- ok -> ok;
- {error,enoent} ->
- ok = make_dirs(Name, file),
- create_symlink(Name, Header, Opts);
- {error,eexist} -> not_written;
- {error,enotsup} ->
- read_verbose(Opts, "x ~ts - symbolic links not supported~n", [Name]),
- not_written;
- {error,Reason} -> throw({error, Reason})
+ ok -> ok;
+ {error,enoent} ->
+ ok = make_dirs(Name, file),
+ create_symlink(Name, Linkname, Opts);
+ {error,eexist} -> not_written;
+ {error,enotsup} ->
+ read_verbose(Opts, "x ~ts - symbolic links not supported~n", [Name]),
+ not_written;
+ {error,Reason} -> throw({error, Reason})
end.
write_extracted_file(Name, Bin, Opts) ->
Write =
- case Opts#read_opts.keep_old_files of
- true ->
- case file:read_file_info(Name) of
- {ok, _} -> false;
- _ -> true
- end;
- false -> true
- end,
+ case Opts#read_opts.keep_old_files of
+ true ->
+ case file:read_file_info(Name) of
+ {ok, _} -> false;
+ _ -> true
+ end;
+ false -> true
+ end,
case Write of
- true ->
- read_verbose(Opts, "x ~ts~n", [Name]),
- write_file(Name, Bin);
- false ->
- read_verbose(Opts, "x ~ts - exists, not created~n", [Name]),
- not_written
+ true -> write_file(Name, Bin);
+ false -> not_written
end.
write_file(Name, Bin) ->
case file:write_file(Name, Bin) of
- ok -> ok;
- {error,enoent} ->
- ok = make_dirs(Name, file),
- write_file(Name, Bin);
- {error,Reason} ->
- throw({error, Reason})
+ ok -> ok;
+ {error,enoent} ->
+ ok = make_dirs(Name, file),
+ write_file(Name, Bin);
+ {error,Reason} ->
+ throw({error, Reason})
end.
-set_extracted_file_info(_, #tar_header{typeflag = symlink}) -> ok;
-set_extracted_file_info(Name, #tar_header{mode=Mode, mtime=Mtime}) ->
- Info = #file_info{mode=Mode, mtime=posix_to_erlang_time(Mtime)},
+set_extracted_file_info(_, #tar_header{typeflag = ?TYPE_SYMLINK}) -> ok;
+set_extracted_file_info(_, #tar_header{typeflag = ?TYPE_LINK}) -> ok;
+set_extracted_file_info(Name, #tar_header{typeflag = ?TYPE_CHAR}=Header) ->
+ set_device_info(Name, Header);
+set_extracted_file_info(Name, #tar_header{typeflag = ?TYPE_BLOCK}=Header) ->
+ set_device_info(Name, Header);
+set_extracted_file_info(Name, #tar_header{mtime=Mtime,mode=Mode}) ->
+ Info = #file_info{mode=Mode, mtime=Mtime},
+ file:write_file_info(Name, Info).
+
+set_device_info(Name, #tar_header{}=Header) ->
+ Mtime = Header#tar_header.mtime,
+ Mode = Header#tar_header.mode,
+ Devmajor = Header#tar_header.devmajor,
+ Devminor = Header#tar_header.devminor,
+ Info = #file_info{
+ mode=Mode,
+ mtime=Mtime,
+ major_device=Devmajor,
+ minor_device=Devminor
+ },
file:write_file_info(Name, Info).
%% Makes all directories leading up to the file.
make_dirs(Name, file) ->
- filelib:ensure_dir(Name);
+ filelib:ensure_dir(Name);
make_dirs(Name, dir) ->
- filelib:ensure_dir(filename:join(Name,"*")).
+ filelib:ensure_dir(filename:join(Name,"*")).
 %% Prints the message if the verbose option is given (for reading).
-
read_verbose(#read_opts{verbose=true}, Format, Args) ->
- io:format(Format, Args),
- io:nl();
+ io:format(Format, Args);
read_verbose(_, _, _) ->
ok.
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%%%
-%%% Utility functions.
-%%%
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-%% Returns the checksum of a binary.
-
-checksum(Bin) -> checksum(Bin, 0).
-
-checksum(<<A,B,C,D,E,F,G,H,T/binary>>, Sum) ->
- checksum(T, Sum+A+B+C+D+E+F+G+H);
-checksum(<<A,T/binary>>, Sum) ->
- checksum(T, Sum+A);
-checksum(<<>>, Sum) -> Sum.
-
-%% Returns a list of zeroes to pad out to the given block size.
-
-padding(Size, BlockSize) ->
- zeroes(pad_size(Size, BlockSize)).
-
-pad_size(Size, BlockSize) ->
- case Size rem BlockSize of
- 0 -> 0;
- Rem -> BlockSize-Rem
- end.
-
-zeroes(0) -> [];
-zeroes(1) -> [0];
-zeroes(2) -> [0,0];
-zeroes(Number) ->
- Half = zeroes(Number div 2),
- case Number rem 2 of
- 0 -> [Half|Half];
- 1 -> [Half|[0|Half]]
- end.
-
-%% Skips the given number of bytes rounded up to an even record.
-
-skip(File, Size) ->
- %% Note: There is no point in handling failure to get the current position
- %% in the file. If it doesn't work, something serious is wrong.
- Amount = ((Size + ?record_size - 1) div ?record_size) * ?record_size,
- {ok,_} = do_position(File, {cur, Amount}),
- ok.
-
-%% Skips to the next record in the file.
-
-skip_to_next(File) ->
- %% Note: There is no point in handling failure to get the current position
- %% in the file. If it doesn't work, something serious is wrong.
- {ok, Position} = do_position(File, {cur, 0}),
- NewPosition = ((Position + ?record_size - 1) div ?record_size) * ?record_size,
- {ok,NewPosition} = do_position(File, NewPosition),
- ok.
-
 %% Prints the message if the verbose option is given.
-
add_verbose(#add_opts{verbose=true}, Format, Args) ->
io:format(Format, Args);
add_verbose(_, _, _) ->
ok.
-%% Converts a tuple containing the time to a Posix time (seconds
-%% since Jan 1, 1970).
+%%%%%%%%%%%%%%%%%%
+%% I/O primitives
+%%%%%%%%%%%%%%%%%%
+
+do_write(#reader{handle=Handle,func=Fun}=Reader0, Data)
+ when is_function(Fun,2) ->
+ case Fun(write,{Handle,Data}) of
+ ok ->
+ {ok, Pos, Reader1} = do_position(Reader0, {cur,0}),
+ {ok, Reader1#reader{pos=Pos}};
+ {error, _} = Err ->
+ Err
+ end.
-posix_time(Time) ->
- EpochStart = {{1970,1,1},{0,0,0}},
- {Days,{Hour,Min,Sec}} = calendar:time_difference(EpochStart, Time),
- 86400*Days + 3600*Hour + 60*Min + Sec.
+do_copy(#reader{func=Fun}=Reader, Source, #add_opts{chunk_size=0}=Opts)
+ when is_function(Fun, 2) ->
+ do_copy(Reader, Source, Opts#add_opts{chunk_size=65536});
+do_copy(#reader{func=Fun}=Reader, Source, #add_opts{chunk_size=ChunkSize})
+ when is_function(Fun, 2) ->
+ case file:open(Source, [read, binary]) of
+ {ok, SourceFd} ->
+ case copy_chunked(Reader, SourceFd, ChunkSize, 0) of
+ {ok, _Copied, _Reader2} = Ok ->
+ _ = file:close(SourceFd),
+ Ok;
+ Err ->
+ _ = file:close(SourceFd),
+ throw(Err)
+ end;
+ Err ->
+ throw(Err)
+ end.
-posix_to_erlang_time(Sec) ->
- OneMillion = 1000000,
- Time = calendar:now_to_datetime({Sec div OneMillion, Sec rem OneMillion, 0}),
- erlang:universaltime_to_localtime(Time).
+copy_chunked(#reader{}=Reader, Source, ChunkSize, Copied) ->
+ case file:read(Source, ChunkSize) of
+ {ok, Bin} ->
+ {ok, Reader2} = do_write(Reader, Bin),
+ copy_chunked(Reader2, Source, ChunkSize, Copied+byte_size(Bin));
+ eof ->
+ {ok, Copied, Reader};
+ Other ->
+ Other
+ end.
-read_file_and_info(Name, Opts) ->
- ReadInfo = Opts#add_opts.read_info,
- case ReadInfo(Name) of
- {ok,Info} when Info#file_info.type =:= regular,
- Opts#add_opts.chunk_size>0 ->
- {ok,chunked,Info};
- {ok,Info} when Info#file_info.type =:= regular ->
- case file:read_file(Name) of
- {ok,Bin} ->
- {ok,Bin,Info};
- Error ->
- Error
- end;
- {ok,Info} when Info#file_info.type =:= symlink ->
- case file:read_link(Name) of
- {ok,PointsTo} ->
- {ok,PointsTo,Info};
- Error ->
- Error
- end;
- {ok, Info} ->
- {ok,[],Info};
- Error ->
- Error
+
+do_position(#reader{handle=Handle,func=Fun}=Reader, Pos)
+ when is_function(Fun,2)->
+ case Fun(position, {Handle,Pos}) of
+ {ok, NewPos} ->
+ %% since Pos may not always be an absolute seek,
+ %% make sure we update the reader with the new absolute position
+ {ok, AbsPos} = Fun(position, {Handle, {cur, 0}}),
+ {ok, NewPos, Reader#reader{pos=AbsPos}};
+ Other ->
+ Other
end.
-foreach_while_ok(Fun, [First|Rest]) ->
- case Fun(First) of
- ok -> foreach_while_ok(Fun, Rest);
- Other -> Other
+do_read(#reg_file_reader{handle=Handle,pos=Pos,size=Size}=Reader, Len) ->
+ NumBytes = Size - Pos,
+ ActualLen = if NumBytes - Len < 0 -> NumBytes; true -> Len end,
+ case do_read(Handle, ActualLen) of
+ {ok, Bin, Handle2} ->
+ NewPos = Pos + ActualLen,
+ NumBytes2 = Size - NewPos,
+ Reader1 = Reader#reg_file_reader{
+ handle=Handle2,
+ pos=NewPos,
+ num_bytes=NumBytes2},
+ {ok, Bin, Reader1};
+ Other ->
+ Other
end;
-foreach_while_ok(_, []) -> ok.
-
-open_mode(Mode) ->
- open_mode(Mode, false, [raw], []).
+do_read(#sparse_file_reader{}=Reader, Len) ->
+ do_sparse_read(Reader, Len);
+do_read(#reader{pos=Pos,handle=Handle,func=Fun}=Reader, Len)
+ when is_function(Fun,2)->
+ %% Always convert to binary internally
+ case Fun(read2,{Handle,Len}) of
+ {ok, List} when is_list(List) ->
+ Bin = list_to_binary(List),
+ NewPos = Pos+byte_size(Bin),
+ {ok, Bin, Reader#reader{pos=NewPos}};
+ {ok, Bin} when is_binary(Bin) ->
+ NewPos = Pos+byte_size(Bin),
+ {ok, Bin, Reader#reader{pos=NewPos}};
+ Other ->
+ Other
+ end.
-open_mode(read, _, Raw, _) ->
- {ok, read, Raw, []};
-open_mode(write, _, Raw, _) ->
- {ok, write, Raw, []};
-open_mode([read|Rest], false, Raw, Opts) ->
- open_mode(Rest, read, Raw, Opts);
-open_mode([write|Rest], false, Raw, Opts) ->
- open_mode(Rest, write, Raw, Opts);
-open_mode([compressed|Rest], Access, Raw, Opts) ->
- open_mode(Rest, Access, Raw, [compressed|Opts]);
-open_mode([cooked|Rest], Access, _Raw, Opts) ->
- open_mode(Rest, Access, [], Opts);
-open_mode([], Access, Raw, Opts) ->
- {ok, Access, Raw, Opts};
-open_mode(_, _, _, _) ->
- {error, einval}.
-%%%================================================================
-do_write({tar_descriptor,UsrHandle,Fun}, Data) -> Fun(write,{UsrHandle,Data}).
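+%% Reads Len bytes from a sparse file, materializing zero-filled holes
+%% between the data fragments described by the sparse map.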
+do_sparse_read(Reader, Len) ->
+ do_sparse_read(Reader, Len, <<>>).
+
+do_sparse_read(#sparse_file_reader{sparse_map=[#sparse_entry{num_bytes=0}|Entries]
+ }=Reader0, Len, Acc) ->
+ %% skip all empty fragments
+ Reader1 = Reader0#sparse_file_reader{sparse_map=Entries},
+ do_sparse_read(Reader1, Len, Acc);
+do_sparse_read(#sparse_file_reader{sparse_map=[],
+ pos=Pos,size=Size}=Reader0, Len, Acc)
+ when Pos < Size ->
+ %% If there are no more fragments, it is possible that there is one last sparse hole.
+ %% This behaviour matches the BSD tar utility; however, GNU tar stops returning data
+ %% even if we haven't reached the end.
+ {ok, Bin, Reader1} = read_sparse_hole(Reader0, Size, Len),
+ do_sparse_read(Reader1, Len-byte_size(Bin), <<Acc/binary,Bin/binary>>);
+do_sparse_read(#sparse_file_reader{sparse_map=[]}=Reader, _Len, Acc) ->
+ {ok, Acc, Reader};
+do_sparse_read(#sparse_file_reader{}=Reader, 0, Acc) ->
+ {ok, Acc, Reader};
+do_sparse_read(#sparse_file_reader{sparse_map=[#sparse_entry{offset=Offset}|_],
+ pos=Pos}=Reader0, Len, Acc)
+ when Pos < Offset ->
+ {ok, Bin, Reader1} = read_sparse_hole(Reader0, Offset, Offset-Pos),
+ do_sparse_read(Reader1, Len-byte_size(Bin), <<Acc/binary,Bin/binary>>);
+do_sparse_read(#sparse_file_reader{sparse_map=[Entry|Entries],
+ pos=Pos}=Reader0, Len, Acc) ->
+ %% we're in a data fragment, so read from it
+ %% end offset of fragment
+ EndPos = Entry#sparse_entry.offset + Entry#sparse_entry.num_bytes,
+ %% bytes left in fragment
+ NumBytes = EndPos - Pos,
+ ActualLen = if Len > NumBytes -> NumBytes; true -> Len end,
+ case do_read(Reader0#sparse_file_reader.handle, ActualLen) of
+ {ok, Bin, Handle} ->
+ BytesRead = byte_size(Bin),
+ ActualEndPos = Pos+BytesRead,
+ Reader1 = if ActualEndPos =:= EndPos ->
+ Reader0#sparse_file_reader{sparse_map=Entries};
+ true ->
+ Reader0
+ end,
+ Size = Reader1#sparse_file_reader.size,
+ NumBytes2 = Size - ActualEndPos,
+ Reader2 = Reader1#sparse_file_reader{
+ handle=Handle,
+ pos=ActualEndPos,
+ num_bytes=NumBytes2},
+ do_sparse_read(Reader2, Len-byte_size(Bin), <<Acc/binary,Bin/binary>>);
+ Other ->
+ Other
+ end.
+
+%% Reads a sparse hole ending at Offset
+read_sparse_hole(#sparse_file_reader{pos=Pos}=Reader, Offset, Len) ->
+ N = Offset - Pos,
+ N2 = if N > Len ->
+ Len;
+ true ->
+ N
+ end,
+ Bin = <<0:N2/unit:8>>,
+ NumBytes = Reader#sparse_file_reader.size - (Pos+N2),
+ {ok, Bin, Reader#sparse_file_reader{
+ num_bytes=NumBytes,
+ pos=Pos+N2}}.
+
+-spec do_close(reader()) -> ok | {error, term()}.
+do_close(#reader{handle=Handle,func=Fun}) when is_function(Fun,2) ->
+ Fun(close,Handle).
+
+%%%%%%%%%%%%%%%%%%
+%% Option parsing
+%%%%%%%%%%%%%%%%%%
-do_position({tar_descriptor,UsrHandle,Fun}, Pos) -> Fun(position,{UsrHandle,Pos}).
+extract_opts(List) ->
+ extract_opts(List, default_options()).
-do_read({tar_descriptor,UsrHandle,Fun}, Len) -> Fun(read2,{UsrHandle,Len}).
+table_opts(List) ->
+ read_opts(List, default_options()).
+
+default_options() ->
+ {ok, Cwd} = file:get_cwd(),
+ #read_opts{cwd=Cwd}.
-do_close({tar_descriptor,UsrHandle,Fun}) -> Fun(close,UsrHandle).
+extract_opts([keep_old_files|Rest], Opts) ->
+ extract_opts(Rest, Opts#read_opts{keep_old_files=true});
+extract_opts([{cwd, Cwd}|Rest], Opts) ->
+ extract_opts(Rest, Opts#read_opts{cwd=Cwd});
+extract_opts([{files, Files}|Rest], Opts) ->
+ Set = ordsets:from_list(Files),
+ extract_opts(Rest, Opts#read_opts{files=Set});
+extract_opts([memory|Rest], Opts) ->
+ extract_opts(Rest, Opts#read_opts{output=memory});
+extract_opts([compressed|Rest], Opts=#read_opts{open_mode=OpenMode}) ->
+ extract_opts(Rest, Opts#read_opts{open_mode=[compressed|OpenMode]});
+extract_opts([cooked|Rest], Opts=#read_opts{open_mode=OpenMode}) ->
+ extract_opts(Rest, Opts#read_opts{open_mode=[cooked|OpenMode]});
+extract_opts([verbose|Rest], Opts) ->
+ extract_opts(Rest, Opts#read_opts{verbose=true});
+extract_opts([Other|Rest], Opts) ->
+ extract_opts(Rest, read_opts([Other], Opts));
+extract_opts([], Opts) ->
+ Opts.
+
+read_opts([compressed|Rest], Opts=#read_opts{open_mode=OpenMode}) ->
+ read_opts(Rest, Opts#read_opts{open_mode=[compressed|OpenMode]});
+read_opts([cooked|Rest], Opts=#read_opts{open_mode=OpenMode}) ->
+ read_opts(Rest, Opts#read_opts{open_mode=[cooked|OpenMode]});
+read_opts([verbose|Rest], Opts) ->
+ read_opts(Rest, Opts#read_opts{verbose=true});
+read_opts([_|Rest], Opts) ->
+ read_opts(Rest, Opts);
+read_opts([], Opts) ->
+ Opts.
diff --git a/lib/stdlib/src/erl_tar.hrl b/lib/stdlib/src/erl_tar.hrl
new file mode 100644
index 0000000000..d646d02989
--- /dev/null
+++ b/lib/stdlib/src/erl_tar.hrl
@@ -0,0 +1,394 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2017. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+
+%% Options used when adding files to a tar archive.
+-record(add_opts, {
+ read_info, %% Fun to use for read file/link info.
+ chunk_size = 0, %% For file reading when sending to sftp. 0=do not chunk
+ verbose = false}). %% Verbose on/off.
+-type add_opts() :: #add_opts{}.
+
+%% Options used when reading a tar archive.
+-record(read_opts, {
+ cwd :: string(), %% Current working directory.
+ keep_old_files = false :: boolean(), %% Overwrite or not.
+ files = all, %% Set of files to extract (or all)
+ output = file :: 'file' | 'memory',
+ open_mode = [], %% Open mode options.
+ verbose = false :: boolean()}). %% Verbose on/off.
+-type read_opts() :: #read_opts{}.
+
+-type add_opt() :: dereference |
+ verbose |
+ {chunks, pos_integer()}.
+
+-type extract_opt() :: {cwd, string()} |
+ {files, [string()]} |
+ compressed |
+ cooked |
+ memory |
+ keep_old_files |
+ verbose.
+
+-type create_opt() :: compressed |
+ cooked |
+ dereference |
+ verbose.
+
+-type filelist() :: [file:filename() |
+ {string(), binary()} |
+ {string(), file:filename()}].
+
+%% The tar header, once fully parsed.
+-record(tar_header, {
+ name = "" :: string(), %% name of header file entry
+ mode = 8#100644 :: non_neg_integer(), %% permission and mode bits
+ uid = 0 :: non_neg_integer(), %% user id of owner
+ gid = 0 :: non_neg_integer(), %% group id of owner
+ size = 0 :: non_neg_integer(), %% length in bytes
+ mtime :: calendar:datetime(), %% modified time
+ typeflag :: char(), %% type of header entry
+ linkname = "" :: string(), %% target name of link
+ uname = "" :: string(), %% user name of owner
+ gname = "" :: string(), %% group name of owner
+ devmajor = 0 :: non_neg_integer(), %% major number of character or block device
+ devminor = 0 :: non_neg_integer(), %% minor number of character or block device
+ atime :: calendar:datetime(), %% access time
+ ctime :: calendar:datetime() %% status change time
+ }).
+-type tar_header() :: #tar_header{}.
+
+%% Metadata for a sparse file fragment
+-record(sparse_entry, {
+ offset = 0 :: non_neg_integer(),
+ num_bytes = 0 :: non_neg_integer()}).
+-type sparse_entry() :: #sparse_entry{}.
+%% Contains metadata about fragments of a sparse file
+-record(sparse_array, {
+ entries = [] :: [sparse_entry()],
+ is_extended = false :: boolean(),
+ max_entries = 0 :: non_neg_integer()}).
+-type sparse_array() :: #sparse_array{}.
+%% A subset of tar header fields common to all tar implementations
+-record(header_v7, {
+ name :: binary(),
+ mode :: binary(), %% octal
+ uid :: binary(), %% integer
+ gid :: binary(), %% integer
+ size :: binary(), %% integer
+ mtime :: binary(), %% integer
+ checksum :: binary(), %% integer
+ typeflag :: byte(), %% char
+ linkname :: binary()}).
+-type header_v7() :: #header_v7{}.
+%% The set of fields specific to GNU tar formatted archives
+-record(header_gnu, {
+ header_v7 :: header_v7(),
+ magic :: binary(),
+ version :: binary(),
+ uname :: binary(),
+ gname :: binary(),
+ devmajor :: binary(), %% integer
+ devminor :: binary(), %% integer
+ atime :: binary(), %% integer
+ ctime :: binary(), %% integer
+ sparse :: sparse_array(),
+ real_size :: binary()}). %% integer
+-type header_gnu() :: #header_gnu{}.
+%% The set of fields specific to STAR-formatted archives
+-record(header_star, {
+ header_v7 :: header_v7(),
+ magic :: binary(),
+ version :: binary(),
+ uname :: binary(),
+ gname :: binary(),
+ devmajor :: binary(), %% integer
+ devminor :: binary(), %% integer
+ prefix :: binary(),
+ atime :: binary(), %% integer
+ ctime :: binary(), %% integer
+ trailer :: binary()}).
+-type header_star() :: #header_star{}.
+%% The set of fields specific to USTAR-formatted archives
+-record(header_ustar, {
+ header_v7 :: header_v7(),
+ magic :: binary(),
+ version :: binary(),
+ uname :: binary(),
+ gname :: binary(),
+ devmajor :: binary(), %% integer
+ devminor :: binary(), %% integer
+ prefix :: binary()}).
+-type header_ustar() :: #header_ustar{}.
+
+-type header_fields() :: header_v7() |
+ header_gnu() |
+ header_star() |
+ header_ustar().
+
+%% The overall tar reader; it holds the low-level file handle,
+%% its access mode, current position, and the I/O primitive wrapper.
+-record(reader, {
+ handle :: file:io_device() | term(),
+ access :: read | write | ram,
+ pos = 0 :: non_neg_integer(),
+ func :: file_op()
+ }).
+-type reader() :: #reader{}.
+%% A reader for a regular file within the tar archive;
+%% it tracks its current state relative to that file.
+-record(reg_file_reader, {
+ handle :: reader(),
+ num_bytes = 0,
+ pos = 0,
+ size = 0
+ }).
+-type reg_file_reader() :: #reg_file_reader{}.
+%% A reader for a sparse file within the tar archive;
+%% it tracks its current state relative to that file.
+-record(sparse_file_reader, {
+ handle :: reader(),
+ num_bytes = 0, %% bytes remaining
+ pos = 0, %% pos
+ size = 0, %% total size of file
+ sparse_map = #sparse_array{}
+ }).
+-type sparse_file_reader() :: #sparse_file_reader{}.
+
+%% Types for the readers
+-type reader_type() :: reader() | reg_file_reader() | sparse_file_reader().
+-type handle() :: file:io_device() | term().
+
+%% Type for the I/O primitive wrapper function
+-type file_op() :: fun((write | close | read2 | position,
+ {handle(), iodata()} | handle() | {handle(), non_neg_integer()}
+ | {handle(), non_neg_integer()}) ->
+ ok | eof | {ok, string() | binary()} | {ok, non_neg_integer()}
+ | {error, term()}).
+
+%% These constants (except S_IFMT) are
+%% used to determine what type of device
+%% a file is. Namely, `S_IFMT band file_info.mode`
+%% will equal one of these constants, and tells us
+%% which type it is. The stdlib file_info record
+%% does not differentiate between device types, and
+%% will not allow us to differentiate between sockets
+%% and named pipes. These constants are pulled from libc.
+-define(S_IFMT, 61440).
+-define(S_IFSOCK, 49152). %% socket
+-define(S_FIFO, 4096). %% fifo/named pipe
+-define(S_IFBLK, 24576). %% block device
+-define(S_IFCHR, 8192). %% character device
+
+%% Typeflag constants for the tar header
+-define(TYPE_REGULAR, $0). %% regular file
+-define(TYPE_REGULAR_A, 0). %% regular file
+-define(TYPE_LINK, $1). %% hard link
+-define(TYPE_SYMLINK, $2). %% symbolic link
+-define(TYPE_CHAR, $3). %% character device node
+-define(TYPE_BLOCK, $4). %% block device node
+-define(TYPE_DIR, $5). %% directory
+-define(TYPE_FIFO, $6). %% fifo node
+-define(TYPE_CONT, $7). %% reserved
+-define(TYPE_X_HEADER, $x). %% extended header
+-define(TYPE_X_GLOBAL_HEADER, $g). %% global extended header
+-define(TYPE_GNU_LONGNAME, $L). %% next file has a long name
+-define(TYPE_GNU_LONGLINK, $K). %% next file symlinks to a file with a long name
+-define(TYPE_GNU_SPARSE, $S). %% sparse file
+
+%% Mode constants from tar spec
+-define(MODE_ISUID, 4000). %% set uid
+-define(MODE_ISGID, 2000). %% set gid
+-define(MODE_ISVTX, 1000). %% save text (sticky bit)
+-define(MODE_ISDIR, 40000). %% directory
+-define(MODE_ISFIFO, 10000). %% fifo
+-define(MODE_ISREG, 100000). %% regular file
+-define(MODE_ISLNK, 120000). %% symbolic link
+-define(MODE_ISBLK, 60000). %% block special file
+-define(MODE_ISCHR, 20000). %% character special file
+-define(MODE_ISSOCK, 140000). %% socket
+
+%% Keywords for PAX extended header
+-define(PAX_ATIME, <<"atime">>).
+-define(PAX_CHARSET, <<"charset">>).
+-define(PAX_COMMENT, <<"comment">>).
+-define(PAX_CTIME, <<"ctime">>). %% ctime is not a valid pax header
+-define(PAX_GID, <<"gid">>).
+-define(PAX_GNAME, <<"gname">>).
+-define(PAX_LINKPATH, <<"linkpath">>).
+-define(PAX_MTIME, <<"mtime">>).
+-define(PAX_PATH, <<"path">>).
+-define(PAX_SIZE, <<"size">>).
+-define(PAX_UID, <<"uid">>).
+-define(PAX_UNAME, <<"uname">>).
+-define(PAX_XATTR, <<"SCHILY.xattr.">>).
+-define(PAX_XATTR_STR, "SCHILY.xattr.").
+-define(PAX_NONE, <<"">>).
+
+%% Tar format constants
+%% Unknown format
+-define(FORMAT_UNKNOWN, 0).
+%% The format of the original Unix V7 tar tool prior to standardization
+-define(FORMAT_V7, 1).
+%% The old and new GNU formats, incompatible with USTAR.
+%% This covers the old GNU sparse extension, but it does
+%% not cover the GNU sparse extensions using PAX headers,
+%% versions 0.0, 0.1, and 1.0; these fall under the PAX format.
+-define(FORMAT_GNU, 2).
+%% Schily's tar format, which is incompatible with USTAR.
+%% This does not cover STAR extensions to the PAX format; these
+%% fall under the PAX format.
+-define(FORMAT_STAR, 3).
+%% USTAR is the former standardization of tar defined in POSIX.1-1988;
+%% it is incompatible with the GNU and STAR formats.
+-define(FORMAT_USTAR, 4).
+%% PAX is the latest standardization of tar defined in POSIX.1-2001.
+%% This is an extension of USTAR and is "backwards compatible" with it.
+%%
+%% Some newer formats add their own extensions to PAX, such as GNU sparse
+%% files and SCHILY extended attributes. Since they are backwards compatible
+%% with PAX, they will be labelled as "PAX".
+-define(FORMAT_PAX, 5).
+
+%% Magic constants
+-define(MAGIC_GNU, <<"ustar ">>).
+-define(VERSION_GNU, <<" \x00">>).
+-define(MAGIC_USTAR, <<"ustar\x00">>).
+-define(VERSION_USTAR, <<"00">>).
+-define(TRAILER_STAR, <<"tar\x00">>).
+
+%% Size constants
+-define(BLOCK_SIZE, 512). %% size of each block in a tar stream
+-define(NAME_SIZE, 100). %% max length of the name field in USTAR format
+-define(PREFIX_SIZE, 155). %% max length of the prefix field in USTAR format
+
+%% Maximum size of a nanosecond value as an integer
+-define(MAX_NANO_INT_SIZE, 9).
+%% Maximum value of a 64-bit signed integer
+-define(MAX_INT64, (1 bsl 63 - 1)).
+
+-define(PAX_GNU_SPARSE_NUMBLOCKS, <<"GNU.sparse.numblocks">>).
+-define(PAX_GNU_SPARSE_OFFSET, <<"GNU.sparse.offset">>).
+-define(PAX_GNU_SPARSE_NUMBYTES, <<"GNU.sparse.numbytes">>).
+-define(PAX_GNU_SPARSE_MAP, <<"GNU.sparse.map">>).
+-define(PAX_GNU_SPARSE_NAME, <<"GNU.sparse.name">>).
+-define(PAX_GNU_SPARSE_MAJOR, <<"GNU.sparse.major">>).
+-define(PAX_GNU_SPARSE_MINOR, <<"GNU.sparse.minor">>).
+-define(PAX_GNU_SPARSE_SIZE, <<"GNU.sparse.size">>).
+-define(PAX_GNU_SPARSE_REALSIZE, <<"GNU.sparse.realsize">>).
+
+-define(V7_NAME, 0).
+-define(V7_NAME_LEN, 100).
+-define(V7_MODE, 100).
+-define(V7_MODE_LEN, 8).
+-define(V7_UID, 108).
+-define(V7_UID_LEN, 8).
+-define(V7_GID, 116).
+-define(V7_GID_LEN, 8).
+-define(V7_SIZE, 124).
+-define(V7_SIZE_LEN, 12).
+-define(V7_MTIME, 136).
+-define(V7_MTIME_LEN, 12).
+-define(V7_CHKSUM, 148).
+-define(V7_CHKSUM_LEN, 8).
+-define(V7_TYPE, 156).
+-define(V7_TYPE_LEN, 1).
+-define(V7_LINKNAME, 157).
+-define(V7_LINKNAME_LEN, 100).
+
+-define(STAR_TRAILER, 508).
+-define(STAR_TRAILER_LEN, 4).
+
+-define(USTAR_MAGIC, 257).
+-define(USTAR_MAGIC_LEN, 6).
+-define(USTAR_VERSION, 263).
+-define(USTAR_VERSION_LEN, 2).
+-define(USTAR_UNAME, 265).
+-define(USTAR_UNAME_LEN, 32).
+-define(USTAR_GNAME, 297).
+-define(USTAR_GNAME_LEN, 32).
+-define(USTAR_DEVMAJ, 329).
+-define(USTAR_DEVMAJ_LEN, 8).
+-define(USTAR_DEVMIN, 337).
+-define(USTAR_DEVMIN_LEN, 8).
+-define(USTAR_PREFIX, 345).
+-define(USTAR_PREFIX_LEN, 155).
+
+-define(GNU_MAGIC, 257).
+-define(GNU_MAGIC_LEN, 6).
+-define(GNU_VERSION, 263).
+-define(GNU_VERSION_LEN, 2).
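%% Illustrative sketch (not part of the patch): distinguishing header formats via
%% the magic/version fields at the offsets defined above, given one 512-byte
%% header block. Telling PAX apart from USTAR also needs the typeflag, so that
%% distinction is not attempted here.
guess_format(Header) when byte_size(Header) =:= ?BLOCK_SIZE ->
    Magic = binary:part(Header, ?USTAR_MAGIC, ?USTAR_MAGIC_LEN),
    Version = binary:part(Header, ?USTAR_VERSION, ?USTAR_VERSION_LEN),
    case {Magic, Version} of
        {?MAGIC_USTAR, ?VERSION_USTAR} -> ?FORMAT_USTAR;
        {?MAGIC_GNU, ?VERSION_GNU} -> ?FORMAT_GNU;
        _ -> ?FORMAT_V7
    end.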
+
+%% ?BLOCK_SIZE of zero-bytes.
+%% Two of these in a row mark the end of an archive.
+-define(ZERO_BLOCK, <<0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,0,0>>).
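%% Illustrative sketch (not part of the patch): the end-of-archive condition the
%% comment above describes, i.e. two consecutive zero-filled blocks.
is_end_of_archive(<<Block1:?BLOCK_SIZE/binary, Block2:?BLOCK_SIZE/binary, _/binary>>) ->
    Block1 =:= ?ZERO_BLOCK andalso Block2 =:= ?ZERO_BLOCK;
is_end_of_archive(_) ->
    false.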
+
+-define(BILLION, 1000000000).
+
+-define(EPOCH, {{1970,1,1}, {0,0,0}}).
diff --git a/lib/stdlib/src/ets.erl b/lib/stdlib/src/ets.erl
index 20de06fd0b..d6fd1e3ea1 100644
--- a/lib/stdlib/src/ets.erl
+++ b/lib/stdlib/src/ets.erl
@@ -51,8 +51,8 @@
-type tab() :: atom() | tid().
-type type() :: set | ordered_set | bag | duplicate_bag.
-type continuation() :: '$end_of_table'
- | {tab(),integer(),integer(),binary(),list(),integer()}
- | {tab(),_,_,integer(),binary(),list(),integer(),integer()}.
+ | {tab(),integer(),integer(),comp_match_spec(),list(),integer()}
+ | {tab(),_,_,integer(),comp_match_spec(),list(),integer(),integer()}.
-opaque tid() :: integer().
@@ -488,7 +488,7 @@ update_element(_, _, _) ->
%%% End of BIFs
--opaque comp_match_spec() :: binary(). %% this one is REALLY opaque
+-opaque comp_match_spec() :: reference().
-spec match_spec_run(List, CompiledMatchSpec) -> list() when
List :: [tuple()],
@@ -505,28 +505,28 @@ match_spec_run(List, CompiledMS) ->
repair_continuation('$end_of_table', _) ->
'$end_of_table';
%% ordered_set
-repair_continuation(Untouched = {Table,Lastkey,EndCondition,N2,Bin,L2,N3,N4}, MS)
+repair_continuation(Untouched = {Table,Lastkey,EndCondition,N2,MSRef,L2,N3,N4}, MS)
when %% (is_atom(Table) or is_integer(Table)),
is_integer(N2),
- byte_size(Bin) =:= 0,
+ %% is_reference(MSRef),
is_list(L2),
is_integer(N3),
is_integer(N4) ->
- case ets:is_compiled_ms(Bin) of
+ case ets:is_compiled_ms(MSRef) of
true ->
Untouched;
false ->
{Table,Lastkey,EndCondition,N2,ets:match_spec_compile(MS),L2,N3,N4}
end;
%% set/bag/duplicate_bag
-repair_continuation(Untouched = {Table,N1,N2,Bin,L,N3}, MS)
+repair_continuation(Untouched = {Table,N1,N2,MSRef,L,N3}, MS)
when %% (is_atom(Table) or is_integer(Table)),
is_integer(N1),
is_integer(N2),
- byte_size(Bin) =:= 0,
+ %% is_reference(MSRef),
is_list(L),
is_integer(N3) ->
- case ets:is_compiled_ms(Bin) of
+ case ets:is_compiled_ms(MSRef) of
true ->
Untouched;
false ->
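%% Illustrative sketch (not part of the patch) of why repair_continuation/2 exists:
%% a continuation embeds a compiled match spec, which does not survive storage on
%% disk or transfer to another node, so it must be repaired with the original
%% match spec before being reused. The table contents here are made up.
Tab = ets:new(squares, [ordered_set]),
true = ets:insert(Tab, [{N, N * N} || N <- lists:seq(1, 100)]),
MS = [{{'$1', '$2'}, [{'>', '$1', 10}], ['$2']}],
{_Hits, Cont} = ets:select(Tab, MS, 5),
Cont1 = binary_to_term(term_to_binary(Cont)),   %% e.g. written to disk and read back
{_More, _Cont2} = ets:select(ets:repair_continuation(Cont1, MS)).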
diff --git a/lib/stdlib/src/filelib.erl b/lib/stdlib/src/filelib.erl
index 7029389e2f..daa18da9aa 100644
--- a/lib/stdlib/src/filelib.erl
+++ b/lib/stdlib/src/filelib.erl
@@ -24,6 +24,7 @@
-export([fold_files/5, last_modified/1, file_size/1, ensure_dir/1]).
-export([wildcard/3, is_dir/2, is_file/2, is_regular/2]).
-export([fold_files/6, last_modified/2, file_size/2]).
+-export([find_file/2, find_file/3, find_source/1, find_source/2, find_source/3]).
%% For debugging/testing.
-export([compile_wildcard/1]).
@@ -517,3 +518,124 @@ eval_list_dir(Dir, erl_prim_loader) ->
end;
eval_list_dir(Dir, Mod) ->
Mod:list_dir(Dir).
+
+%% Getting the rules to use for file search
+
+keep_dir_search_rules(Rules) ->
+ [T || {_,_}=T <- Rules].
+
+keep_suffix_search_rules(Rules) ->
+ [T || {_,_,_}=T <- Rules].
+
+get_search_rules() ->
+ case application:get_env(kernel, source_search_rules) of
+ undefined -> default_search_rules();
+ {ok, []} -> default_search_rules();
+ {ok, R} when is_list(R) -> R
+ end.
+
+default_search_rules() ->
+ [%% suffix-specific rules for source search
+ {".beam", ".erl", erl_source_search_rules()},
+ {".erl", ".yrl", []},
+ {"", ".src", erl_source_search_rules()},
+ {".so", ".c", c_source_search_rules()},
+ {".o", ".c", c_source_search_rules()},
+ {"", ".c", c_source_search_rules()},
+ {"", ".in", basic_source_search_rules()},
+ %% plain old directory rules, backwards compatible
+ {"", ""},
+ {"ebin","src"},
+ {"ebin","esrc"}
+ ].
+
+basic_source_search_rules() ->
+ (erl_source_search_rules()
+ ++ c_source_search_rules()).
+
+erl_source_search_rules() ->
+ [{"ebin","src"}, {"ebin","esrc"}].
+
+c_source_search_rules() ->
+ [{"priv","c_src"}, {"priv","src"}, {"bin","c_src"}, {"bin","src"}, {"", "src"}].
+
+%% Looks for a file relative to a given directory
+
+-type find_file_rule() :: {ObjDirSuffix::string(), SrcDirSuffix::string()}.
+
+-spec find_file(filename(), filename()) ->
+ {ok, filename()} | {error, not_found}.
+find_file(Filename, Dir) ->
+ find_file(Filename, Dir, []).
+
+-spec find_file(filename(), filename(), [find_file_rule()]) ->
+ {ok, filename()} | {error, not_found}.
+find_file(Filename, Dir, []) ->
+ find_file(Filename, Dir, get_search_rules());
+find_file(Filename, Dir, Rules) ->
+ try_dir_rules(keep_dir_search_rules(Rules), Filename, Dir).
+
+%% Looks for a source file relative to the object file name and directory
+
+-type find_source_rule() :: {ObjExtension::string(), SrcExtension::string(),
+ [find_file_rule()]}.
+
+-spec find_source(filename()) ->
+ {ok, filename()} | {error, not_found}.
+find_source(FilePath) ->
+ find_source(filename:basename(FilePath), filename:dirname(FilePath)).
+
+-spec find_source(filename(), filename()) ->
+ {ok, filename()} | {error, not_found}.
+find_source(Filename, Dir) ->
+ find_source(Filename, Dir, []).
+
+-spec find_source(filename(), filename(), [find_source_rule()]) ->
+ {ok, filename()} | {error, not_found}.
+find_source(Filename, Dir, []) ->
+ find_source(Filename, Dir, get_search_rules());
+find_source(Filename, Dir, Rules) ->
+ try_suffix_rules(keep_suffix_search_rules(Rules), Filename, Dir).
+
+try_suffix_rules(Rules, Filename, Dir) ->
+ Ext = filename:extension(Filename),
+ try_suffix_rules(Rules, filename:rootname(Filename, Ext), Dir, Ext).
+
+try_suffix_rules([{Ext,Src,Rules}|Rest], Root, Dir, Ext)
+ when is_list(Src), is_list(Rules) ->
+ case try_dir_rules(add_local_search(Rules), Root ++ Src, Dir) of
+ {ok, File} -> {ok, File};
+ _Other ->
+ try_suffix_rules(Rest, Root, Dir, Ext)
+ end;
+try_suffix_rules([_|Rest], Root, Dir, Ext) ->
+ try_suffix_rules(Rest, Root, Dir, Ext);
+try_suffix_rules([], _Root, _Dir, _Ext) ->
+ {error, not_found}.
+
+%% Ensure that the directory of the object file is searched before any other directory
+add_local_search(Rules) ->
+ Local = {"",""},
+ [Local] ++ lists:filter(fun (X) -> X =/= Local end, Rules).
+
+try_dir_rules([{From, To}|Rest], Filename, Dir)
+ when is_list(From), is_list(To) ->
+ case try_dir_rule(Dir, Filename, From, To) of
+ {ok, File} -> {ok, File};
+ error -> try_dir_rules(Rest, Filename, Dir)
+ end;
+try_dir_rules([], _Filename, _Dir) ->
+ {error, not_found}.
+
+try_dir_rule(Dir, Filename, From, To) ->
+ case lists:suffix(From, Dir) of
+ true ->
+ NewDir = lists:sublist(Dir, 1, length(Dir)-length(From))++To,
+ Src = filename:join(NewDir, Filename),
+ case is_regular(Src) of
+ true -> {ok, Src};
+ false -> error
+ end;
+ false ->
+ error
+ end.
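%% Illustrative sketch (not part of the patch): typical use of the new API to
%% locate the source file that a loaded module was compiled from.
case filelib:find_source(code:which(filelib)) of
    {ok, SrcPath} -> io:format("compiled from ~ts~n", [SrcPath]);
    {error, not_found} -> io:format("no source found~n")
end.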
diff --git a/lib/stdlib/src/filename.erl b/lib/stdlib/src/filename.erl
index c4586171ca..b5df5c9d37 100644
--- a/lib/stdlib/src/filename.erl
+++ b/lib/stdlib/src/filename.erl
@@ -19,6 +19,9 @@
%%
-module(filename).
+-deprecated({find_src,1,next_major_release}).
+-deprecated({find_src,2,next_major_release}).
+
%% Purpose: Provides generic manipulation of filenames.
%%
%% Generally, these functions accept filenames in the native format
@@ -34,8 +37,9 @@
-export([absname/1, absname/2, absname_join/2,
basename/1, basename/2, dirname/1,
extension/1, join/1, join/2, pathtype/1,
- rootname/1, rootname/2, split/1, nativename/1]).
--export([find_src/1, find_src/2, flatten/1]).
+ rootname/1, rootname/2, split/1, flatten/1, nativename/1,
+ safe_relative_path/1]).
+-export([find_src/1, find_src/2]). % deprecated
-export([basedir/2, basedir/3]).
%% Undocumented and unsupported exports.
@@ -750,7 +754,45 @@ separators() ->
_ -> {false, false}
end.
+-spec safe_relative_path(Filename) -> 'unsafe' | SafeFilename when
+ Filename :: file:name_all(),
+ SafeFilename :: file:name_all().
+
+safe_relative_path(Path) ->
+ case pathtype(Path) of
+ relative ->
+ Cs0 = split(Path),
+ safe_relative_path_1(Cs0, []);
+ _ ->
+ unsafe
+ end.
+safe_relative_path_1(["."|T], Acc) ->
+ safe_relative_path_1(T, Acc);
+safe_relative_path_1([<<".">>|T], Acc) ->
+ safe_relative_path_1(T, Acc);
+safe_relative_path_1([".."|T], Acc) ->
+ climb(T, Acc);
+safe_relative_path_1([<<"..">>|T], Acc) ->
+ climb(T, Acc);
+safe_relative_path_1([H|T], Acc) ->
+ safe_relative_path_1(T, [H|Acc]);
+safe_relative_path_1([], []) ->
+ [];
+safe_relative_path_1([], Acc) ->
+ join(lists:reverse(Acc)).
+
+climb(_, []) ->
+ unsafe;
+climb(T, [_|Acc]) ->
+ safe_relative_path_1(T, Acc).
+
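%% Illustrative examples (not part of the patch) of the intended behaviour of
%% safe_relative_path/1 as defined above:
%%   safe_relative_path("dir/sub/file")   -> "dir/sub/file"
%%   safe_relative_path("dir/../file")    -> "file"
%%   safe_relative_path("dir/../../file") -> unsafe   (climbs above the start)
%%   safe_relative_path("/etc/passwd")    -> unsafe   (not a relative path)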
+%% NOTE: The find_src/1/2 functions are deprecated; they try to do too much
+%% at once and are not a good fit for this module. Parts of the code have
+%% been moved to filelib:find_file/2 instead. Only this part of this
+%% module is allowed to call the filelib module; such mutual dependency
+%% should otherwise be avoided! This code should eventually be removed.
+%%
%% find_src(Module) --
%% find_src(Module, Rules) --
@@ -793,14 +835,7 @@ separators() ->
| {'d', atom()},
ErrorReason :: 'non_existing' | 'preloaded' | 'interpreted'.
find_src(Mod) ->
- Default = [{"", ""}, {"ebin", "src"}, {"ebin", "esrc"}],
- Rules =
- case application:get_env(kernel, source_search_rules) of
- undefined -> Default;
- {ok, []} -> Default;
- {ok, R} when is_list(R) -> R
- end,
- find_src(Mod, Rules).
+ find_src(Mod, []).
-spec find_src(Beam, Rules) -> {SourceFile, Options}
| {error, {ErrorReason, Module}} when
@@ -816,44 +851,47 @@ find_src(Mod) ->
ErrorReason :: 'non_existing' | 'preloaded' | 'interpreted'.
find_src(Mod, Rules) when is_atom(Mod) ->
find_src(atom_to_list(Mod), Rules);
-find_src(File0, Rules) when is_list(File0) ->
- Mod = list_to_atom(basename(File0, ".erl")),
- File = rootname(File0, ".erl"),
- case readable_file(File++".erl") of
- true ->
- try_file(File, Mod, Rules);
- false ->
- try_file(undefined, Mod, Rules)
- end.
-
-try_file(File, Mod, Rules) ->
+find_src(ModOrFile, Rules) when is_list(ModOrFile) ->
+ Extension = ".erl",
+ Mod = list_to_atom(basename(ModOrFile, Extension)),
case code:which(Mod) of
Possibly_Rel_Path when is_list(Possibly_Rel_Path) ->
- {ok, Cwd} = file:get_cwd(),
- Path = join(Cwd, Possibly_Rel_Path),
- try_file(File, Path, Mod, Rules);
+ {ok, Cwd} = file:get_cwd(),
+ ObjPath = make_abs_path(Cwd, Possibly_Rel_Path),
+ find_src_1(ModOrFile, ObjPath, Mod, Extension, Rules);
Ecode when is_atom(Ecode) -> % Ecode :: ecode()
{error, {Ecode, Mod}}
end.
%% At this point, the Mod is known to be valid.
%% If the source name is not known, find it.
-%% Then get the compilation options.
-%% Returns: {SrcFile, Options}
+find_src_1(ModOrFile, ObjPath, Mod, Extension, Rules) ->
+ %% The documentation says this function must return the found path
+ %% without extension in all cases. Also, ModOrFile could be given with
+ %% or without extension. Hence the calls to rootname below.
+ ModOrFileRoot = rootname(ModOrFile, Extension),
+ case filelib:is_regular(ModOrFileRoot++Extension) of
+ true ->
+ find_src_2(ModOrFileRoot, Mod);
+ false ->
+ SrcName = basename(ObjPath, code:objfile_extension()) ++ Extension,
+ case filelib:find_file(SrcName, dirname(ObjPath), Rules) of
+ {ok, SrcFile} ->
+ find_src_2(rootname(SrcFile, Extension), Mod);
+ Error ->
+ Error
+ end
+ end.
-try_file(undefined, ObjFilename, Mod, Rules) ->
- case get_source_file(ObjFilename, Mod, Rules) of
- {ok, File} -> try_file(File, ObjFilename, Mod, Rules);
- Error -> Error
- end;
-try_file(Src, _ObjFilename, Mod, _Rules) ->
+%% Get the compilation options and return {SrcFileRoot, Options}
+find_src_2(SrcRoot, Mod) ->
List = case Mod:module_info(compile) of
none -> [];
List0 -> List0
end,
Options = proplists:get_value(options, List, []),
{ok, Cwd} = file:get_cwd(),
- AbsPath = make_abs_path(Cwd, Src),
+ AbsPath = make_abs_path(Cwd, SrcRoot),
{AbsPath, filter_options(dirname(AbsPath), Options, [])}.
%% Filters the options.
@@ -884,42 +922,6 @@ filter_options(Base, [_|Rest], Result) ->
filter_options(_Base, [], Result) ->
Result.
-%% Gets the source file given path of object code and module name.
-
-get_source_file(Obj, Mod, Rules) ->
- source_by_rules(dirname(Obj), atom_to_list(Mod), Rules).
-
-source_by_rules(Dir, Base, [{From, To}|Rest]) ->
- case try_rule(Dir, Base, From, To) of
- {ok, File} -> {ok, File};
- error -> source_by_rules(Dir, Base, Rest)
- end;
-source_by_rules(_Dir, _Base, []) ->
- {error, source_file_not_found}.
-
-try_rule(Dir, Base, From, To) ->
- case lists:suffix(From, Dir) of
- true ->
- NewDir = lists:sublist(Dir, 1, length(Dir)-length(From))++To,
- Src = join(NewDir, Base),
- case readable_file(Src++".erl") of
- true -> {ok, Src};
- false -> error
- end;
- false ->
- error
- end.
-
-readable_file(File) ->
- case file:read_file_info(File) of
- {ok, #file_info{type=regular, access=read}} ->
- true;
- {ok, #file_info{type=regular, access=read_write}} ->
- true;
- _Other ->
- false
- end.
-
make_abs_path(BasePath, Path) ->
join(BasePath, Path).
diff --git a/lib/stdlib/src/gen_event.erl b/lib/stdlib/src/gen_event.erl
index 4839fe4f2c..0aebf1bdc5 100644
--- a/lib/stdlib/src/gen_event.erl
+++ b/lib/stdlib/src/gen_event.erl
@@ -778,7 +778,7 @@ stop_handlers([], _) ->
[].
%% Message from the release_handler.
-%% The list of modules got to be a set !
+%% The list of modules must be a set, i.e. no duplicate elements!
get_modules(MSL) ->
Mods = [Handler#handler.module || Handler <- MSL],
ordsets:to_list(ordsets:from_list(Mods)).
diff --git a/lib/stdlib/src/gen_fsm.erl b/lib/stdlib/src/gen_fsm.erl
index 6e7528fd98..e925a75fe8 100644
--- a/lib/stdlib/src/gen_fsm.erl
+++ b/lib/stdlib/src/gen_fsm.erl
@@ -273,7 +273,7 @@ start_timer(Time, Msg) ->
send_event_after(Time, Event) ->
erlang:start_timer(Time, self(), {'$gen_event', Event}).
-%% Returns the remaing time for the timer if Ref referred to
+%% Returns the remaining time for the timer if Ref referred to
%% an active timer/send_event_after, false otherwise.
cancel_timer(Ref) ->
case erlang:cancel_timer(Ref) of
diff --git a/lib/stdlib/src/gen_statem.erl b/lib/stdlib/src/gen_statem.erl
index 018aca90e6..cacc932ec4 100644
--- a/lib/stdlib/src/gen_statem.erl
+++ b/lib/stdlib/src/gen_statem.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2016. All Rights Reserved.
+%% Copyright Ericsson AB 2016-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -47,15 +47,17 @@
%% Type exports for templates and callback modules
-export_type(
[event_type/0,
- init_result/0,
callback_mode_result/0,
- state_function_result/0,
- handle_event_result/0,
+ init_result/1,
state_enter_result/1,
event_handler_result/1,
reply_action/0,
enter_action/0,
action/0]).
+%% Old types, not advertised
+-export_type(
+ [state_function_result/0,
+ handle_event_result/0]).
%% Type that is exported just to be documented
-export_type([transition_option/0]).
@@ -143,9 +145,10 @@
{'reply', % Reply to a caller
From :: from(), Reply :: term()}.
--type init_result() ::
- {ok, state(), data()} |
- {ok, state(), data(), [action()] | action()} |
+-type init_result(StateType) ::
+ {ok, State :: StateType, Data :: data()} |
+ {ok, State :: StateType, Data :: data(),
+ Actions :: [action()] | action()} |
'ignore' |
{'stop', Reason :: term()}.
@@ -182,12 +185,23 @@
'keep_state_and_data' | % {keep_state_and_data,[]}
{'keep_state_and_data', % Keep state and data -> only actions
Actions :: [ActionType] | ActionType} |
+ %%
+ {'repeat_state', % {repeat_state,NewData,[]}
+ NewData :: data()} |
+ {'repeat_state', % Repeat state, change data
+ NewData :: data(),
+ Actions :: [ActionType] | ActionType} |
+ 'repeat_state_and_data' | % {repeat_state_and_data,[]}
+ {'repeat_state_and_data', % Repeat state and data -> only actions
+ Actions :: [ActionType] | ActionType} |
+ %%
'stop' | % {stop,normal}
{'stop', % Stop the server
Reason :: term()} |
{'stop', % Stop the server
Reason :: term(),
NewData :: data()} |
+ %%
{'stop_and_reply', % Reply then stop the server
Reason :: term(),
Replies :: [reply_action()] | reply_action()} |
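%% Illustrative sketch (not part of the patch): a callback using the new
%% repeat_state return value together with state enter calls and state_timeout.
%% Assumes callback_mode() -> [handle_event_function, state_enter] and a map
%% Data with a hypothetical 'retries' key.
handle_event(enter, _OldState, _State, _Data) ->
    %% Runs on every state entry, and again after repeat_state
    {keep_state_and_data, [{state_timeout, 5000, give_up}]};
handle_event(cast, retry, _State, Data) ->
    %% Stay in the same state but re-run the enter call,
    %% which restarts the state_timeout above
    {repeat_state, Data#{retries := maps:get(retries, Data) + 1}};
handle_event(state_timeout, give_up, _State, Data) ->
    {stop, normal, Data}.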
@@ -201,7 +215,7 @@
%% the server is not running until this function has returned
%% an {ok, ...} tuple. Thereafter the state callbacks are called
%% for all events to this server.
--callback init(Args :: term()) -> init_result().
+-callback init(Args :: term()) -> init_result(state()).
%% This callback shall return the callback mode of the callback module.
%%
@@ -275,6 +289,8 @@
-optional_callbacks(
[init/1, % One may use enter_loop/5,6,7 instead
format_status/2, % Has got a default implementation
+ terminate/3, % Has got a default implementation
+ code_change/4, % Only needed by advanced soft upgrade
%%
state_name/3, % Example for callback_mode() =:= state_functions:
%% there has to be a StateName/3 callback function
@@ -304,12 +320,16 @@ event_type({call,From}) ->
from(From);
event_type(Type) ->
case Type of
+ {call,From} ->
+ from(From);
cast ->
true;
info ->
true;
timeout ->
true;
+ state_timeout ->
+ true;
internal ->
true;
_ ->
@@ -588,6 +608,22 @@ enter(Module, Opts, State, Data, Server, Actions, Parent) ->
true ->
[Actions,{postpone,false}]
end,
+ TimerRefs = #{},
+ %% Key: timer ref
+ %% Value: the timer type, i.e. the timer's event type
+ %%
+ TimerTypes = #{},
+ %% Key: timer type, i.e. the timer's event type
+ %% Value: timer ref
+ %%
+ %% We add a timer to both timer_refs and timer_types
+ %% when we start it. When we request an asynchronous
+ %% timer cancel we remove it from timer_types. When
+ %% the timer cancel message arrives we remove it from
+ %% timer_refs.
+ %%
+ Hibernate = false,
+ CancelTimers = 0,
S = #{
callback_mode => undefined,
state_enter => false,
@@ -596,25 +632,25 @@ enter(Module, Opts, State, Data, Server, Actions, Parent) ->
state => State,
data => Data,
postponed => P,
- %% The rest of the fields are set from to the arguments to
- %% loop_event_actions/10 when it finally loops back to loop/3
- %% in loop_events/10
%%
- %% Marker for initial state, cleared immediately when used
- init_state => true
+ %% The following fields are finally set from the arguments to
+ %% loop_event_actions/9 when it loops back to loop/3
+ %% in loop_event_result/11
+ timer_refs => TimerRefs,
+ timer_types => TimerTypes,
+ hibernate => Hibernate,
+ cancel_timers => CancelTimers
},
NewDebug = sys_debug(Debug, S, State, {enter,Event,State}),
case call_callback_mode(S) of
{ok,NewS} ->
- TimerRefs = #{},
- TimerTypes = #{},
loop_event_actions(
- Parent, NewDebug, NewS, TimerRefs, TimerTypes,
- Events, Event, State, Data, NewActions);
+ Parent, NewDebug, NewS,
+ Events, Event, State, Data, NewActions, true);
{Class,Reason,Stacktrace} ->
terminate(
- Class, Reason, Stacktrace,
- NewDebug, S, [Event|Events])
+ Class, Reason, Stacktrace, NewDebug,
+ S, [Event|Events])
end.
%%%==========================================================================
@@ -683,9 +719,7 @@ system_continue(Parent, Debug, S) ->
loop(Parent, Debug, S).
system_terminate(Reason, _Parent, Debug, S) ->
- terminate(
- exit, Reason, ?STACKTRACE(),
- Debug, S, []).
+ terminate(exit, Reason, ?STACKTRACE(), Debug, S, []).
system_code_change(
#{module := Module,
@@ -796,23 +830,22 @@ wakeup_from_hibernate(Parent, Debug, S) ->
%% and detours through sys:handle_system_message/7 and proc_lib:hibernate/3
%% Entry point for system_continue/3
-loop(Parent, Debug, #{hibernate := Hibernate} = S) ->
- case Hibernate of
- true ->
- %% Does not return but restarts process at
- %% wakeup_from_hibernate/3 that jumps to loop_receive/3
- proc_lib:hibernate(
- ?MODULE, wakeup_from_hibernate, [Parent,Debug,S]),
- error(
- {should_not_have_arrived_here_but_instead_in,
- {wakeup_from_hibernate,3}});
- false ->
- loop_receive(Parent, Debug, S)
- end.
+loop(Parent, Debug, #{hibernate := true, cancel_timers := 0} = S) ->
+ loop_hibernate(Parent, Debug, S);
+loop(Parent, Debug, S) ->
+ loop_receive(Parent, Debug, S).
+
+loop_hibernate(Parent, Debug, S) ->
+ %% Does not return but restarts process at
+ %% wakeup_from_hibernate/3 that jumps to loop_receive/3
+ proc_lib:hibernate(
+ ?MODULE, wakeup_from_hibernate, [Parent,Debug,S]),
+ error(
+ {should_not_have_arrived_here_but_instead_in,
+ {wakeup_from_hibernate,3}}).
%% Entry point for wakeup_from_hibernate/3
-loop_receive(
- Parent, Debug, #{timer_refs := TimerRefs, timer_types := TimerTypes} = S) ->
+loop_receive(Parent, Debug, S) ->
receive
Msg ->
case Msg of
@@ -821,30 +854,87 @@ loop_receive(
%% Does not return but tail recursively calls
%% system_continue/3 that jumps to loop/3
sys:handle_system_msg(
- Req, Pid, Parent, ?MODULE, Debug, S, Hibernate);
+ Req, Pid, Parent, ?MODULE, Debug, S,
+ Hibernate);
{'EXIT',Parent,Reason} = EXIT ->
- %% EXIT is not a 2-tuple and therefore
- %% not an event and has no event_type(),
- %% but this will stand out in the crash report...
- terminate(
- exit, Reason, ?STACKTRACE(), Debug, S, [EXIT]);
+ %% EXIT is not a 2-tuple and therefore
+ %% not an event, but it will stand out
+ %% in the crash report...
+ Q = [EXIT],
+ terminate(exit, Reason, ?STACKTRACE(), Debug, S, Q);
{timeout,TimerRef,TimerMsg} ->
+ #{timer_refs := TimerRefs,
+ timer_types := TimerTypes,
+ hibernate := Hibernate} = S,
case TimerRefs of
#{TimerRef := TimerType} ->
- Event = {TimerType,TimerMsg},
- %% Unregister the triggered timeout
+ %% We know of this timer; is it a running
+ %% timer or a timer being cancelled that
+ %% managed to send a late timeout message?
+ case TimerTypes of
+ #{TimerType := TimerRef} ->
+ %% The timer type maps back to this
+ %% timer ref, so it was a running timer
+ Event = {TimerType,TimerMsg},
+ %% Unregister the triggered timeout
+ NewTimerRefs =
+ maps:remove(TimerRef, TimerRefs),
+ NewTimerTypes =
+ maps:remove(TimerType, TimerTypes),
+ loop_receive_result(
+ Parent, Debug,
+ S#{
+ timer_refs := NewTimerRefs,
+ timer_types := NewTimerTypes},
+ Hibernate,
+ Event);
+ _ ->
+ %% This was a late timeout message
+ %% from timer being cancelled, so
+ %% ignore it and expect a cancel_timer
+ %% msg shortly
+ loop_receive(Parent, Debug, S)
+ end;
+ _ ->
+ %% Not our timer; present it as an event
+ Event = {info,Msg},
loop_receive_result(
- Parent, Debug, S,
- maps:remove(TimerRef, TimerRefs),
- maps:remove(TimerType, TimerTypes),
- Event);
+ Parent, Debug, S, Hibernate, Event)
+ end;
+ {cancel_timer,TimerRef,_} ->
+ #{timer_refs := TimerRefs,
+ cancel_timers := CancelTimers,
+ hibernate := Hibernate} = S,
+ case TimerRefs of
+ #{TimerRef := _} ->
+ %% We must have requested a cancel
+ %% of this timer so it is already
+ %% removed from TimerTypes
+ NewTimerRefs =
+ maps:remove(TimerRef, TimerRefs),
+ NewCancelTimers = CancelTimers - 1,
+ NewS =
+ S#{
+ timer_refs := NewTimerRefs,
+ cancel_timers := NewCancelTimers},
+ if
+ Hibernate =:= true, NewCancelTimers =:= 0 ->
+ %% No more cancel_timer msgs to expect;
+ %% we can hibernate
+ loop_hibernate(Parent, Debug, NewS);
+ NewCancelTimers >= 0 -> % Assert
+ loop_receive(Parent, Debug, NewS)
+ end;
_ ->
+ %% Not our cancel_timer msg;
+ %% present it as an event
Event = {info,Msg},
loop_receive_result(
- Parent, Debug, S,
- TimerRefs, TimerTypes, Event)
+ Parent, Debug, S, Hibernate, Event)
end;
_ ->
+ %% External msg
+ #{hibernate := Hibernate} = S,
Event =
case Msg of
{'$gen_call',From,Request} ->
@@ -855,208 +945,212 @@ loop_receive(
{info,Msg}
end,
loop_receive_result(
- Parent, Debug, S,
- TimerRefs, TimerTypes, Event)
+ Parent, Debug, S, Hibernate, Event)
end
end.
loop_receive_result(
- Parent, Debug, #{state := State} = S,
- TimerRefs, TimerTypes, Event) ->
- %% The fields 'timer_refs', 'timer_types' and 'hibernate'
- %% are now invalid in state map S - they will be recalculated
- %% and restored when we return to loop/3
- %%
+ Parent, Debug,
+ #{state := State,
+ timer_types := TimerTypes, cancel_timers := CancelTimers} = S,
+ Hibernate, Event) ->
+ %% From now on the 'hibernate' field in S is invalid
+ %% and will be restored when looping back
+ %% in loop_event_result/11
NewDebug = sys_debug(Debug, S, State, {in,Event}),
- %% Here the queue of not yet handled events is created
+ %% Here is the queue of not yet handled events created
Events = [],
- Hibernate = false,
- loop_event(
- Parent, NewDebug, S, TimerRefs, TimerTypes, Events, Event, Hibernate).
+ %% Cancel any running event timer
+ case
+ cancel_timer_by_type(timeout, TimerTypes, CancelTimers)
+ of
+ {_,CancelTimers} ->
+ %% No timer cancelled
+ loop_event(Parent, NewDebug, S, Events, Event, Hibernate);
+ {NewTimerTypes,NewCancelTimers} ->
+ %% The timer is removed from NewTimerTypes but
+ %% remains in TimerRefs until we get
+ %% the cancel_timer msg
+ NewS =
+ S#{
+ timer_types := NewTimerTypes,
+ cancel_timers := NewCancelTimers},
+ loop_event(Parent, NewDebug, NewS, Events, Event, Hibernate)
+ end.
%% Entry point for handling an event, received or enqueued
loop_event(
- Parent, Debug, #{state := State, data := Data} = S, TimerRefs, TimerTypes,
+ Parent, Debug,
+ #{state := State, data := Data} = S,
Events, {Type,Content} = Event, Hibernate) ->
%%
- %% If Hibernate is true here it can only be
+ %% If (this old) Hibernate is true here it can only be
%% because it was set from an event action
- %% and we did not go into hibernation since there
- %% were events in queue, so we do what the user
+ %% and we did not go into hibernation since there were
+ %% events in queue, so we do what the user
%% might rely on i.e collect garbage which
%% would have happened if we actually hibernated
%% and immediately was awakened
Hibernate andalso garbage_collect(),
case call_state_function(S, Type, Content, State, Data) of
{ok,Result,NewS} ->
- %% Cancel event timeout
- {NewTimerRefs,NewTimerTypes} =
- cancel_timer_by_type(
- timeout, TimerRefs, TimerTypes),
- {NewData,NextState,Actions} =
+ {NextState,NewData,Actions,EnterCall} =
parse_event_result(
- true, Debug, NewS, Result,
- Events, Event, State, Data),
+ true, Debug, NewS,
+ Events, Event, State, Data, Result),
loop_event_actions(
- Parent, Debug, S, NewTimerRefs, NewTimerTypes,
- Events, Event, NextState, NewData, Actions);
+ Parent, Debug, NewS,
+ Events, Event, NextState, NewData, Actions, EnterCall);
{Class,Reason,Stacktrace} ->
terminate(
- Class, Reason, Stacktrace, Debug, S, [Event|Events])
+ Class, Reason, Stacktrace, Debug, S,
+ [Event|Events])
end.
loop_event_actions(
Parent, Debug,
- #{state := State, state_enter := StateEnter} = S, TimerRefs, TimerTypes,
- Events, Event, NextState, NewData, Actions) ->
+ #{state := State, state_enter := StateEnter} = S,
+ Events, Event, NextState, NewData,
+ Actions, EnterCall) ->
+ %% Hibernate is reborn here as false, which is
+ %% the default value from parse_actions/4
case parse_actions(Debug, S, State, Actions) of
{ok,NewDebug,Hibernate,TimeoutsR,Postpone,NextEventsR} ->
if
- StateEnter, NextState =/= State ->
+ StateEnter, EnterCall ->
loop_event_enter(
- Parent, NewDebug, S, TimerRefs, TimerTypes,
+ Parent, NewDebug, S,
Events, Event, NextState, NewData,
Hibernate, TimeoutsR, Postpone, NextEventsR);
- StateEnter ->
- case maps:is_key(init_state, S) of
- true ->
- %% Avoid infinite loop in initial state
- %% with state entry events
- NewS = maps:remove(init_state, S),
- loop_event_enter(
- Parent, NewDebug, NewS, TimerRefs, TimerTypes,
- Events, Event, NextState, NewData,
- Hibernate, TimeoutsR, Postpone, NextEventsR);
- false ->
- loop_event_result(
- Parent, NewDebug, S, TimerRefs, TimerTypes,
- Events, Event, NextState, NewData,
- Hibernate, TimeoutsR, Postpone, NextEventsR)
- end;
true ->
loop_event_result(
- Parent, NewDebug, S, TimerRefs, TimerTypes,
+ Parent, NewDebug, S,
Events, Event, NextState, NewData,
Hibernate, TimeoutsR, Postpone, NextEventsR)
end;
{Class,Reason,Stacktrace} ->
terminate(
- Class, Reason, Stacktrace,
- Debug, S#{data := NewData}, [Event|Events])
+ Class, Reason, Stacktrace, Debug, S,
+ [Event|Events])
end.
loop_event_enter(
- Parent, Debug, #{state := State} = S, TimerRefs, TimerTypes,
+ Parent, Debug, #{state := State} = S,
Events, Event, NextState, NewData,
Hibernate, TimeoutsR, Postpone, NextEventsR) ->
case call_state_function(S, enter, State, NextState, NewData) of
{ok,Result,NewS} ->
- {NewerData,_,Actions} =
- parse_event_result(
- false, Debug, NewS, Result,
- Events, Event, NextState, NewData),
- loop_event_enter_actions(
- Parent, Debug, NewS, TimerRefs, TimerTypes,
- Events, Event, NextState, NewerData,
- Hibernate, TimeoutsR, Postpone, NextEventsR, Actions);
+ case parse_event_result(
+ false, Debug, NewS,
+ Events, Event, NextState, NewData, Result) of
+ {_,NewerData,Actions,EnterCall} ->
+ loop_event_enter_actions(
+ Parent, Debug, NewS,
+ Events, Event, NextState, NewerData,
+ Hibernate, TimeoutsR, Postpone, NextEventsR,
+ Actions, EnterCall)
+ end;
{Class,Reason,Stacktrace} ->
terminate(
- Class, Reason, Stacktrace,
- Debug, S#{state := NextState, data := NewData},
+ Class, Reason, Stacktrace, Debug,
+ S#{
+ state := NextState,
+ data := NewData,
+ hibernate := Hibernate},
[Event|Events])
end.
loop_event_enter_actions(
- Parent, Debug, S, TimerRefs, TimerTypes,
+ Parent, Debug, #{state_enter := StateEnter} = S,
Events, Event, NextState, NewData,
- Hibernate, TimeoutsR, Postpone, NextEventsR, Actions) ->
+ Hibernate, TimeoutsR, Postpone, NextEventsR,
+ Actions, EnterCall) ->
case
parse_enter_actions(
- Debug, S, NextState, Actions,
- Hibernate, TimeoutsR)
+ Debug, S, NextState, Actions, Hibernate, TimeoutsR)
of
{ok,NewDebug,NewHibernate,NewTimeoutsR,_,_} ->
- loop_event_result(
- Parent, NewDebug, S, TimerRefs, TimerTypes,
- Events, Event, NextState, NewData,
- NewHibernate, NewTimeoutsR, Postpone, NextEventsR);
+ if
+ StateEnter, EnterCall ->
+ loop_event_enter(
+ Parent, NewDebug, S,
+ Events, Event, NextState, NewData,
+ NewHibernate, NewTimeoutsR, Postpone, NextEventsR);
+ true ->
+ loop_event_result(
+ Parent, NewDebug, S,
+ Events, Event, NextState, NewData,
+ NewHibernate, NewTimeoutsR, Postpone, NextEventsR)
+ end;
{Class,Reason,Stacktrace} ->
terminate(
- Class, Reason, Stacktrace,
- Debug, S#{state := NextState, data := NewData},
+ Class, Reason, Stacktrace, Debug,
+ S#{
+ state := NextState,
+ data := NewData,
+ hibernate := Hibernate},
[Event|Events])
end.
loop_event_result(
- Parent, Debug,
- #{state := State, postponed := P_0} = S, TimerRefs_0, TimerTypes_0,
- Events, Event, NextState, NewData,
+ Parent, Debug_0,
+ #{state := State, postponed := P_0,
+ timer_refs := TimerRefs_0, timer_types := TimerTypes_0,
+ cancel_timers := CancelTimers_0} = S_0,
+ Events_0, Event_0, NextState, NewData,
Hibernate, TimeoutsR, Postpone, NextEventsR) ->
%%
%% All options have been collected and next_events are buffered.
%% Do the actual state transition.
%%
- {NewDebug,P_1} = % Move current event to postponed if Postpone
+ {Debug_1,P_1} = % Move current event to postponed if Postpone
case Postpone of
true ->
- {sys_debug(Debug, S, State, {postpone,Event,State}),
- [Event|P_0]};
+ {sys_debug(Debug_0, S_0, State, {postpone,Event_0,State}),
+ [Event_0|P_0]};
false ->
- {sys_debug(Debug, S, State, {consume,Event,State}),
+ {sys_debug(Debug_0, S_0, State, {consume,Event_0,State}),
P_0}
end,
- {Events_1,NewP,{TimerRefs_1,TimerTypes_1}} =
+ {Events_1,P_2,{TimerTypes_1,CancelTimers_1}} =
%% Move all postponed events to queue and cancel the
%% state timeout if the state changes
if
NextState =:= State ->
- {Events,P_1,{TimerRefs_0,TimerTypes_0}};
+ {Events_0,P_1,{TimerTypes_0,CancelTimers_0}};
true ->
- {lists:reverse(P_1, Events),[],
+ {lists:reverse(P_1, Events_0),
+ [],
cancel_timer_by_type(
- state_timeout, TimerRefs_0, TimerTypes_0)}
+ state_timeout, TimerTypes_0, CancelTimers_0)}
+ %% The state timer is removed from TimerTypes_1
+ %% but remains in TimerRefs_0 until we get
+ %% the cancel_timer msg
end,
- {TimerRefs_2,TimerTypes_2,TimeoutEvents} =
- %% Stop and start timers non-event timers
- parse_timers(TimerRefs_1, TimerTypes_1, TimeoutsR),
+ {TimerRefs_2,TimerTypes_2,CancelTimers_2,TimeoutEvents} =
+ %% Stop and start non-event timers
+ parse_timers(TimerRefs_0, TimerTypes_1, CancelTimers_1, TimeoutsR),
%% Place next events last in reversed queue
Events_2R = lists:reverse(Events_1, NextEventsR),
%% Enqueue immediate timeout events and start event timer
- {NewTimerRefs,NewTimerTypes,Events_3R} =
- process_timeout_events(
- TimerRefs_2, TimerTypes_2, TimeoutEvents, Events_2R),
- NewEvents = lists:reverse(Events_3R),
- loop_events(
- Parent, NewDebug, S, NewTimerRefs, NewTimerTypes,
- NewEvents, Hibernate, NextState, NewData, NewP).
-
-%% Loop until out of enqueued events
-%%
-loop_events(
- Parent, Debug, S, TimerRefs, TimerTypes,
- [] = _Events, Hibernate, State, Data, P) ->
- %% Update S and loop back to loop/3 to receive a new event
- NewS =
- S#{
- state := State,
- data := Data,
- postponed := P,
- hibernate => Hibernate,
- timer_refs => TimerRefs,
- timer_types => TimerTypes},
- loop(Parent, Debug, NewS);
-loop_events(
- Parent, Debug, S, TimerRefs, TimerTypes,
- [Event|Events], Hibernate, State, Data, P) ->
- %% Update S and continue with enqueued events
- NewS =
- S#{
- state := State,
- data := Data,
- postponed := P},
- loop_event(
- Parent, Debug, NewS, TimerRefs, TimerTypes, Events, Event, Hibernate).
-
+ Events_3R = prepend_timeout_events(TimeoutEvents, Events_2R),
+ S_1 =
+ S_0#{
+ state := NextState,
+ data := NewData,
+ postponed := P_2,
+ timer_refs := TimerRefs_2,
+ timer_types := TimerTypes_2,
+ cancel_timers := CancelTimers_2,
+ hibernate := Hibernate},
+ case lists:reverse(Events_3R) of
+ [] ->
+ %% Get a new event
+ loop(Parent, Debug_1, S_1);
+ [Event|Events] ->
+ %% Loop until out of enqueued events
+ loop_event(Parent, Debug_1, S_1, Events, Event, Hibernate)
+ end.
%%---------------------------------------------------------------------------
@@ -1069,19 +1163,6 @@ call_callback_mode(#{module := Module} = S) ->
catch
CallbackMode ->
callback_mode_result(S, CallbackMode);
- error:undef ->
- %% Process undef to check for the simple mistake
- %% of calling a nonexistent state function
- %% to make the undef more precise
- case erlang:get_stacktrace() of
- [{Module,callback_mode,[]=Args,_}
- |Stacktrace] ->
- {error,
- {undef_callback,{Module,callback_mode,Args}},
- Stacktrace};
- Stacktrace ->
- {error,undef,Stacktrace}
- end;
Class:Reason ->
{Class,Reason,erlang:get_stacktrace()}
end.
@@ -1126,8 +1207,7 @@ parse_callback_mode(_, _CBMode, StateEnter) ->
call_state_function(
- #{callback_mode := undefined} = S,
- Type, Content, State, Data) ->
+ #{callback_mode := undefined} = S, Type, Content, State, Data) ->
case call_callback_mode(S) of
{ok,NewS} ->
call_state_function(NewS, Type, Content, State, Data);
@@ -1135,13 +1215,12 @@ call_state_function(
Error
end;
call_state_function(
- #{callback_mode := CallbackMode,
- module := Module} = S,
+ #{callback_mode := CallbackMode, module := Module} = S,
Type, Content, State, Data) ->
try
case CallbackMode of
state_functions ->
- erlang:apply(Module, State, [Type,Content,Data]);
+ Module:State(Type, Content, Data);
handle_event_function ->
Module:handle_event(Type, Content, State, Data)
end
@@ -1151,41 +1230,6 @@ call_state_function(
catch
Result ->
{ok,Result,S};
- error:badarg ->
- case erlang:get_stacktrace() of
- [{erlang,apply,
- [Module,State,[Type,Content,Data]=Args],
- _}
- |Stacktrace]
- when CallbackMode =:= state_functions ->
- %% We get here e.g if apply fails
- %% due to State not being an atom
- {error,
- {undef_state_function,{Module,State,Args}},
- Stacktrace};
- Stacktrace ->
- {error,badarg,Stacktrace}
- end;
- error:undef ->
- %% Process undef to check for the simple mistake
- %% of calling a nonexistent state function
- %% to make the undef more precise
- case erlang:get_stacktrace() of
- [{Module,State,[Type,Content,Data]=Args,_}
- |Stacktrace]
- when CallbackMode =:= state_functions ->
- {error,
- {undef_state_function,{Module,State,Args}},
- Stacktrace};
- [{Module,handle_event,[Type,Content,State,Data]=Args,_}
- |Stacktrace]
- when CallbackMode =:= handle_event_function ->
- {error,
- {undef_state_function,{Module,handle_event,Args}},
- Stacktrace};
- Stacktrace ->
- {error,undef,Stacktrace}
- end;
Class:Reason ->
{Class,Reason,erlang:get_stacktrace()}
end.
@@ -1193,65 +1237,83 @@ call_state_function(
%% Interpret all callback return variants
parse_event_result(
- AllowStateChange, Debug, S, Result, Events, Event, State, Data) ->
+ AllowStateChange, Debug, S,
+ Events, Event, State, Data, Result) ->
case Result of
stop ->
terminate(
- exit, normal, ?STACKTRACE(), Debug, S, [Event|Events]);
+ exit, normal, ?STACKTRACE(), Debug,
+ S#{state := State, data := Data},
+ [Event|Events]);
{stop,Reason} ->
terminate(
- exit, Reason, ?STACKTRACE(), Debug, S, [Event|Events]);
+ exit, Reason, ?STACKTRACE(), Debug,
+ S#{state := State, data := Data},
+ [Event|Events]);
{stop,Reason,NewData} ->
terminate(
- exit, Reason, ?STACKTRACE(),
- Debug, S#{data := NewData}, [Event|Events]);
+ exit, Reason, ?STACKTRACE(), Debug,
+ S#{state := State, data := NewData},
+ [Event|Events]);
+ %%
{stop_and_reply,Reason,Replies} ->
- Q = [Event|Events],
reply_then_terminate(
- exit, Reason, ?STACKTRACE(),
- Debug, S, Q, Replies);
+ exit, Reason, ?STACKTRACE(), Debug,
+ S#{state := State, data := Data},
+ [Event|Events], Replies);
{stop_and_reply,Reason,Replies,NewData} ->
- Q = [Event|Events],
reply_then_terminate(
- exit, Reason, ?STACKTRACE(),
- Debug, S#{data := NewData}, Q, Replies);
+ exit, Reason, ?STACKTRACE(), Debug,
+ S#{state := State, data := NewData},
+ [Event|Events], Replies);
+ %%
{next_state,State,NewData} ->
- {NewData,State,[]};
+ {State,NewData,[],false};
{next_state,NextState,NewData} when AllowStateChange ->
- {NewData,NextState,[]};
+ {NextState,NewData,[],true};
{next_state,State,NewData,Actions} ->
- {NewData,State,Actions};
+ {State,NewData,Actions,false};
{next_state,NextState,NewData,Actions} when AllowStateChange ->
- {NewData,NextState,Actions};
+ {NextState,NewData,Actions,true};
+ %%
{keep_state,NewData} ->
- {NewData,State,[]};
+ {State,NewData,[],false};
{keep_state,NewData,Actions} ->
- {NewData,State,Actions};
+ {State,NewData,Actions,false};
keep_state_and_data ->
- {Data,State,[]};
+ {State,Data,[],false};
{keep_state_and_data,Actions} ->
- {Data,State,Actions};
+ {State,Data,Actions,false};
+ %%
+ {repeat_state,NewData} ->
+ {State,NewData,[],true};
+ {repeat_state,NewData,Actions} ->
+ {State,NewData,Actions,true};
+ repeat_state_and_data ->
+ {State,Data,[],true};
+ {repeat_state_and_data,Actions} ->
+ {State,Data,Actions,true};
+ %%
_ ->
terminate(
error,
{bad_return_from_state_function,Result},
- ?STACKTRACE(),
- Debug, S, [Event|Events])
+ ?STACKTRACE(), Debug,
+ S#{state := State, data := Data},
+ [Event|Events])
end.
-parse_enter_actions(
- Debug, S, State, Actions,
- Hibernate, TimeoutsR) ->
+parse_enter_actions(Debug, S, State, Actions, Hibernate, TimeoutsR) ->
Postpone = forbidden,
NextEventsR = forbidden,
parse_actions(
Debug, S, State, listify(Actions),
Hibernate, TimeoutsR, Postpone, NextEventsR).
-
+
parse_actions(Debug, S, State, Actions) ->
Hibernate = false,
- TimeoutsR = [],
+ TimeoutsR = [{timeout,infinity,infinity}], %% Will cancel event timer
Postpone = false,
NextEventsR = [],
parse_actions(
@@ -1279,64 +1341,29 @@ parse_actions(
{bad_action_from_state_function,Action},
?STACKTRACE()}
end;
+ %%
%% Actions that set options
{hibernate,NewHibernate} when is_boolean(NewHibernate) ->
parse_actions(
Debug, S, State, Actions,
NewHibernate, TimeoutsR, Postpone, NextEventsR);
- {hibernate,_} ->
- {error,
- {bad_action_from_state_function,Action},
- ?STACKTRACE()};
hibernate ->
+ NewHibernate = true,
parse_actions(
Debug, S, State, Actions,
- true, TimeoutsR, Postpone, NextEventsR);
- {state_timeout,Time,_} = StateTimeout
- when is_integer(Time), Time >= 0;
- Time =:= infinity ->
- parse_actions(
- Debug, S, State, Actions,
- Hibernate, [StateTimeout|TimeoutsR], Postpone, NextEventsR);
- {state_timeout,_,_} ->
- {error,
- {bad_action_from_state_function,Action},
- ?STACKTRACE()};
- {timeout,infinity,_} ->
- %% Ignore - timeout will never happen and already cancelled
- parse_actions(
- Debug, S, State, Actions,
- Hibernate, TimeoutsR, Postpone, NextEventsR);
- {timeout,Time,_} = Timeout when is_integer(Time), Time >= 0 ->
- parse_actions(
- Debug, S, State, Actions,
- Hibernate, [Timeout|TimeoutsR], Postpone, NextEventsR);
- {timeout,_,_} ->
- {error,
- {bad_action_from_state_function,Action},
- ?STACKTRACE()};
- infinity -> % Ignore - timeout will never happen
- parse_actions(
- Debug, S, State, Actions,
- Hibernate, TimeoutsR, Postpone, NextEventsR);
- Time when is_integer(Time), Time >= 0 ->
- Timeout = {timeout,Time,Time},
- parse_actions(
- Debug, S, State, Actions,
- Hibernate, [Timeout|TimeoutsR], Postpone, NextEventsR);
+ NewHibernate, TimeoutsR, Postpone, NextEventsR);
+ %%
{postpone,NewPostpone}
when is_boolean(NewPostpone), Postpone =/= forbidden ->
parse_actions(
Debug, S, State, Actions,
Hibernate, TimeoutsR, NewPostpone, NextEventsR);
- {postpone,_} ->
- {error,
- {bad_action_from_state_function,Action},
- ?STACKTRACE()};
postpone when Postpone =/= forbidden ->
+ NewPostpone = true,
parse_actions(
Debug, S, State, Actions,
- Hibernate, TimeoutsR, true, NextEventsR);
+ Hibernate, TimeoutsR, NewPostpone, NextEventsR);
+ %%
{next_event,Type,Content} ->
case event_type(Type) of
true when NextEventsR =/= forbidden ->
@@ -1351,96 +1378,150 @@ parse_actions(
{bad_action_from_state_function,Action},
?STACKTRACE()}
end;
- _ ->
+ %%
+ {state_timeout,_,_} = Timeout ->
+ parse_actions_timeout(
+ Debug, S, State, Actions,
+ Hibernate, TimeoutsR, Postpone, NextEventsR, Timeout);
+ {timeout,_,_} = Timeout ->
+ parse_actions_timeout(
+ Debug, S, State, Actions,
+ Hibernate, TimeoutsR, Postpone, NextEventsR, Timeout);
+ Time ->
+ parse_actions_timeout(
+ Debug, S, State, Actions,
+ Hibernate, TimeoutsR, Postpone, NextEventsR, Time)
+ end.
+
+parse_actions_timeout(
+ Debug, S, State, Actions,
+ Hibernate, TimeoutsR, Postpone, NextEventsR, Timeout) ->
+ Time =
+ case Timeout of
+ {_,T,_} -> T;
+ T -> T
+ end,
+ case validate_time(Time) of
+ true ->
+ parse_actions(
+ Debug, S, State, Actions,
+ Hibernate, [Timeout|TimeoutsR],
+ Postpone, NextEventsR);
+ false ->
{error,
- {bad_action_from_state_function,Action},
+ {bad_action_from_state_function,Timeout},
?STACKTRACE()}
end.
+validate_time(Time) when is_integer(Time), Time >= 0 -> true;
+validate_time(infinity) -> true;
+validate_time(_) -> false.
%% Stop and start timers as well as create timeout zero events
%% and pending event timer
%%
%% Stop and start timers non-event timers
-parse_timers(TimerRefs, TimerTypes, TimeoutsR) ->
- parse_timers(TimerRefs, TimerTypes, TimeoutsR, #{}, []).
+parse_timers(TimerRefs, TimerTypes, CancelTimers, TimeoutsR) ->
+ parse_timers(TimerRefs, TimerTypes, CancelTimers, TimeoutsR, #{}, []).
%%
-parse_timers(TimerRefs, TimerTypes, [], _Seen, TimeoutEvents) ->
- {TimerRefs,TimerTypes,TimeoutEvents};
parse_timers(
- TimerRefs, TimerTypes, [Timeout|TimeoutsR], Seen, TimeoutEvents) ->
- {TimerType,Time,TimerMsg} = Timeout,
+ TimerRefs, TimerTypes, CancelTimers, [], _Seen, TimeoutEvents) ->
+ {TimerRefs,TimerTypes,CancelTimers,TimeoutEvents};
+parse_timers(
+ TimerRefs, TimerTypes, CancelTimers, [Timeout|TimeoutsR],
+ Seen, TimeoutEvents) ->
+ case Timeout of
+ {TimerType,Time,TimerMsg} ->
+ parse_timers(
+ TimerRefs, TimerTypes, CancelTimers, TimeoutsR,
+ Seen, TimeoutEvents,
+ TimerType, Time, TimerMsg);
+ Time ->
+ parse_timers(
+ TimerRefs, TimerTypes, CancelTimers, TimeoutsR,
+ Seen, TimeoutEvents,
+ timeout, Time, Time)
+ end.
+
+parse_timers(
+ TimerRefs, TimerTypes, CancelTimers, TimeoutsR,
+ Seen, TimeoutEvents,
+ TimerType, Time, TimerMsg) ->
case Seen of
#{TimerType := _} ->
%% Type seen before - ignore
parse_timers(
- TimerRefs, TimerTypes, TimeoutsR, Seen, TimeoutEvents);
+ TimerRefs, TimerTypes, CancelTimers, TimeoutsR,
+ Seen, TimeoutEvents);
#{} ->
%% Unseen type - handle
NewSeen = Seen#{TimerType => true},
- %% Cancel any running timer
- {NewTimerRefs,NewTimerTypes} =
- cancel_timer_by_type(TimerType, TimerRefs, TimerTypes),
- if
- Time =:= infinity ->
- %% Ignore - timer will never fire
+ case Time of
+ infinity ->
+ %% Cancel any running timer
+ {NewTimerTypes,NewCancelTimers} =
+ cancel_timer_by_type(
+ TimerType, TimerTypes, CancelTimers),
parse_timers(
- NewTimerRefs, NewTimerTypes, TimeoutsR,
+ TimerRefs, NewTimerTypes, NewCancelTimers, TimeoutsR,
NewSeen, TimeoutEvents);
- TimerType =:= timeout ->
- %% Handle event timer later
- parse_timers(
- NewTimerRefs, NewTimerTypes, TimeoutsR,
- NewSeen, [Timeout|TimeoutEvents]);
- Time =:= 0 ->
+ 0 ->
+ %% Cancel any running timer
+ {NewTimerTypes,NewCancelTimers} =
+ cancel_timer_by_type(
+ TimerType, TimerTypes, CancelTimers),
%% Handle zero time timeouts later
TimeoutEvent = {TimerType,TimerMsg},
parse_timers(
- NewTimerRefs, NewTimerTypes, TimeoutsR,
+ TimerRefs, NewTimerTypes, NewCancelTimers, TimeoutsR,
NewSeen, [TimeoutEvent|TimeoutEvents]);
- true ->
- %% Start a new timer
- TimerRef = erlang:start_timer(Time, self(), TimerMsg),
- parse_timers(
- NewTimerRefs#{TimerRef => TimerType},
- NewTimerTypes#{TimerType => TimerRef},
- TimeoutsR, NewSeen, TimeoutEvents)
+ _ ->
+ %% (Re)start the timer
+ TimerRef =
+ erlang:start_timer(Time, self(), TimerMsg),
+ case TimerTypes of
+ #{TimerType := OldTimerRef} ->
+ %% Cancel the running timer
+ cancel_timer(OldTimerRef),
+ NewCancelTimers = CancelTimers + 1,
+ %% Insert the new timer into
+ %% both TimerRefs and TimerTypes
+ parse_timers(
+ TimerRefs#{TimerRef => TimerType},
+ TimerTypes#{TimerType => TimerRef},
+ NewCancelTimers, TimeoutsR,
+ NewSeen, TimeoutEvents);
+ #{} ->
+ parse_timers(
+ TimerRefs#{TimerRef => TimerType},
+ TimerTypes#{TimerType => TimerRef},
+ CancelTimers, TimeoutsR,
+ NewSeen, TimeoutEvents)
+ end
end
end.
-%% Enqueue immediate timeout events and start event timer
-process_timeout_events(TimerRefs, TimerTypes, [], EventsR) ->
- {TimerRefs, TimerTypes, EventsR};
-process_timeout_events(
- TimerRefs, TimerTypes,
- [{timeout,0,TimerMsg}|TimeoutEvents], []) ->
- %% No enqueued events - insert a timeout zero event
- TimeoutEvent = {timeout,TimerMsg},
- process_timeout_events(
- TimerRefs, TimerTypes,
- TimeoutEvents, [TimeoutEvent]);
-process_timeout_events(
- TimerRefs, TimerTypes,
- [{timeout,Time,TimerMsg}], []) ->
- %% No enqueued events - start event timer
- TimerRef = erlang:start_timer(Time, self(), TimerMsg),
- process_timeout_events(
- TimerRefs#{TimerRef => timeout}, TimerTypes#{timeout => TimerRef},
- [], []);
-process_timeout_events(
- TimerRefs, TimerTypes,
- [{timeout,_Time,_TimerMsg}|TimeoutEvents], EventsR) ->
- %% There will be some other event so optimize by not starting
- %% an event timer to just have to cancel it again
- process_timeout_events(
- TimerRefs, TimerTypes,
- TimeoutEvents, EventsR);
-process_timeout_events(
- TimerRefs, TimerTypes,
- [{_TimeoutType,_TimeoutMsg} = TimeoutEvent|TimeoutEvents], EventsR) ->
- process_timeout_events(
- TimerRefs, TimerTypes,
- TimeoutEvents, [TimeoutEvent|EventsR]).
+%% Enqueue immediate timeout events (timeout 0 events)
+%%
+%% Event timer timeout 0 events get special treatment since
+%% an event timer is cancelled by any received event,
+%% so if there are enqueued events before the event timer
+%% timeout 0 event, the event timer is cancelled and hence there is no event.
+%%
+%% Other (state_timeout) timeout 0 events that are after
+%% the event timer timeout 0 events are considered to
+%% belong to timers that were started after the event timer
+%% timeout 0 event fired, so they do not cancel the event timer.
+%%
+prepend_timeout_events([], EventsR) ->
+ EventsR;
+prepend_timeout_events([{timeout,_} = TimeoutEvent|TimeoutEvents], []) ->
+ prepend_timeout_events(TimeoutEvents, [TimeoutEvent]);
+prepend_timeout_events([{timeout,_}|TimeoutEvents], EventsR) ->
+ prepend_timeout_events(TimeoutEvents, EventsR);
+prepend_timeout_events([TimeoutEvent|TimeoutEvents], EventsR) ->
+ %% Just prepend all others
+ prepend_timeout_events(TimeoutEvents, [TimeoutEvent|EventsR]).
@@ -1448,18 +1529,11 @@ process_timeout_events(
%% Server helpers
reply_then_terminate(
- Class, Reason, Stacktrace,
- Debug, #{state := State} = S, Q, Replies) ->
- if
- is_list(Replies) ->
- do_reply_then_terminate(
- Class, Reason, Stacktrace,
- Debug, S, Q, Replies, State);
- true ->
- do_reply_then_terminate(
- Class, Reason, Stacktrace,
- Debug, S, Q, [Replies], State)
- end.
+ Class, Reason, Stacktrace, Debug,
+ #{state := State} = S, Q, Replies) ->
+ do_reply_then_terminate(
+ Class, Reason, Stacktrace, Debug,
+ S, Q, listify(Replies), State).
%%
do_reply_then_terminate(
Class, Reason, Stacktrace, Debug, S, Q, [], _State) ->
@@ -1485,21 +1559,25 @@ do_reply(Debug, S, State, From, Reply) ->
terminate(
- Class, Reason, Stacktrace,
- Debug,
+ Class, Reason, Stacktrace, Debug,
#{module := Module, state := State, data := Data, postponed := P} = S,
Q) ->
- try Module:terminate(Reason, State, Data) of
- _ -> ok
- catch
- _ -> ok;
- C:R ->
- ST = erlang:get_stacktrace(),
- error_info(
- C, R, ST, S, Q, P,
- format_status(terminate, get(), S)),
- sys:print_log(Debug),
- erlang:raise(C, R, ST)
+ case erlang:function_exported(Module, terminate, 3) of
+ true ->
+ try Module:terminate(Reason, State, Data) of
+ _ -> ok
+ catch
+ _ -> ok;
+ C:R ->
+ ST = erlang:get_stacktrace(),
+ error_info(
+ C, R, ST, S, Q, P,
+ format_status(terminate, get(), S)),
+ sys:print_log(Debug),
+ erlang:raise(C, R, ST)
+ end;
+ false ->
+ ok
end,
_ =
case Reason of
@@ -1637,28 +1715,21 @@ listify(Item) ->
[Item].
%% Cancel timer if running, otherwise no op
-cancel_timer_by_type(TimerType, TimerRefs, TimerTypes) ->
+%%
+%% This is an asynchronous cancel so the timer is not really cancelled
+%% until we get a cancel_timer msg, i.e. {cancel_timer,TimerRef,_}.
+%% In the meantime we might get a timeout message.
+%%
+%% Remove the timer from TimerTypes.
+%% When we get the cancel_timer msg we remove it from TimerRefs.
+cancel_timer_by_type(TimerType, TimerTypes, CancelTimers) ->
case TimerTypes of
#{TimerType := TimerRef} ->
cancel_timer(TimerRef),
- {maps:remove(TimerRef, TimerRefs),
- maps:remove(TimerType, TimerTypes)};
+ {maps:remove(TimerType, TimerTypes),CancelTimers + 1};
#{} ->
- {TimerRefs,TimerTypes}
+ {TimerTypes,CancelTimers}
end.
-%%cancel_timer(undefined) ->
-%% ok;
-cancel_timer(TRef) ->
- case erlang:cancel_timer(TRef) of
- false ->
- %% We have to assume that TRef is the ref of a running timer
- %% and if so the timer has expired
- %% hence we must wait for the timeout message
- receive
- {timeout,TRef,_} ->
- ok
- end;
- _TimeLeft ->
- ok
- end.
+cancel_timer(TimerRef) ->
+ ok = erlang:cancel_timer(TimerRef, [{async,true}]).
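%% Illustrative sketch (not part of the patch) of the asynchronous cancel protocol
%% this code relies on: erlang:cancel_timer/2 with {async,true} returns ok at once
%% and later delivers a {cancel_timer,TimerRef,Result} message, while a late
%% {timeout,TimerRef,Msg} may still arrive first if the timer had already fired.
TimerRef = erlang:start_timer(1000, self(), ping),
ok = erlang:cancel_timer(TimerRef, [{async, true}]),
receive
    {cancel_timer, TimerRef, _TimeLeftOrFalse} -> cancelled;
    {timeout, TimerRef, ping} -> already_fired
end.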
diff --git a/lib/stdlib/src/io_lib.erl b/lib/stdlib/src/io_lib.erl
index ad98bc0420..a91143a764 100644
--- a/lib/stdlib/src/io_lib.erl
+++ b/lib/stdlib/src/io_lib.erl
@@ -28,7 +28,7 @@
%% Most of the code here is derived from the original prolog versions and
%% from similar code written by Joe Armstrong and myself.
%%
-%% This module has been split into seperate modules:
+%% This module has been split into separate modules:
%% io_lib - basic write and utilities
%% io_lib_format - formatted output
%% io_lib_fread - formatted input
diff --git a/lib/stdlib/src/io_lib_format.erl b/lib/stdlib/src/io_lib_format.erl
index c7b75961cb..3113767614 100644
--- a/lib/stdlib/src/io_lib_format.erl
+++ b/lib/stdlib/src/io_lib_format.erl
@@ -265,7 +265,10 @@ control($W, [A,Depth], F, Adj, P, Pad, _Enc, _Str, _I) when is_integer(Depth) ->
term(io_lib:write(A, Depth), F, Adj, P, Pad);
control($P, [A,Depth], F, Adj, P, Pad, Enc, Str, I) when is_integer(Depth) ->
print(A, Depth, F, Adj, P, Pad, Enc, Str, I);
-control($s, [A], F, Adj, P, Pad, _Enc, _Str, _I) when is_atom(A) ->
+control($s, [A], F, Adj, P, Pad, latin1, _Str, _I) when is_atom(A) ->
+ L = iolist_to_chars(atom_to_list(A)),
+ string(L, F, Adj, P, Pad);
+control($s, [A], F, Adj, P, Pad, unicode, _Str, _I) when is_atom(A) ->
string(atom_to_list(A), F, Adj, P, Pad);
control($s, [L0], F, Adj, P, Pad, latin1, _Str, _I) ->
L = iolist_to_chars(L0),
diff --git a/lib/stdlib/src/io_lib_pretty.erl b/lib/stdlib/src/io_lib_pretty.erl
index 94376408d1..aabccfc5d9 100644
--- a/lib/stdlib/src/io_lib_pretty.erl
+++ b/lib/stdlib/src/io_lib_pretty.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2016. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -139,6 +139,10 @@ max_cs(M, _Len) ->
M.
-define(ATM(T), is_list(element(1, T))).
+-define(ATM_PAIR(Pair),
+ ?ATM(element(2, element(1, Pair))) % Key
+ andalso
+ ?ATM(element(3, element(1, Pair)))). % Value
-define(ATM_FLD(Field), ?ATM(element(4, element(1, Field)))).
pp({_S, Len} = If, Col, Ll, M, _TInd, _Ind, LD, W)
@@ -151,9 +155,8 @@ pp({{tuple,true,L}, _Len}, Col, Ll, M, TInd, Ind, LD, W) ->
pp({{tuple,false,L}, _Len}, Col, Ll, M, TInd, Ind, LD, W) ->
[${, pp_list(L, Col + 1, Ll, M, TInd, indent(1, Ind), LD, $,, W + 1), $}];
pp({{map,Pairs},_Len}, Col, Ll, M, TInd, Ind, LD, W) ->
- [$#,${, pp_list(Pairs, Col + 2, Ll, M, TInd, indent(2, Ind), LD, $,, W + 1), $}];
-pp({{map_pair,K,V},_Len}, Col, Ll, M, TInd, Ind, LD, W) ->
- [pp(K, Col, Ll, M, TInd, Ind, LD, W), " => ", pp(V, Col, Ll, M, TInd, Ind, LD, W)];
+ [$#, ${, pp_map(Pairs, Col + 2, Ll, M, TInd, indent(2, Ind), LD, W + 1),
+ $}];
pp({{record,[{Name,NLen} | L]}, _Len}, Col, Ll, M, TInd, Ind, LD, W) ->
[Name, ${, pp_record(L, NLen, Col, Ll, M, TInd, Ind, LD, W + NLen+1), $}];
pp({{bin,S}, _Len}, Col, Ll, M, _TInd, Ind, LD, W) ->
@@ -178,6 +181,46 @@ pp_tag_tuple([{Tag,Tlen} | L], Col, Ll, M, TInd, Ind, LD, W) ->
[Tag, S | pp_list(L, Tcol, Ll, M, TInd, Indent, LD, S, W+Tlen+1)]
end.
+pp_map([], _Col, _Ll, _M, _TInd, _Ind, _LD, _W) ->
+ "";
+pp_map({dots, _}, _Col, _Ll, _M, _TInd, _Ind, _LD, _W) ->
+ "...";
+pp_map([P | Ps], Col, Ll, M, TInd, Ind, LD, W) ->
+ {PS, PW} = pp_pair(P, Col, Ll, M, TInd, Ind, last_depth(Ps, LD), W),
+ [PS | pp_pairs_tail(Ps, Col, Col + PW, Ll, M, TInd, Ind, LD, PW)].
+
+pp_pairs_tail([], _Col0, _Col, _Ll, _M, _TInd, _Ind, _LD, _W) ->
+ "";
+pp_pairs_tail({dots, _}, _Col0, _Col, _M, _Ll, _TInd, _Ind, _LD, _W) ->
+ ",...";
+pp_pairs_tail([{_, Len}=P | Ps], Col0, Col, Ll, M, TInd, Ind, LD, W) ->
+ LD1 = last_depth(Ps, LD),
+ ELen = 1 + Len,
+ if
+ LD1 =:= 0, ELen + 1 < Ll - Col, W + ELen + 1 =< M, ?ATM_PAIR(P);
+ LD1 > 0, ELen < Ll - Col - LD1, W + ELen + LD1 =< M, ?ATM_PAIR(P) ->
+ [$,, write_pair(P) |
+ pp_pairs_tail(Ps, Col0, Col+ELen, Ll, M, TInd, Ind, LD, W+ELen)];
+ true ->
+ {PS, PW} = pp_pair(P, Col0, Ll, M, TInd, Ind, LD1, 0),
+ [$,, $\n, Ind, PS |
+ pp_pairs_tail(Ps, Col0, Col0 + PW, Ll, M, TInd, Ind, LD, PW)]
+ end.
+
+pp_pair({_, Len}=Pair, Col, Ll, M, _TInd, _Ind, LD, W)
+ when Len < Ll - Col - LD, Len + W + LD =< M ->
+ {write_pair(Pair), if
+ ?ATM_PAIR(Pair) ->
+ Len;
+ true ->
+ Ll % force nl
+ end};
+pp_pair({{map_pair, K, V}, _Len}, Col0, Ll, M, TInd, Ind0, LD, W) ->
+ I = map_value_indent(TInd),
+ Ind = indent(I, Ind0),
+ {[pp(K, Col0, Ll, M, TInd, Ind0, LD, W), " =>\n",
+ Ind | pp(V, Col0 + I, Ll, M, TInd, Ind, LD, 0)], Ll}. % force nl
+
pp_record([], _Nlen, _Col, _Ll, _M, _TInd, _Ind, _LD, _W) ->
"";
pp_record({dots, _}, _Nlen, _Col, _Ll, _M, _TInd, _Ind, _LD, _W) ->
@@ -216,7 +259,11 @@ pp_field({_, Len}=Fl, Col, Ll, M, _TInd, _Ind, LD, W)
end};
pp_field({{field, Name, NameL, F}, _Len}, Col0, Ll, M, TInd, Ind0, LD, W0) ->
{Col, Ind, S, W} = rec_indent(NameL, TInd, Col0, Ind0, W0 + NameL),
- {[Name, " = ", S | pp(F, Col, Ll, M, TInd, Ind, LD, W)], Ll}. % force nl
+ Sep = case S of
+ [$\n | _] -> " =";
+ _ -> " = "
+ end,
+ {[Name, Sep, S | pp(F, Col, Ll, M, TInd, Ind, LD, W)], Ll}. % force nl
rec_indent(RInd, TInd, Col0, Ind0, W0) ->
%% this uses TInd
@@ -305,8 +352,8 @@ write({{list, L}, _}) ->
[$[, write_list(L, $|), $]];
write({{map, Pairs}, _}) ->
[$#,${, write_list(Pairs, $,), $}];
-write({{map_pair, K, V}, _}) ->
- [write(K)," => ",write(V)];
+write({{map_pair, _K, _V}, _}=Pair) ->
+ write_pair(Pair);
write({{record, [{Name,_} | L]}, _}) ->
[Name, ${, write_fields(L), $}];
write({{bin, S}, _}) ->
@@ -314,6 +361,9 @@ write({{bin, S}, _}) ->
write({S, _}) ->
S.
+write_pair({{map_pair, K, V}, _}) ->
+ [write(K), " => ", write(V)].
+
write_fields([]) ->
"";
write_fields({dots, _}) ->
@@ -347,7 +397,7 @@ write_tail(E, S) ->
%% The depth (D) is used for extracting and counting the characters to
%% print. The structure is kept so that the returned intermediate
-%% format can be formatted. The separators (list, tuple, record) are
+%% format can be formatted. The separators (list, tuple, record, map) are
%% counted but need to be added later.
%% D =/= 0
@@ -423,21 +473,22 @@ print_length(Term, _D, _RF, _Enc, _Str) ->
print_length_map(_Map, 1, _RF, _Enc, _Str) ->
{"#{...}", 6};
print_length_map(Map, D, RF, Enc, Str) when is_map(Map) ->
- Pairs = print_length_map_pairs(maps:to_list(Map), D, RF, Enc, Str),
+ Pairs = print_length_map_pairs(erts_internal:maps_to_list(Map, D), D, RF, Enc, Str),
{{map, Pairs}, list_length(Pairs, 3)}.
print_length_map_pairs([], _D, _RF, _Enc, _Str) ->
[];
print_length_map_pairs(_Pairs, 1, _RF, _Enc, _Str) ->
{dots, 3};
-print_length_map_pairs([{K,V}|Pairs], D, RF, Enc, Str) ->
- [print_length_map_pair(K,V,D-1,RF,Enc,Str) |
- print_length_map_pairs(Pairs,D-1,RF,Enc,Str)].
+print_length_map_pairs([{K, V} | Pairs], D, RF, Enc, Str) ->
+ [print_length_map_pair(K, V, D - 1, RF, Enc, Str) |
+ print_length_map_pairs(Pairs, D - 1, RF, Enc, Str)].
print_length_map_pair(K, V, D, RF, Enc, Str) ->
{KS, KL} = print_length(K, D, RF, Enc, Str),
{VS, VL} = print_length(V, D, RF, Enc, Str),
- {{map_pair, {KS,KL}, {VS,VL}}, KL + VL}.
+ KL1 = KL + 4,
+ {{map_pair, {KS, KL1}, {VS, VL}}, KL1 + VL}.
print_length_tuple(_Tuple, 1, _RF, _Enc, _Str) ->
{"{...}", 5};
@@ -630,6 +681,8 @@ cind({{tuple,true,L}, _Len}, Col, Ll, M, Ind, LD, W) ->
cind_tag_tuple(L, Col, Ll, M, Ind, LD, W + 1);
cind({{tuple,false,L}, _Len}, Col, Ll, M, Ind, LD, W) ->
cind_list(L, Col + 1, Ll, M, Ind, LD, W + 1);
+cind({{map,Pairs},_Len}, Col, Ll, M, Ind, LD, W) ->
+ cind_map(Pairs, Col + 2, Ll, M, Ind, LD, W + 2);
cind({{record,[{_Name,NLen} | L]}, _Len}, Col, Ll, M, Ind, LD, W) ->
cind_record(L, NLen, Col, Ll, M, Ind, LD, W + NLen + 1);
cind({{bin,_S}, _Len}, _Col, _Ll, _M, Ind, _LD, _W) ->
@@ -655,6 +708,48 @@ cind_tag_tuple([{_Tag,Tlen} | L], Col, Ll, M, Ind, LD, W) ->
throw(no_good)
end.
+cind_map([P | Ps], Col, Ll, M, Ind, LD, W) ->
+ PW = cind_pair(P, Col, Ll, M, Ind, last_depth(Ps, LD), W),
+ cind_pairs_tail(Ps, Col, Col + PW, Ll, M, Ind, LD, W + PW);
+cind_map(_, _Col, _Ll, _M, Ind, _LD, _W) ->
+ Ind.
+
+cind_pairs_tail([{_, Len}=P | Ps], Col0, Col, Ll, M, Ind, LD, W) ->
+ LD1 = last_depth(Ps, LD),
+ ELen = 1 + Len,
+ if
+ LD1 =:= 0, ELen + 1 < Ll - Col, W + ELen + 1 =< M, ?ATM_PAIR(P);
+ LD1 > 0, ELen < Ll - Col - LD1, W + ELen + LD1 =< M, ?ATM_PAIR(P) ->
+ cind_pairs_tail(Ps, Col0, Col + ELen, Ll, M, Ind, LD, W + ELen);
+ true ->
+ PW = cind_pair(P, Col0, Ll, M, Ind, LD1, 0),
+ cind_pairs_tail(Ps, Col0, Col0 + PW, Ll, M, Ind, LD, PW)
+ end;
+cind_pairs_tail(_, _Col0, _Col, _Ll, _M, Ind, _LD, _W) ->
+ Ind.
+
+cind_pair({{map_pair, _Key, _Value}, Len}=Pair, Col, Ll, M, _Ind, LD, W)
+ when Len < Ll - Col - LD, Len + W + LD =< M ->
+ if
+ ?ATM_PAIR(Pair) ->
+ Len;
+ true ->
+ Ll
+ end;
+cind_pair({{map_pair, K, V}, _Len}, Col0, Ll, M, Ind, LD, W0) ->
+ cind(K, Col0, Ll, M, Ind, LD, W0),
+ I = map_value_indent(Ind),
+ cind(V, Col0 + I, Ll, M, Ind, LD, 0),
+ Ll.
+
+map_value_indent(TInd) ->
+ case TInd > 0 of
+ true ->
+ TInd;
+ false ->
+ 4
+ end.
+
cind_record([F | Fs], Nlen, Col0, Ll, M, Ind, LD, W0) ->
Nind = Nlen + 1,
{Col, W} = cind_rec(Nind, Col0, Ll, M, Ind, W0),
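The pp_map/pp_pair and cind_map code above is what ~p and ~P now use for maps, with the depth-1 short-circuit fixed by the print_length_map/5 clause above. An illustrative shell check (expected output noted in comments, not asserted by the patch itself):

%% Illustrative only; expected output per the clauses above.
io:format("~P~n", [#{a => 1, b => 2}, 1]),  % prints #{...}
io:format("~p~n", [#{a => 1}]).             % prints #{a => 1}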
diff --git a/lib/stdlib/src/otp_internal.erl b/lib/stdlib/src/otp_internal.erl
index 4161ced9ab..2a0e3118d0 100644
--- a/lib/stdlib/src/otp_internal.erl
+++ b/lib/stdlib/src/otp_internal.erl
@@ -47,9 +47,6 @@ obsolete(Module, Name, Arity) ->
obsolete_1(net, _, _) ->
{deprecated, "module 'net' obsolete; use 'net_adm'"};
-obsolete_1(erlang, hash, 2) ->
- {deprecated, {erlang, phash2, 2}};
-
obsolete_1(erlang, now, 0) ->
{deprecated,
"Deprecated BIF. See the \"Time and Time Correction in Erlang\" "
@@ -408,7 +405,7 @@ obsolete_1(docb_xml_check, _, _) ->
%% Added in R15B
obsolete_1(asn1rt, F, _) when F == load_driver; F == unload_driver ->
- {deprecated,"deprecated (will be removed in OTP 18); has no effect as drivers are no longer used"};
+ {removed,"removed (will be removed in OTP 18); has no effect as drivers are no longer used"};
obsolete_1(ssl, pid, 1) ->
{removed,"was removed in R16; is no longer needed"};
obsolete_1(inviso, _, _) ->
@@ -463,21 +460,23 @@ obsolete_1(wxCursor, new, 4) ->
%% Added in OTP 17.
obsolete_1(asn1ct, decode,3) ->
- {deprecated,"deprecated; use Mod:decode/2 instead"};
+ {removed,"removed; use Mod:decode/2 instead"};
+obsolete_1(asn1ct, encode, 2) ->
+ {removed,"removed; use Mod:encode/2 instead"};
obsolete_1(asn1ct, encode, 3) ->
- {deprecated,"deprecated; use Mod:encode/2 instead"};
+ {removed,"removed; use Mod:encode/2 instead"};
obsolete_1(asn1rt, decode,3) ->
- {deprecated,"deprecated; use Mod:decode/2 instead"};
+ {removed,"removed; use Mod:decode/2 instead"};
obsolete_1(asn1rt, encode, 2) ->
- {deprecated,"deprecated; use Mod:encode/2 instead"};
+ {removed,"removed; use Mod:encode/2 instead"};
obsolete_1(asn1rt, encode, 3) ->
- {deprecated,"deprecated; use Mod:encode/2 instead"};
+ {removed,"removed; use Mod:encode/2 instead"};
obsolete_1(asn1rt, info, 1) ->
- {deprecated,"deprecated; use Mod:info/0 instead"};
+ {removed,"removed; use Mod:info/0 instead"};
obsolete_1(asn1rt, utf8_binary_to_list, 1) ->
- {deprecated,{unicode,characters_to_list,1}};
+ {removed,{unicode,characters_to_list,1},"OTP 20"};
obsolete_1(asn1rt, utf8_list_to_binary, 1) ->
- {deprecated,{unicode,characters_to_binary,1}};
+ {removed,{unicode,characters_to_binary,1},"OTP 20"};
%% Added in OTP 18.
obsolete_1(core_lib, get_anno, 1) ->
@@ -551,6 +550,20 @@ obsolete_1(overload, _, _) ->
obsolete_1(rpc, safe_multi_server_call, A) when A =:= 2; A =:= 3 ->
{removed, {rpc, multi_server_call, A}};
+%% Added in OTP 20.
+
+obsolete_1(filename, find_src, 1) ->
+ {deprecated, "deprecated; use filelib:find_source/1 instead"};
+obsolete_1(filename, find_src, 2) ->
+ {deprecated, "deprecated; use filelib:find_source/3 instead"};
+
+%% Removed in OTP 20.
+
+obsolete_1(erlang, hash, 2) ->
+ {removed, {erlang, phash2, 2}, "20.0"};
+
+%% not obsolete
+
obsolete_1(_, _, _) ->
no.
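The reclassifications above are what tools such as erl_lint and xref read back through the exported obsolete/3. A hedged sketch of the expected answers (wildcards are used because the exact wrapping of obsolete_1/3's return value is not shown in this hunk):

%% Illustrative only: erlang:hash/2 is now reported as removed (with
%% erlang:phash2/2 as its replacement); filename:find_src/1 as deprecated.
{removed, {erlang, phash2, 2}, _} = otp_internal:obsolete(erlang, hash, 2),
{deprecated, _} = otp_internal:obsolete(filename, find_src, 1).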
diff --git a/lib/stdlib/src/proplists.erl b/lib/stdlib/src/proplists.erl
index 21de8c45c1..340dfdcac9 100644
--- a/lib/stdlib/src/proplists.erl
+++ b/lib/stdlib/src/proplists.erl
@@ -83,7 +83,7 @@ property(Key, Value) ->
%% ---------------------------------------------------------------------
-%% @doc Unfolds all occurences of atoms in <code>ListIn</code> to tuples
+%% @doc Unfolds all occurrences of atoms in <code>ListIn</code> to tuples
%% <code>{Atom, true}</code>.
%%
%% @see compact/1
diff --git a/lib/stdlib/src/qlc.erl b/lib/stdlib/src/qlc.erl
index f3665824f2..8c4d835432 100644
--- a/lib/stdlib/src/qlc.erl
+++ b/lib/stdlib/src/qlc.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2004-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2004-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -1292,6 +1292,10 @@ abstr_term(Fun, Line) when is_function(Fun) ->
end;
abstr_term(PPR, Line) when is_pid(PPR); is_port(PPR); is_reference(PPR) ->
{special, Line, lists:flatten(io_lib:write(PPR))};
+abstr_term(Map, Line) when is_map(Map) ->
+ {map,Line,
+ [{map_field_assoc,Line,abstr_term(K, Line),abstr_term(V, Line)} ||
+ {K,V} <- maps:to_list(Map)]};
abstr_term(Simple, Line) ->
erl_parse:abstract(Simple, erl_anno:line(Line)).
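The new abstr_term/2 clause above encodes a map as a map construction whose fields are map_field_assoc nodes. A simplified, non-recursive sketch of the same encoding (unlike qlc's private helper it does not descend into funs, pids, ports or references):

%% Illustrative only; mirrors the shape produced by the clause above.
map_to_abstract(Map, Line) when is_map(Map) ->
    {map, Line,
     [{map_field_assoc, Line,
       erl_parse:abstract(K, Line),
       erl_parse:abstract(V, Line)} || {K, V} <- maps:to_list(Map)]}.

%% map_to_abstract(#{a => 1}, 0) gives
%%   {map,0,[{map_field_assoc,0,{atom,0,a},{integer,0,1}}]}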
diff --git a/lib/stdlib/src/shell_default.erl b/lib/stdlib/src/shell_default.erl
index cd63ab28b5..a0c1d98513 100644
--- a/lib/stdlib/src/shell_default.erl
+++ b/lib/stdlib/src/shell_default.erl
@@ -23,7 +23,7 @@
-module(shell_default).
--export([help/0,lc/1,c/1,c/2,nc/1,nl/1,l/1,i/0,pid/3,i/3,m/0,m/1,lm/0,mm/0,
+-export([help/0,lc/1,c/1,c/2,c/3,nc/1,nl/1,l/1,i/0,pid/3,i/3,m/0,m/1,lm/0,mm/0,
memory/0,memory/1,uptime/0,
erlangrc/1,bi/1, regs/0, flush/0,pwd/0,ls/0,ls/1,cd/1,
y/1, y/2,
@@ -72,6 +72,7 @@ bi(I) -> c:bi(I).
bt(Pid) -> c:bt(Pid).
c(File) -> c:c(File).
c(File, Opt) -> c:c(File, Opt).
+c(File, Opt, Filter) -> c:c(File, Opt, Filter).
cd(D) -> c:cd(D).
erlangrc(X) -> c:erlangrc(X).
flush() -> c:flush().
diff --git a/lib/stdlib/src/sofs.erl b/lib/stdlib/src/sofs.erl
index c244e06ca4..cc50e1b52c 100644
--- a/lib/stdlib/src/sofs.erl
+++ b/lib/stdlib/src/sofs.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2001-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2001-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -76,7 +76,7 @@
%%
%% See also "Naive Set Theory" by Paul R. Halmos.
%%
-%% By convention, erlang:error/2 is called from exported functions.
+%% By convention, erlang:error/1 is called from exported functions.
-define(TAG, 'Set').
-define(ORDTAG, 'OrdSet').
@@ -87,12 +87,6 @@
-define(LIST(S), (S)#?TAG.data).
-define(TYPE(S), (S)#?TAG.type).
-%%-define(SET(L, T),
-%% case is_type(T) of
-%% true -> #?TAG{data = L, type = T};
-%% false -> erlang:error(badtype, [T])
-%% end
-%% ).
-define(SET(L, T), #?TAG{data = L, type = T}).
-define(IS_SET(S), is_record(S, ?TAG)).
-define(IS_UNTYPED_SET(S), ?TYPE(S) =:= ?ANYTYPE).
@@ -154,11 +148,8 @@ from_term(T) ->
_ when is_list(T) -> [?ANYTYPE];
_ -> ?ANYTYPE
end,
- case catch setify(T, Type) of
- {'EXIT', _} ->
- erlang:error(badarg, [T]);
- Set ->
- Set
+ try setify(T, Type)
+ catch _:_ -> erlang:error(badarg)
end.
-spec(from_term(Term, Type) -> AnySet when
@@ -168,14 +159,11 @@ from_term(T) ->
from_term(L, T) ->
case is_type(T) of
true ->
- case catch setify(L, T) of
- {'EXIT', _} ->
- erlang:error(badarg, [L, T]);
- Set ->
- Set
+ try setify(L, T)
+ catch _:_ -> erlang:error(badarg)
end;
false ->
- erlang:error(badarg, [L, T])
+ erlang:error(badarg)
end.
-spec(from_external(ExternalSet, Type) -> AnySet when
@@ -208,33 +196,26 @@ is_type(_T) ->
Set :: a_set(),
Terms :: [term()]).
set(L) ->
- case catch usort(L) of
- {'EXIT', _} ->
- erlang:error(badarg, [L]);
- SL ->
- ?SET(SL, ?ATOM_TYPE)
+ try usort(L) of
+ SL -> ?SET(SL, ?ATOM_TYPE)
+ catch _:_ -> erlang:error(badarg)
end.
-spec(set(Terms, Type) -> Set when
Set :: a_set(),
Terms :: [term()],
Type :: type()).
-set(L, ?SET_OF(Type) = T) when ?IS_ATOM_TYPE(Type), Type =/= ?ANYTYPE ->
- case catch usort(L) of
- {'EXIT', _} ->
- erlang:error(badarg, [L, T]);
- SL ->
- ?SET(SL, Type)
+set(L, ?SET_OF(Type)) when ?IS_ATOM_TYPE(Type), Type =/= ?ANYTYPE ->
+ try usort(L) of
+ SL -> ?SET(SL, Type)
+ catch _:_ -> erlang:error(badarg)
end;
set(L, ?SET_OF(_) = T) ->
- case catch setify(L, T) of
- {'EXIT', _} ->
- erlang:error(badarg, [L, T]);
- Set ->
- Set
+ try setify(L, T)
+ catch _:_ -> erlang:error(badarg)
end;
-set(L, T) ->
- erlang:error(badarg, [L, T]).
+set(_, _) ->
+ erlang:error(badarg).
-spec(from_sets(ListOfSets) -> Set when
Set :: a_set(),
@@ -245,19 +226,19 @@ set(L, T) ->
from_sets(Ss) when is_list(Ss) ->
case set_of_sets(Ss, [], ?ANYTYPE) of
{error, Error} ->
- erlang:error(Error, [Ss]);
+ erlang:error(Error);
Set ->
Set
end;
from_sets(Tuple) when is_tuple(Tuple) ->
case ordset_of_sets(tuple_to_list(Tuple), [], []) of
error ->
- erlang:error(badarg, [Tuple]);
+ erlang:error(badarg);
Set ->
Set
end;
-from_sets(T) ->
- erlang:error(badarg, [T]).
+from_sets(_) ->
+ erlang:error(badarg).
-spec(relation(Tuples) -> Relation when
Relation :: relation(),
@@ -265,14 +246,11 @@ from_sets(T) ->
relation([]) ->
?SET([], ?BINREL(?ATOM_TYPE, ?ATOM_TYPE));
relation(Ts = [T | _]) when is_tuple(T) ->
- case catch rel(Ts, tuple_size(T)) of
- {'EXIT', _} ->
- erlang:error(badarg, [Ts]);
- Set ->
- Set
+ try rel(Ts, tuple_size(T))
+ catch _:_ -> erlang:error(badarg)
end;
-relation(E) ->
- erlang:error(badarg, [E]).
+relation(_) ->
+ erlang:error(badarg).
-spec(relation(Tuples, Type) -> Relation when
N :: integer(),
@@ -280,24 +258,20 @@ relation(E) ->
Relation :: relation(),
Tuples :: [tuple()]).
relation(Ts, TS) ->
- case catch rel(Ts, TS) of
- {'EXIT', _} ->
- erlang:error(badarg, [Ts, TS]);
- Set ->
- Set
+ try rel(Ts, TS)
+ catch _:_ -> erlang:error(badarg)
end.
-spec(a_function(Tuples) -> Function when
Function :: a_function(),
Tuples :: [tuple()]).
a_function(Ts) ->
- case catch func(Ts, ?BINREL(?ATOM_TYPE, ?ATOM_TYPE)) of
- {'EXIT', _} ->
- erlang:error(badarg, [Ts]);
+ try func(Ts, ?BINREL(?ATOM_TYPE, ?ATOM_TYPE)) of
Bad when is_atom(Bad) ->
- erlang:error(Bad, [Ts]);
- Set ->
- Set
+ erlang:error(Bad);
+ Set ->
+ Set
+ catch _:_ -> erlang:error(badarg)
end.
-spec(a_function(Tuples, Type) -> Function when
@@ -305,26 +279,24 @@ a_function(Ts) ->
Tuples :: [tuple()],
Type :: type()).
a_function(Ts, T) ->
- case catch a_func(Ts, T) of
- {'EXIT', _} ->
- erlang:error(badarg, [Ts, T]);
+ try a_func(Ts, T) of
Bad when is_atom(Bad) ->
- erlang:error(Bad, [Ts, T]);
+ erlang:error(Bad);
Set ->
Set
+ catch _:_ -> erlang:error(badarg)
end.
-spec(family(Tuples) -> Family when
Family :: family(),
Tuples :: [tuple()]).
family(Ts) ->
- case catch fam2(Ts, ?FAMILY(?ATOM_TYPE, ?ATOM_TYPE)) of
- {'EXIT', _} ->
- erlang:error(badarg, [Ts]);
+ try fam2(Ts, ?FAMILY(?ATOM_TYPE, ?ATOM_TYPE)) of
Bad when is_atom(Bad) ->
- erlang:error(Bad, [Ts]);
+ erlang:error(Bad);
Set ->
Set
+ catch _:_ -> erlang:error(badarg)
end.
-spec(family(Tuples, Type) -> Family when
@@ -332,13 +304,12 @@ family(Ts) ->
Tuples :: [tuple()],
Type :: type()).
family(Ts, T) ->
- case catch fam(Ts, T) of
- {'EXIT', _} ->
- erlang:error(badarg, [Ts, T]);
+ try fam(Ts, T) of
Bad when is_atom(Bad) ->
- erlang:error(Bad, [Ts, T]);
+ erlang:error(Bad);
Set ->
Set
+ catch _:_ -> erlang:error(badarg)
end.
%%%
@@ -373,7 +344,7 @@ to_sets(S) when ?IS_SET(S) ->
to_sets(S) when ?IS_ORDSET(S), is_tuple(?ORDTYPE(S)) ->
tuple_of_sets(tuple_to_list(?ORDDATA(S)), tuple_to_list(?ORDTYPE(S)), []);
to_sets(S) when ?IS_ORDSET(S) ->
- erlang:error(badarg, [S]).
+ erlang:error(badarg).
-spec(no_elements(ASet) -> NoElements when
ASet :: a_set() | ordset(),
@@ -383,7 +354,7 @@ no_elements(S) when ?IS_SET(S) ->
no_elements(S) when ?IS_ORDSET(S), is_tuple(?ORDTYPE(S)) ->
tuple_size(?ORDDATA(S));
no_elements(S) when ?IS_ORDSET(S) ->
- erlang:error(badarg, [S]).
+ erlang:error(badarg).
-spec(specification(Fun, Set1) -> Set2 when
Fun :: spec_fun(),
@@ -401,7 +372,7 @@ specification(Fun, S) when ?IS_SET(S) ->
SL when is_list(SL) ->
?SET(SL, Type);
Bad ->
- erlang:error(Bad, [Fun, S])
+ erlang:error(Bad)
end.
-spec(union(Set1, Set2) -> Set3 when
@@ -410,7 +381,7 @@ specification(Fun, S) when ?IS_SET(S) ->
Set3 :: a_set()).
union(S1, S2) when ?IS_SET(S1), ?IS_SET(S2) ->
case unify_types(?TYPE(S1), ?TYPE(S2)) of
- [] -> erlang:error(type_mismatch, [S1, S2]);
+ [] -> erlang:error(type_mismatch);
Type -> ?SET(umerge(?LIST(S1), ?LIST(S2)), Type)
end.
@@ -420,7 +391,7 @@ union(S1, S2) when ?IS_SET(S1), ?IS_SET(S2) ->
Set3 :: a_set()).
intersection(S1, S2) when ?IS_SET(S1), ?IS_SET(S2) ->
case unify_types(?TYPE(S1), ?TYPE(S2)) of
- [] -> erlang:error(type_mismatch, [S1, S2]);
+ [] -> erlang:error(type_mismatch);
Type -> ?SET(intersection(?LIST(S1), ?LIST(S2), []), Type)
end.
@@ -430,7 +401,7 @@ intersection(S1, S2) when ?IS_SET(S1), ?IS_SET(S2) ->
Set3 :: a_set()).
difference(S1, S2) when ?IS_SET(S1), ?IS_SET(S2) ->
case unify_types(?TYPE(S1), ?TYPE(S2)) of
- [] -> erlang:error(type_mismatch, [S1, S2]);
+ [] -> erlang:error(type_mismatch);
Type -> ?SET(difference(?LIST(S1), ?LIST(S2), []), Type)
end.
@@ -440,7 +411,7 @@ difference(S1, S2) when ?IS_SET(S1), ?IS_SET(S2) ->
Set3 :: a_set()).
symdiff(S1, S2) when ?IS_SET(S1), ?IS_SET(S2) ->
case unify_types(?TYPE(S1), ?TYPE(S2)) of
- [] -> erlang:error(type_mismatch, [S1, S2]);
+ [] -> erlang:error(type_mismatch);
Type -> ?SET(symdiff(?LIST(S1), ?LIST(S2), []), Type)
end.
@@ -452,7 +423,7 @@ symdiff(S1, S2) when ?IS_SET(S1), ?IS_SET(S2) ->
Set5 :: a_set()).
symmetric_partition(S1, S2) when ?IS_SET(S1), ?IS_SET(S2) ->
case unify_types(?TYPE(S1), ?TYPE(S2)) of
- [] -> erlang:error(type_mismatch, [S1, S2]);
+ [] -> erlang:error(type_mismatch);
Type -> sympart(?LIST(S1), ?LIST(S2), [], [], [], Type)
end.
@@ -477,11 +448,9 @@ product({S1, S2}) ->
product(S1, S2);
product(T) when is_tuple(T) ->
Ss = tuple_to_list(T),
- case catch sets_to_list(Ss) of
- {'EXIT', _} ->
- erlang:error(badarg, [T]);
+ try sets_to_list(Ss) of
[] ->
- erlang:error(badarg, [T]);
+ erlang:error(badarg);
L ->
Type = types(Ss, []),
case member([], L) of
@@ -490,6 +459,7 @@ product(T) when is_tuple(T) ->
false ->
?SET(reverse(prod(L, [], [])), Type)
end
+ catch _:_ -> erlang:error(badarg)
end.
-spec(constant_function(Set, AnySet) -> Function when
@@ -502,10 +472,10 @@ constant_function(S, E) when ?IS_SET(S) ->
{Type, true} ->
NType = ?BINREL(Type, type(E)),
?SET(constant_function(?LIST(S), to_external(E), []), NType);
- _ -> erlang:error(badarg, [S, E])
+ _ -> erlang:error(badarg)
end;
-constant_function(S, E) when ?IS_ORDSET(S) ->
- erlang:error(badarg, [S, E]).
+constant_function(S, _) when ?IS_ORDSET(S) ->
+ erlang:error(badarg).
-spec(is_equal(AnySet1, AnySet2) -> Bool when
AnySet1 :: anyset(),
@@ -514,17 +484,17 @@ constant_function(S, E) when ?IS_ORDSET(S) ->
is_equal(S1, S2) when ?IS_SET(S1), ?IS_SET(S2) ->
case match_types(?TYPE(S1), ?TYPE(S2)) of
true -> ?LIST(S1) == ?LIST(S2);
- false -> erlang:error(type_mismatch, [S1, S2])
+ false -> erlang:error(type_mismatch)
end;
is_equal(S1, S2) when ?IS_ORDSET(S1), ?IS_ORDSET(S2) ->
case match_types(?ORDTYPE(S1), ?ORDTYPE(S2)) of
true -> ?ORDDATA(S1) == ?ORDDATA(S2);
- false -> erlang:error(type_mismatch, [S1, S2])
+ false -> erlang:error(type_mismatch)
end;
is_equal(S1, S2) when ?IS_SET(S1), ?IS_ORDSET(S2) ->
- erlang:error(type_mismatch, [S1, S2]);
+ erlang:error(type_mismatch);
is_equal(S1, S2) when ?IS_ORDSET(S1), ?IS_SET(S2) ->
- erlang:error(type_mismatch, [S1, S2]).
+ erlang:error(type_mismatch).
-spec(is_subset(Set1, Set2) -> Bool when
Bool :: boolean(),
@@ -533,7 +503,7 @@ is_equal(S1, S2) when ?IS_ORDSET(S1), ?IS_SET(S2) ->
is_subset(S1, S2) when ?IS_SET(S1), ?IS_SET(S2) ->
case match_types(?TYPE(S1), ?TYPE(S2)) of
true -> subset(?LIST(S1), ?LIST(S2));
- false -> erlang:error(type_mismatch, [S1, S2])
+ false -> erlang:error(type_mismatch)
end.
-spec(is_sofs_set(Term) -> Bool when
@@ -573,7 +543,7 @@ is_disjoint(S1, S2) when ?IS_SET(S1), ?IS_SET(S2) ->
[] -> true;
[A | As] -> disjoint(?LIST(S2), A, As)
end;
- false -> erlang:error(type_mismatch, [S1, S2])
+ false -> erlang:error(type_mismatch)
end.
%%%
@@ -587,7 +557,7 @@ union(Sets) when ?IS_SET(Sets) ->
case ?TYPE(Sets) of
?SET_OF(Type) -> ?SET(lunion(?LIST(Sets)), Type);
?ANYTYPE -> Sets;
- _ -> erlang:error(badarg, [Sets])
+ _ -> erlang:error(badarg)
end.
-spec(intersection(SetOfSets) -> Set when
@@ -595,12 +565,12 @@ union(Sets) when ?IS_SET(Sets) ->
SetOfSets :: set_of_sets()).
intersection(Sets) when ?IS_SET(Sets) ->
case ?LIST(Sets) of
- [] -> erlang:error(badarg, [Sets]);
+ [] -> erlang:error(badarg);
[L | Ls] ->
case ?TYPE(Sets) of
?SET_OF(Type) ->
?SET(lintersection(Ls, L), Type);
- _ -> erlang:error(badarg, [Sets])
+ _ -> erlang:error(badarg)
end
end.
@@ -614,7 +584,7 @@ canonical_relation(Sets) when ?IS_SET(Sets) ->
?SET_OF(Type) ->
?SET(can_rel(?LIST(Sets), []), ?BINREL(Type, ST));
?ANYTYPE -> Sets;
- _ -> erlang:error(badarg, [Sets])
+ _ -> erlang:error(badarg)
end.
%%%
@@ -636,7 +606,7 @@ relation_to_family(R) when ?IS_SET(R) ->
?BINREL(DT, RT) ->
?SET(rel2family(?LIST(R)), ?FAMILY(DT, RT));
?ANYTYPE -> R;
- _Else -> erlang:error(badarg, [R])
+ _Else -> erlang:error(badarg)
end.
-spec(domain(BinRel) -> Set when
@@ -646,7 +616,7 @@ domain(R) when ?IS_SET(R) ->
case ?TYPE(R) of
?BINREL(DT, _) -> ?SET(dom(?LIST(R)), DT);
?ANYTYPE -> R;
- _Else -> erlang:error(badarg, [R])
+ _Else -> erlang:error(badarg)
end.
-spec(range(BinRel) -> Set when
@@ -656,7 +626,7 @@ range(R) when ?IS_SET(R) ->
case ?TYPE(R) of
?BINREL(_, RT) -> ?SET(ran(?LIST(R), []), RT);
?ANYTYPE -> R;
- _ -> erlang:error(badarg, [R])
+ _ -> erlang:error(badarg)
end.
-spec(field(BinRel) -> Set when
@@ -679,7 +649,7 @@ relative_product(RT) when is_tuple(RT) ->
relative_product(RL) when is_list(RL) ->
case relprod_n(RL, foo, false, false) of
{error, Reason} ->
- erlang:error(Reason, [RL]);
+ erlang:error(Reason);
Reply ->
Reply
end.
@@ -703,11 +673,11 @@ relative_product(RL, R) when is_list(RL), ?IS_SET(R) ->
EmptyR = case ?TYPE(R) of
?BINREL(_, _) -> ?LIST(R) =:= [];
?ANYTYPE -> true;
- _ -> erlang:error(badarg, [RL, R])
+ _ -> erlang:error(badarg)
end,
case relprod_n(RL, R, EmptyR, true) of
{error, Reason} ->
- erlang:error(Reason, [RL, R]);
+ erlang:error(Reason);
Reply ->
Reply
end.
@@ -720,18 +690,18 @@ relative_product1(R1, R2) when ?IS_SET(R1), ?IS_SET(R2) ->
{DTR1, RTR1} = case ?TYPE(R1) of
?BINREL(_, _) = R1T -> R1T;
?ANYTYPE -> {?ANYTYPE, ?ANYTYPE};
- _ -> erlang:error(badarg, [R1, R2])
+ _ -> erlang:error(badarg)
end,
{DTR2, RTR2} = case ?TYPE(R2) of
?BINREL(_, _) = R2T -> R2T;
?ANYTYPE -> {?ANYTYPE, ?ANYTYPE};
- _ -> erlang:error(badarg, [R1, R2])
+ _ -> erlang:error(badarg)
end,
case match_types(DTR1, DTR2) of
true when DTR1 =:= ?ANYTYPE -> R1;
true when DTR2 =:= ?ANYTYPE -> R2;
true -> ?SET(relprod(?LIST(R1), ?LIST(R2)), ?BINREL(RTR1, RTR2));
- false -> erlang:error(type_mismatch, [R1, R2])
+ false -> erlang:error(type_mismatch)
end.
-spec(converse(BinRel1) -> BinRel2 when
@@ -741,7 +711,7 @@ converse(R) when ?IS_SET(R) ->
case ?TYPE(R) of
?BINREL(DT, RT) -> ?SET(converse(?LIST(R), []), ?BINREL(RT, DT));
?ANYTYPE -> R;
- _ -> erlang:error(badarg, [R])
+ _ -> erlang:error(badarg)
end.
-spec(image(BinRel, Set1) -> Set2 when
@@ -755,10 +725,10 @@ image(R, S) when ?IS_SET(R), ?IS_SET(S) ->
true ->
?SET(usort(restrict(?LIST(S), ?LIST(R))), RT);
false ->
- erlang:error(type_mismatch, [R, S])
+ erlang:error(type_mismatch)
end;
?ANYTYPE -> R;
- _ -> erlang:error(badarg, [R, S])
+ _ -> erlang:error(badarg)
end.
-spec(inverse_image(BinRel, Set1) -> Set2 when
@@ -773,10 +743,10 @@ inverse_image(R, S) when ?IS_SET(R), ?IS_SET(S) ->
NL = restrict(?LIST(S), converse(?LIST(R), [])),
?SET(usort(NL), DT);
false ->
- erlang:error(type_mismatch, [R, S])
+ erlang:error(type_mismatch)
end;
?ANYTYPE -> R;
- _ -> erlang:error(badarg, [R, S])
+ _ -> erlang:error(badarg)
end.
-spec(strict_relation(BinRel1) -> BinRel2 when
@@ -787,7 +757,7 @@ strict_relation(R) when ?IS_SET(R) ->
Type = ?BINREL(_, _) ->
?SET(strict(?LIST(R), []), Type);
?ANYTYPE -> R;
- _ -> erlang:error(badarg, [R])
+ _ -> erlang:error(badarg)
end.
-spec(weak_relation(BinRel1) -> BinRel2 when
@@ -798,12 +768,12 @@ weak_relation(R) when ?IS_SET(R) ->
?BINREL(DT, RT) ->
case unify_types(DT, RT) of
[] ->
- erlang:error(badarg, [R]);
+ erlang:error(badarg);
Type ->
?SET(weak(?LIST(R)), ?BINREL(Type, Type))
end;
?ANYTYPE -> R;
- _ -> erlang:error(badarg, [R])
+ _ -> erlang:error(badarg)
end.
-spec(extension(BinRel1, Set, AnySet) -> BinRel2 when
@@ -816,7 +786,7 @@ extension(R, S, E) when ?IS_SET(R), ?IS_SET(S) ->
{T=?BINREL(DT, RT), ST, true} ->
case match_types(DT, ST) and match_types(RT, type(E)) of
false ->
- erlang:error(type_mismatch, [R, S, E]);
+ erlang:error(type_mismatch);
true ->
RL = ?LIST(R),
case extc([], ?LIST(S), to_external(E), RL) of
@@ -836,7 +806,7 @@ extension(R, S, E) when ?IS_SET(R), ?IS_SET(S) ->
?SET([], ?BINREL(ST, ET))
end;
{_, _, true} ->
- erlang:error(badarg, [R, S, E])
+ erlang:error(badarg)
end.
-spec(is_a_function(BinRel) -> Bool when
@@ -850,7 +820,7 @@ is_a_function(R) when ?IS_SET(R) ->
[{V,_} | Es] -> is_a_func(Es, V)
end;
?ANYTYPE -> true;
- _ -> erlang:error(badarg, [R])
+ _ -> erlang:error(badarg)
end.
-spec(restriction(BinRel1, Set) -> BinRel2 when
@@ -879,12 +849,12 @@ composite(Fn1, Fn2) when ?IS_SET(Fn1), ?IS_SET(Fn2) ->
?BINREL(DTF1, RTF1) = case ?TYPE(Fn1)of
?BINREL(_, _) = F1T -> F1T;
?ANYTYPE -> {?ANYTYPE, ?ANYTYPE};
- _ -> erlang:error(badarg, [Fn1, Fn2])
+ _ -> erlang:error(badarg)
end,
?BINREL(DTF2, RTF2) = case ?TYPE(Fn2) of
?BINREL(_, _) = F2T -> F2T;
?ANYTYPE -> {?ANYTYPE, ?ANYTYPE};
- _ -> erlang:error(badarg, [Fn1, Fn2])
+ _ -> erlang:error(badarg)
end,
case match_types(RTF1, DTF2) of
true when DTF1 =:= ?ANYTYPE -> Fn1;
@@ -894,9 +864,9 @@ composite(Fn1, Fn2) when ?IS_SET(Fn1), ?IS_SET(Fn2) ->
SL when is_list(SL) ->
?SET(sort(SL), ?BINREL(DTF1, RTF2));
Bad ->
- erlang:error(Bad, [Fn1, Fn2])
+ erlang:error(Bad)
end;
- false -> erlang:error(type_mismatch, [Fn1, Fn2])
+ false -> erlang:error(type_mismatch)
end.
-spec(inverse(Function1) -> Function2 when
@@ -909,10 +879,10 @@ inverse(Fn) when ?IS_SET(Fn) ->
SL when is_list(SL) ->
?SET(SL, ?BINREL(RT, DT));
Bad ->
- erlang:error(Bad, [Fn])
+ erlang:error(Bad)
end;
?ANYTYPE -> Fn;
- _ -> erlang:error(badarg, [Fn])
+ _ -> erlang:error(badarg)
end.
%%%
@@ -932,7 +902,7 @@ restriction(I, R, S) when is_integer(I), ?IS_SET(R), ?IS_SET(S) ->
empty ->
R;
error ->
- erlang:error(badarg, [I, R, S]);
+ erlang:error(badarg);
Sort ->
RL = ?LIST(R),
case {match_types(?REL_TYPE(I, RT), ST), ?LIST(S)} of
@@ -945,7 +915,7 @@ restriction(I, R, S) when is_integer(I), ?IS_SET(R), ?IS_SET(S) ->
{true, [E | Es]} ->
?SET(sort(restrict_n(I, keysort(I, RL), E, Es, [])), RT);
{false, _SL} ->
- erlang:error(type_mismatch, [I, R, S])
+ erlang:error(type_mismatch)
end
end;
restriction(SetFun, S1, S2) when ?IS_SET(S1), ?IS_SET(S2) ->
@@ -963,28 +933,27 @@ restriction(SetFun, S1, S2) when ?IS_SET(S1), ?IS_SET(S2) ->
NL = sort(restrict(?LIST(S2), converse(NSL, []))),
?SET(NL, Type1);
false ->
- erlang:error(type_mismatch, [SetFun, S1, S2])
+ erlang:error(type_mismatch)
end;
Bad ->
- erlang:error(Bad, [SetFun, S1, S2])
+ erlang:error(Bad)
end;
_ when Type1 =:= ?ANYTYPE ->
S1;
_XFun when ?IS_SET_OF(Type1) ->
- erlang:error(badarg, [SetFun, S1, S2]);
+ erlang:error(badarg);
XFun ->
FunT = XFun(Type1),
- case catch check_fun(Type1, XFun, FunT) of
- {'EXIT', _} ->
- erlang:error(badarg, [SetFun, S1, S2]);
+ try check_fun(Type1, XFun, FunT) of
Sort ->
case match_types(FunT, Type2) of
true ->
R1 = inverse_substitution(SL1, XFun, Sort),
?SET(sort(Sort, restrict(?LIST(S2), R1)), Type1);
false ->
- erlang:error(type_mismatch, [SetFun, S1, S2])
+ erlang:error(type_mismatch)
end
+ catch _:_ -> erlang:error(badarg)
end
end.
@@ -1000,7 +969,7 @@ drestriction(I, R, S) when is_integer(I), ?IS_SET(R), ?IS_SET(S) ->
empty ->
R;
error ->
- erlang:error(badarg, [I, R, S]);
+ erlang:error(badarg);
Sort ->
RL = ?LIST(R),
case {match_types(?REL_TYPE(I, RT), ST), ?LIST(S)} of
@@ -1013,7 +982,7 @@ drestriction(I, R, S) when is_integer(I), ?IS_SET(R), ?IS_SET(S) ->
{true, [E | Es]} ->
?SET(diff_restrict_n(I, keysort(I, RL), E, Es, []), RT);
{false, _SL} ->
- erlang:error(type_mismatch, [I, R, S])
+ erlang:error(type_mismatch)
end
end;
drestriction(SetFun, S1, S2) when ?IS_SET(S1), ?IS_SET(S2) ->
@@ -1032,20 +1001,18 @@ drestriction(SetFun, S1, S2) when ?IS_SET(S1), ?IS_SET(S2) ->
NL = sort(diff_restrict(SL2, converse(NSL, []))),
?SET(NL, Type1);
false ->
- erlang:error(type_mismatch, [SetFun, S1, S2])
+ erlang:error(type_mismatch)
end;
Bad ->
- erlang:error(Bad, [SetFun, S1, S2])
+ erlang:error(Bad)
end;
_ when Type1 =:= ?ANYTYPE ->
S1;
_XFun when ?IS_SET_OF(Type1) ->
- erlang:error(badarg, [SetFun, S1, S2]);
+ erlang:error(badarg);
XFun ->
FunT = XFun(Type1),
- case catch check_fun(Type1, XFun, FunT) of
- {'EXIT', _} ->
- erlang:error(badarg, [SetFun, S1, S2]);
+ try check_fun(Type1, XFun, FunT) of
Sort ->
case match_types(FunT, Type2) of
true ->
@@ -1053,8 +1020,9 @@ drestriction(SetFun, S1, S2) when ?IS_SET(S1), ?IS_SET(S2) ->
SL2 = ?LIST(S2),
?SET(sort(Sort, diff_restrict(SL2, R1)), Type1);
false ->
- erlang:error(type_mismatch, [SetFun, S1, S2])
+ erlang:error(type_mismatch)
end
+ catch _:_ -> erlang:error(badarg)
end
end.
@@ -1068,7 +1036,7 @@ projection(I, Set) when is_integer(I), ?IS_SET(Set) ->
empty ->
Set;
error ->
- erlang:error(badarg, [I, Set]);
+ erlang:error(badarg);
_ when I =:= 1 ->
?SET(projection1(?LIST(Set)), ?REL_TYPE(I, Type));
_ ->
@@ -1087,7 +1055,7 @@ substitution(I, Set) when is_integer(I), ?IS_SET(Set) ->
empty ->
Set;
error ->
- erlang:error(badarg, [I, Set]);
+ erlang:error(badarg);
_Sort ->
NType = ?REL_TYPE(I, Type),
NSL = substitute_element(?LIST(Set), I, []),
@@ -1102,22 +1070,21 @@ substitution(SetFun, Set) when ?IS_SET(Set) ->
{SL, NewType} ->
?SET(reverse(SL), ?BINREL(Type, NewType));
Bad ->
- erlang:error(Bad, [SetFun, Set])
+ erlang:error(Bad)
end;
false ->
empty_set();
_ when Type =:= ?ANYTYPE ->
empty_set();
_XFun when ?IS_SET_OF(Type) ->
- erlang:error(badarg, [SetFun, Set]);
+ erlang:error(badarg);
XFun ->
FunT = XFun(Type),
- case catch check_fun(Type, XFun, FunT) of
- {'EXIT', _} ->
- erlang:error(badarg, [SetFun, Set]);
+ try check_fun(Type, XFun, FunT) of
_Sort ->
SL = substitute(L, XFun, []),
?SET(SL, ?BINREL(Type, FunT))
+ catch _:_ -> erlang:error(badarg)
end
end.
@@ -1139,7 +1106,7 @@ partition(I, Set) when is_integer(I), ?IS_SET(Set) ->
empty ->
Set;
error ->
- erlang:error(badarg, [I, Set]);
+ erlang:error(badarg);
false -> % I =:= 1
?SET(partition_n(I, ?LIST(Set)), ?SET_OF(Type));
true ->
@@ -1161,7 +1128,7 @@ partition(I, R, S) when is_integer(I), ?IS_SET(R), ?IS_SET(S) ->
empty ->
{R, R};
error ->
- erlang:error(badarg, [I, R, S]);
+ erlang:error(badarg);
Sort ->
RL = ?LIST(R),
case {match_types(?REL_TYPE(I, RT), ST), ?LIST(S)} of
@@ -1176,7 +1143,7 @@ partition(I, R, S) when is_integer(I), ?IS_SET(R), ?IS_SET(S) ->
[L1 | L2] = partition3_n(I, keysort(I,RL), E, Es, [], []),
{?SET(L1, RT), ?SET(L2, RT)};
{false, _SL} ->
- erlang:error(type_mismatch, [I, R, S])
+ erlang:error(type_mismatch)
end
end;
partition(SetFun, S1, S2) when ?IS_SET(S1), ?IS_SET(S2) ->
@@ -1195,20 +1162,18 @@ partition(SetFun, S1, S2) when ?IS_SET(S1), ?IS_SET(S2) ->
[L1 | L2] = partition3(?LIST(S2), R1),
{?SET(sort(L1), Type1), ?SET(sort(L2), Type1)};
false ->
- erlang:error(type_mismatch, [SetFun, S1, S2])
+ erlang:error(type_mismatch)
end;
Bad ->
- erlang:error(Bad, [SetFun, S1, S2])
+ erlang:error(Bad)
end;
_ when Type1 =:= ?ANYTYPE ->
{S1, S1};
_XFun when ?IS_SET_OF(Type1) ->
- erlang:error(badarg, [SetFun, S1, S2]);
+ erlang:error(badarg);
XFun ->
FunT = XFun(Type1),
- case catch check_fun(Type1, XFun, FunT) of
- {'EXIT', _} ->
- erlang:error(badarg, [SetFun, S1, S2]);
+ try check_fun(Type1, XFun, FunT) of
Sort ->
case match_types(FunT, Type2) of
true ->
@@ -1216,8 +1181,9 @@ partition(SetFun, S1, S2) when ?IS_SET(S1), ?IS_SET(S2) ->
[L1 | L2] = partition3(?LIST(S2), R1),
{?SET(sort(L1), Type1), ?SET(sort(L2), Type1)};
false ->
- erlang:error(type_mismatch, [SetFun, S1, S2])
+ erlang:error(type_mismatch)
end
+ catch _:_ -> erlang:error(badarg)
end
end.
@@ -1234,7 +1200,7 @@ multiple_relative_product(T, R) when is_tuple(T), ?IS_SET(R) ->
MProd = mul_relprod(tuple_to_list(T), 1, R),
relative_product(MProd);
false ->
- erlang:error(badarg, [T, R])
+ erlang:error(badarg)
end.
-spec(join(Relation1, I, Relation2, J) -> Relation3 when
@@ -1246,8 +1212,7 @@ multiple_relative_product(T, R) when is_tuple(T), ?IS_SET(R) ->
join(R1, I1, R2, I2)
when ?IS_SET(R1), ?IS_SET(R2), is_integer(I1), is_integer(I2) ->
case test_rel(R1, I1, lte) and test_rel(R2, I2, lte) of
- false ->
- erlang:error(badarg, [R1, I1, R2, I2]);
+ false -> erlang:error(badarg);
true when ?TYPE(R1) =:= ?ANYTYPE -> R1;
true when ?TYPE(R2) =:= ?ANYTYPE -> R2;
true ->
@@ -1294,7 +1259,7 @@ family_to_relation(F) when ?IS_SET(F) ->
?FAMILY(DT, RT) ->
?SET(family2rel(?LIST(F), []), ?BINREL(DT, RT));
?ANYTYPE -> F;
- _ -> erlang:error(badarg, [F])
+ _ -> erlang:error(badarg)
end.
-spec(family_specification(Fun, Family1) -> Family2 when
@@ -1314,10 +1279,10 @@ family_specification(Fun, F) when ?IS_SET(F) ->
SL when is_list(SL) ->
?SET(SL, FType);
Bad ->
- erlang:error(Bad, [Fun, F])
+ erlang:error(Bad)
end;
?ANYTYPE -> F;
- _ -> erlang:error(badarg, [Fun, F])
+ _ -> erlang:error(badarg)
end.
-spec(union_of_family(Family) -> Set when
@@ -1328,7 +1293,7 @@ union_of_family(F) when ?IS_SET(F) ->
?FAMILY(_DT, Type) ->
?SET(un_of_fam(?LIST(F), []), Type);
?ANYTYPE -> F;
- _ -> erlang:error(badarg, [F])
+ _ -> erlang:error(badarg)
end.
-spec(intersection_of_family(Family) -> Set when
@@ -1341,9 +1306,9 @@ intersection_of_family(F) when ?IS_SET(F) ->
FU when is_list(FU) ->
?SET(FU, Type);
Bad ->
- erlang:error(Bad, [F])
+ erlang:error(Bad)
end;
- _ -> erlang:error(badarg, [F])
+ _ -> erlang:error(badarg)
end.
-spec(family_union(Family1) -> Family2 when
@@ -1354,7 +1319,7 @@ family_union(F) when ?IS_SET(F) ->
?FAMILY(DT, ?SET_OF(Type)) ->
?SET(fam_un(?LIST(F), []), ?FAMILY(DT, Type));
?ANYTYPE -> F;
- _ -> erlang:error(badarg, [F])
+ _ -> erlang:error(badarg)
end.
-spec(family_intersection(Family1) -> Family2 when
@@ -1367,10 +1332,10 @@ family_intersection(F) when ?IS_SET(F) ->
FU when is_list(FU) ->
?SET(FU, ?FAMILY(DT, Type));
Bad ->
- erlang:error(Bad, [F])
+ erlang:error(Bad)
end;
?ANYTYPE -> F;
- _ -> erlang:error(badarg, [F])
+ _ -> erlang:error(badarg)
end.
-spec(family_domain(Family1) -> Family2 when
@@ -1382,7 +1347,7 @@ family_domain(F) when ?IS_SET(F) ->
?SET(fam_dom(?LIST(F), []), ?FAMILY(FDT, DT));
?ANYTYPE -> F;
?FAMILY(_, ?ANYTYPE) -> F;
- _ -> erlang:error(badarg, [F])
+ _ -> erlang:error(badarg)
end.
-spec(family_range(Family1) -> Family2 when
@@ -1394,7 +1359,7 @@ family_range(F) when ?IS_SET(F) ->
?SET(fam_ran(?LIST(F), []), ?FAMILY(DT, RT));
?ANYTYPE -> F;
?FAMILY(_, ?ANYTYPE) -> F;
- _ -> erlang:error(badarg, [F])
+ _ -> erlang:error(badarg)
end.
-spec(family_field(Family1) -> Family2 when
@@ -1428,12 +1393,12 @@ family_difference(F1, F2) ->
fam_binop(F1, F2, FF) when ?IS_SET(F1), ?IS_SET(F2) ->
case unify_types(?TYPE(F1), ?TYPE(F2)) of
[] ->
- erlang:error(type_mismatch, [F1, F2]);
+ erlang:error(type_mismatch);
?ANYTYPE ->
F1;
Type = ?FAMILY(_, _) ->
?SET(FF(?LIST(F1), ?LIST(F2), []), Type);
- _ -> erlang:error(badarg, [F1, F2])
+ _ -> erlang:error(badarg)
end.
-spec(partition_family(SetFun, Set) -> Family when
@@ -1446,7 +1411,7 @@ partition_family(I, Set) when is_integer(I), ?IS_SET(Set) ->
empty ->
Set;
error ->
- erlang:error(badarg, [I, Set]);
+ erlang:error(badarg);
false -> % when I =:= 1
?SET(fam_partition_n(I, ?LIST(Set)),
?BINREL(?REL_TYPE(I, Type), ?SET_OF(Type)));
@@ -1464,23 +1429,22 @@ partition_family(SetFun, Set) when ?IS_SET(Set) ->
P = fam_partition(converse(NSL, []), true),
?SET(reverse(P), ?BINREL(NewType, ?SET_OF(Type)));
Bad ->
- erlang:error(Bad, [SetFun, Set])
+ erlang:error(Bad)
end;
false ->
empty_set();
_ when Type =:= ?ANYTYPE ->
empty_set();
_XFun when ?IS_SET_OF(Type) ->
- erlang:error(badarg, [SetFun, Set]);
+ erlang:error(badarg);
XFun ->
DType = XFun(Type),
- case catch check_fun(Type, XFun, DType) of
- {'EXIT', _} ->
- erlang:error(badarg, [SetFun, Set]);
+ try check_fun(Type, XFun, DType) of
Sort ->
Ts = inverse_substitution(?LIST(Set), XFun, Sort),
P = fam_partition(Ts, Sort),
?SET(reverse(P), ?BINREL(DType, ?SET_OF(Type)))
+ catch _:_ -> erlang:error(badarg)
end
end.
@@ -1499,13 +1463,13 @@ family_projection(SetFun, F) when ?IS_SET(F) ->
{SL, NewType} ->
?SET(SL, ?BINREL(DT, NewType));
Bad ->
- erlang:error(Bad, [SetFun, F])
+ erlang:error(Bad)
end;
_ ->
- erlang:error(badarg, [SetFun, F])
+ erlang:error(badarg)
end;
?ANYTYPE -> F;
- _ -> erlang:error(badarg, [SetFun, F])
+ _ -> erlang:error(badarg)
end.
%%%
@@ -1519,7 +1483,7 @@ family_to_digraph(F) when ?IS_SET(F) ->
case ?TYPE(F) of
?FAMILY(_, _) -> fam2digraph(F, digraph:new());
?ANYTYPE -> digraph:new();
- _Else -> erlang:error(badarg, [F])
+ _Else -> erlang:error(badarg)
end.
-spec(family_to_digraph(Family, GraphType) -> Graph when
@@ -1530,27 +1494,27 @@ family_to_digraph(F, Type) when ?IS_SET(F) ->
case ?TYPE(F) of
?FAMILY(_, _) -> ok;
?ANYTYPE -> ok;
- _Else -> erlang:error(badarg, [F, Type])
+ _Else -> erlang:error(badarg)
end,
try digraph:new(Type) of
G -> case catch fam2digraph(F, G) of
{error, Reason} ->
true = digraph:delete(G),
- erlang:error(Reason, [F, Type]);
+ erlang:error(Reason);
_ ->
G
end
catch
- error:badarg -> erlang:error(badarg, [F, Type])
+ error:badarg -> erlang:error(badarg)
end.
-spec(digraph_to_family(Graph) -> Family when
Graph :: digraph:graph(),
Family :: family()).
digraph_to_family(G) ->
- case catch digraph_family(G) of
- {'EXIT', _} -> erlang:error(badarg, [G]);
+ try digraph_family(G) of
L -> ?SET(L, ?FAMILY(?ATOM_TYPE, ?ATOM_TYPE))
+ catch _:_ -> erlang:error(badarg)
end.
-spec(digraph_to_family(Graph, Type) -> Family when
@@ -1560,12 +1524,12 @@ digraph_to_family(G) ->
digraph_to_family(G, T) ->
case {is_type(T), T} of
{true, ?SET_OF(?FAMILY(_,_) = Type)} ->
- case catch digraph_family(G) of
- {'EXIT', _} -> erlang:error(badarg, [G, T]);
+ try digraph_family(G) of
L -> ?SET(L, Type)
+ catch _:_ -> erlang:error(badarg)
end;
_ ->
- erlang:error(badarg, [G, T])
+ erlang:error(badarg)
end.
%%
@@ -1713,14 +1677,15 @@ func_type([], SL, Type, F) ->
setify(L, ?SET_OF(Atom)) when ?IS_ATOM_TYPE(Atom), Atom =/= ?ANYTYPE ->
?SET(usort(L), Atom);
setify(L, ?SET_OF(Type0)) ->
- case catch is_no_lists(Type0) of
- {'EXIT', _} ->
- {?SET_OF(Type), Set} = create(L, Type0, Type0, []),
- ?SET(Set, Type);
+ try is_no_lists(Type0) of
N when is_integer(N) ->
- rel(L, N, Type0);
+ rel(L, N, Type0);
Sizes ->
make_oset(L, Sizes, L, Type0)
+ catch
+ _:_ ->
+ {?SET_OF(Type), Set} = create(L, Type0, Type0, []),
+ ?SET(Set, Type)
end;
setify(E, Type0) ->
{Type, OrdSet} = make_element(E, Type0, Type0),
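Nearly every sofs change above is the same mechanical rewrite: a "case catch Expr" that tested for {'EXIT',_} becomes a try, and erlang:error/2 with the offending arguments becomes a bare erlang:error/1. The pattern in miniature, modelled on the set/1 clause above (illustrative framing only):

%% Before: catch-based, with the arguments attached to the error.
set_old(L) ->
    case catch usort(L) of
        {'EXIT', _} -> erlang:error(badarg, [L]);
        SL          -> ?SET(SL, ?ATOM_TYPE)
    end.

%% After: try/catch, and a bare badarg.
set_new(L) ->
    try usort(L) of
        SL -> ?SET(SL, ?ATOM_TYPE)
    catch
        _:_ -> erlang:error(badarg)
    end.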
diff --git a/lib/stdlib/src/stdlib.appup.src b/lib/stdlib/src/stdlib.appup.src
index 979161fef7..3c9e95e3a9 100644
--- a/lib/stdlib/src/stdlib.appup.src
+++ b/lib/stdlib/src/stdlib.appup.src
@@ -18,7 +18,7 @@
%% %CopyrightEnd%
{"%VSN%",
%% Up from - max one major revision back
- [{<<"3\\.[0-1](\\.[0-9]+)*">>,[restart_new_emulator]}], % OTP-19.*
+ [{<<"3\\.[0-3](\\.[0-9]+)*">>,[restart_new_emulator]}], % OTP-19.*
%% Down to - max one major revision back
- [{<<"3\\.[0-1](\\.[0-9]+)*">>,[restart_new_emulator]}] % OTP-19.*
+ [{<<"3\\.[0-3](\\.[0-9]+)*">>,[restart_new_emulator]}] % OTP-19.*
}.
diff --git a/lib/stdlib/src/zip.erl b/lib/stdlib/src/zip.erl
index 340cc21390..fadf96146e 100644
--- a/lib/stdlib/src/zip.erl
+++ b/lib/stdlib/src/zip.erl
@@ -179,19 +179,6 @@
external_attr,
local_header_offset}).
-%% Unix extra fields (not yet supported)
--define(UNIX_EXTRA_FIELD_TAG, 16#000d).
--record(unix_extra_field, {atime,
- mtime,
- uid,
- gid}).
-
-%% extended timestamps (not yet supported)
--define(EXTENDED_TIMESTAMP_TAG, 16#5455).
-%% -record(extended_timestamp, {mtime,
-%% atime,
-%% ctime}).
-
-define(END_OF_CENTRAL_DIR_MAGIC, 16#06054b50).
-define(END_OF_CENTRAL_DIR_SZ, (4+2+2+2+2+4+4+2)).
@@ -381,9 +368,12 @@ do_unzip(F, Options) ->
{Info, In1} = get_central_dir(In0, RawIterator, Input),
%% get rid of zip-comment
Z = zlib:open(),
- Files = get_z_files(Info, Z, In1, Opts, []),
- zlib:close(Z),
- Input(close, In1),
+ Files = try
+ get_z_files(Info, Z, In1, Opts, [])
+ after
+ zlib:close(Z),
+ Input(close, In1)
+ end,
{ok, Files}.
%% Iterate over all files in a zip archive
@@ -460,11 +450,20 @@ do_zip(F, Files, Options) ->
#zip_opts{output = Output, open_opts = OpO} = Opts,
Out0 = Output({open, F, OpO}, []),
Z = zlib:open(),
- {Out1, LHS, Pos} = put_z_files(Files, Z, Out0, 0, Opts, []),
- zlib:close(Z),
- Out2 = put_central_dir(LHS, Pos, Out1, Opts),
- Out3 = Output({close, F}, Out2),
- {ok, Out3}.
+ try
+ {Out1, LHS, Pos} = put_z_files(Files, Z, Out0, 0, Opts, []),
+ zlib:close(Z),
+ Out2 = put_central_dir(LHS, Pos, Out1, Opts),
+ Out3 = Output({close, F}, Out2),
+ {ok, Out3}
+ catch
+ C:R ->
+ Stk = erlang:get_stacktrace(),
+ zlib:close(Z),
+ Output({close, F}, Out0),
+ erlang:raise(C, R, Stk)
+ end.
+
%% List zip directory contents
%%
@@ -1379,12 +1378,7 @@ cd_file_header_to_file_info(FileName,
gid = 0},
add_extra_info(FI, ExtraField).
-%% add extra info to file (some day when we implement it)
-add_extra_info(FI, <<?EXTENDED_TIMESTAMP_TAG:16/little, _Rest/binary>>) ->
- FI; % not yet supported, some other day...
-add_extra_info(FI, <<?UNIX_EXTRA_FIELD_TAG:16/little, Rest/binary>>) ->
- _UnixExtra = unix_extra_field_and_var_from_bin(Rest),
- FI; % not yet supported, and not widely used
+%% Currently, we ignore all the extra fields.
add_extra_info(FI, _) ->
FI.
@@ -1572,20 +1566,6 @@ dos_date_time_from_datetime({{Year, Month, Day}, {Hour, Min, Sec}}) ->
<<DosDate:16>> = <<YearFrom1980:7, Month:4, Day:5>>,
{DosDate, DosTime}.
-unix_extra_field_and_var_from_bin(<<TSize:16/little,
- ATime:32/little,
- MTime:32/little,
- UID:16/little,
- GID:16/little,
- Var:TSize/binary>>) ->
- {#unix_extra_field{atime = ATime,
- mtime = MTime,
- uid = UID,
- gid = GID},
- Var};
-unix_extra_field_and_var_from_bin(_) ->
- throw(bad_unix_extra_field).
-
%% A pwrite-like function for iolists (used by memory-option)
pwrite_binary(B, Pos, Bin) when byte_size(B) =:= Pos ->
diff --git a/lib/stdlib/test/base64_SUITE.erl b/lib/stdlib/test/base64_SUITE.erl
index d0abe5c961..6ddc67464c 100644
--- a/lib/stdlib/test/base64_SUITE.erl
+++ b/lib/stdlib/test/base64_SUITE.erl
@@ -82,7 +82,7 @@ base64_decode(Config) when is_list(Config) ->
Alphabet = list_to_binary(lists:seq(0, 255)),
Alphabet = base64:decode(base64:encode(Alphabet)),
- %% Encoded base 64 strings may be devided by non base 64 chars.
+ %% Encoded base 64 strings may be divided by non base 64 chars.
%% In this cases whitespaces.
"0123456789!@#0^&*();:<>,. []{}" =
base64:decode_to_string(
diff --git a/lib/stdlib/test/beam_lib_SUITE.erl b/lib/stdlib/test/beam_lib_SUITE.erl
index 4521ecc0ef..279e15f703 100644
--- a/lib/stdlib/test/beam_lib_SUITE.erl
+++ b/lib/stdlib/test/beam_lib_SUITE.erl
@@ -81,12 +81,8 @@ normal(Conf) when is_list(Conf) ->
NoOfTables = length(ets:all()),
P0 = pps(),
- CompileFlags = [{outdir,PrivDir}, debug_info],
- {ok,_} = compile:file(Source, CompileFlags),
- {ok, Binary} = file:read_file(BeamFile),
-
- do_normal(BeamFile),
- do_normal(Binary),
+ do_normal(Source, PrivDir, BeamFile, []),
+ do_normal(Source, PrivDir, BeamFile, [no_utf8_atoms]),
{ok,_} = compile:file(Source, [{outdir,PrivDir}, no_debug_info]),
{ok, {simple, [{abstract_code, no_abstract_code}]}} =
@@ -101,7 +97,15 @@ normal(Conf) when is_list(Conf) ->
true = (P0 == pps()),
ok.
-do_normal(BeamFile) ->
+do_normal(Source, PrivDir, BeamFile, Opts) ->
+ CompileFlags = [{outdir,PrivDir}, debug_info | Opts],
+ {ok,_} = compile:file(Source, CompileFlags),
+ {ok, Binary} = file:read_file(BeamFile),
+
+ do_normal(BeamFile, Opts),
+ do_normal(Binary, Opts).
+
+do_normal(BeamFile, Opts) ->
Imports = {imports, [{erlang, get_module_info, 1},
{erlang, get_module_info, 2},
{lists, member, 2}]},
@@ -130,20 +134,31 @@ do_normal(BeamFile) ->
beam_lib:chunks(BeamFile, [abstract_code]),
%% Test reading optional chunks.
- All = ["Atom", "Code", "StrT", "ImpT", "ExpT", "FunT", "LitT"],
+ All = ["Atom", "Code", "StrT", "ImpT", "ExpT", "FunT", "LitT", "AtU8"],
{ok,{simple,Chunks}} = beam_lib:chunks(BeamFile, All, [allow_missing_chunks]),
- verify_simple(Chunks).
+ case {verify_simple(Chunks),Opts} of
+ {{missing_chunk, AtomBin}, []} when is_binary(AtomBin) -> ok;
+ {{AtomBin, missing_chunk}, [no_utf8_atoms]} when is_binary(AtomBin) -> ok
+ end,
-verify_simple([{"Atom", AtomBin},
+ %% Make sure that reading the atom chunk works when the 'allow_missing_chunks'
+ %% option is used.
+ Some = ["Code",atoms,"ExpT","LitT"],
+ {ok,{simple,SomeChunks}} = beam_lib:chunks(BeamFile, Some, [allow_missing_chunks]),
+ [{"Code",<<_/binary>>},{atoms,[_|_]},{"ExpT",<<_/binary>>},{"LitT",missing_chunk}] =
+ SomeChunks.
+
+verify_simple([{"Atom", PlainAtomChunk},
{"Code", CodeBin},
{"StrT", StrBin},
{"ImpT", ImpBin},
{"ExpT", ExpBin},
{"FunT", missing_chunk},
- {"LitT", missing_chunk}])
- when is_binary(AtomBin), is_binary(CodeBin), is_binary(StrBin),
+ {"LitT", missing_chunk},
+ {"AtU8", AtU8Chunk}])
+ when is_binary(CodeBin), is_binary(StrBin),
is_binary(ImpBin), is_binary(ExpBin) ->
- ok.
+ {PlainAtomChunk, AtU8Chunk}.
%% Read invalid beam files.
error(Conf) when is_list(Conf) ->
@@ -211,7 +226,7 @@ last_chunk(Bin) ->
do_error(BeamFile, ACopy) ->
%% evil tests
Chunks = chunk_info(BeamFile),
- {value, {_, AtomStart, _}} = lists:keysearch("Atom", 1, Chunks),
+ {value, {_, AtomStart, _}} = lists:keysearch("AtU8", 1, Chunks),
{value, {_, ImportStart, _}} = lists:keysearch("ImpT", 1, Chunks),
{value, {_, AbstractStart, _}} = lists:keysearch("Abst", 1, Chunks),
{value, {_, AttributesStart, _}} =
@@ -234,7 +249,7 @@ do_error(BeamFile, ACopy) ->
verify(not_a_beam_file, beam_lib:info(BF7)),
BF8 = set_byte(ACopy, BeamFile, 13, 17),
- verify(missing_chunk, beam_lib:chunks(BF8, ["Atom"])),
+ verify(missing_chunk, beam_lib:chunks(BF8, ["AtU8"])),
BF9 = set_byte(ACopy, BeamFile, CompileInfoStart+10, 17),
verify(invalid_chunk, beam_lib:chunks(BF9, [compile_info])).
diff --git a/lib/stdlib/test/dets_SUITE.erl b/lib/stdlib/test/dets_SUITE.erl
index aa31fdde5a..95c9b47465 100644
--- a/lib/stdlib/test/dets_SUITE.erl
+++ b/lib/stdlib/test/dets_SUITE.erl
@@ -3012,8 +3012,13 @@ repair_continuation(Config) ->
MS = [{'_',[],[true]}],
- {[true], C1} = dets:select(Tab, MS, 1),
- C2 = binary_to_term(term_to_binary(C1)),
+ SRes = term_to_binary(dets:select(Tab, MS, 1)),
+ %% Get rid of compiled match spec
+ lists:foreach(fun (P) ->
+ garbage_collect(P)
+ end, processes()),
+ {[true], C2} = binary_to_term(SRes),
+
{'EXIT', {badarg, _}} = (catch dets:select(C2)),
C3 = dets:repair_continuation(C2, MS),
{[true], C4} = dets:select(C3),
diff --git a/lib/stdlib/test/edlin_expand_SUITE.erl b/lib/stdlib/test/edlin_expand_SUITE.erl
index 718d91c6a3..1f694ea549 100644
--- a/lib/stdlib/test/edlin_expand_SUITE.erl
+++ b/lib/stdlib/test/edlin_expand_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -21,7 +21,8 @@
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_testcase/2, end_per_testcase/2,
init_per_group/2,end_per_group/2]).
--export([normal/1, quoted_fun/1, quoted_module/1, quoted_both/1, erl_1152/1]).
+-export([normal/1, quoted_fun/1, quoted_module/1, quoted_both/1, erl_1152/1,
+ erl_352/1]).
-include_lib("common_test/include/ct.hrl").
@@ -36,7 +37,7 @@ suite() ->
{timetrap,{minutes,1}}].
all() ->
- [normal, quoted_fun, quoted_module, quoted_both, erl_1152].
+ [normal, quoted_fun, quoted_module, quoted_both, erl_1152, erl_352].
groups() ->
[].
@@ -153,6 +154,78 @@ erl_1152(Config) when is_list(Config) ->
"\n"++"foo"++" "++[1089]++_ = do_format(["foo",[1089]]),
ok.
+erl_352(Config) when is_list(Config) ->
+ erl_352_test(3, 3),
+
+ erl_352_test(3, 75),
+ erl_352_test(3, 76, [trailing]),
+ erl_352_test(4, 74),
+ erl_352_test(4, 75, [leading]),
+ erl_352_test(4, 76, [leading, trailing]),
+
+ erl_352_test(75, 3),
+ erl_352_test(76, 3, [leading]),
+ erl_352_test(74, 4),
+ erl_352_test(75, 4, [leading]),
+ erl_352_test(76, 4, [leading]),
+
+ erl_352_test(74, 74, [leading]),
+ erl_352_test(74, 75, [leading]),
+ erl_352_test(74, 76, [leading, trailing]).
+
+erl_352_test(PrefixLen, SuffixLen) ->
+ erl_352_test(PrefixLen, SuffixLen, []).
+
+erl_352_test(PrefixLen, SuffixLen, Dots) ->
+ io:format("\nPrefixLen = ~w, SuffixLen = ~w\n", [PrefixLen, SuffixLen]),
+
+ PrefixM = lists:duplicate(PrefixLen, $p),
+ SuffixM = lists:duplicate(SuffixLen, $s),
+ LM = [PrefixM ++ S ++ SuffixM || S <- ["1", "2"]],
+ StrM = do_format(LM),
+ check_leading(StrM, "", PrefixM, SuffixM, Dots),
+
+ PrefixF = lists:duplicate(PrefixLen, $p),
+ SuffixF = lists:duplicate(SuffixLen-2, $s),
+ LF = [{PrefixF ++ S ++ SuffixF, 1} || S <- ["1", "2"]],
+ StrF = do_format(LF),
+ true = check_leading(StrF, "/1", PrefixF, SuffixF, Dots),
+
+ ok.
+
+check_leading(FormStr, ArityStr, Prefix, Suffix, Dots) ->
+ List = string:tokens(FormStr, "\n "),
+ io:format("~p\n", [List]),
+ true = lists:all(fun(L) -> length(L) < 80 end, List),
+ case lists:member(leading, Dots) of
+ true ->
+ true = lists:all(fun(L) ->
+ {"...", Rest} = lists:split(3, L),
+ check_trailing(Rest, ArityStr,
+ Suffix, Dots)
+ end, List);
+ false ->
+ true = lists:all(fun(L) ->
+ {Prefix, Rest} =
+ lists:split(length(Prefix), L),
+ check_trailing(Rest, ArityStr,
+ Suffix, Dots)
+ end, List)
+ end.
+
+check_trailing([I|Str], ArityStr, Suffix, Dots) ->
+ true = lists:member(I, [$1, $2]),
+ case lists:member(trailing, Dots) of
+ true ->
+ {Rest, "..." ++ ArityStr} =
+ lists:split(length(Str) - (3 + length(ArityStr)), Str),
+ true = lists:prefix(Rest, Suffix);
+ false ->
+ {Rest, ArityStr} =
+ lists:split(length(Str) - length(ArityStr), Str),
+ Rest =:= Suffix
+ end.
+
do_expand(String) ->
edlin_expand:expand(lists:reverse(String)).
diff --git a/lib/stdlib/test/erl_lint_SUITE.erl b/lib/stdlib/test/erl_lint_SUITE.erl
index c86e17f70c..c7dcd9ae16 100644
--- a/lib/stdlib/test/erl_lint_SUITE.erl
+++ b/lib/stdlib/test/erl_lint_SUITE.erl
@@ -64,7 +64,7 @@
predef/1,
maps/1,maps_type/1,maps_parallel_match/1,
otp_11851/1,otp_11879/1,otp_13230/1,
- record_errors/1]).
+ record_errors/1, otp_xxxxx/1]).
suite() ->
[{ct_hooks,[ts_install_cth]},
@@ -84,7 +84,7 @@ all() ->
too_many_arguments, basic_errors, bin_syntax_errors, predef,
maps, maps_type, maps_parallel_match,
otp_11851, otp_11879, otp_13230,
- record_errors].
+ record_errors, otp_xxxxx].
groups() ->
[{unused_vars_warn, [],
@@ -2002,22 +2002,22 @@ otp_5362(Config) when is_list(Config) ->
<<"-compile(nowarn_deprecated_function).
-compile(nowarn_bif_clash).
spawn(A) ->
- erlang:hash(A, 3000),
+ erlang:now(),
spawn(A).
">>,
- {[nowarn_unused_function,
+ {[nowarn_unused_function,
warn_deprecated_function,
warn_bif_clash]},
{error,
[{5,erl_lint,{call_to_redefined_old_bif,{spawn,1}}}],
- [{4,erl_lint,{deprecated,{erlang,hash,2},{erlang,phash2,2},
- "a future release"}}]}},
-
+ [{4,erl_lint,{deprecated,{erlang,now,0},
+ "Deprecated BIF. See the \"Time and Time Correction in Erlang\" "
+ "chapter of the ERTS User's Guide for more information."}}]}},
{otp_5362_5,
<<"-compile(nowarn_deprecated_function).
-compile(nowarn_bif_clash).
spawn(A) ->
- erlang:hash(A, 3000),
+ erlang:now(),
spawn(A).
">>,
{[nowarn_unused_function]},
@@ -2026,37 +2026,37 @@ otp_5362(Config) when is_list(Config) ->
%% The special nowarn_X are not affected by general warn_X.
{otp_5362_6,
- <<"-compile({nowarn_deprecated_function,{erlang,hash,2}}).
+ <<"-compile({nowarn_deprecated_function,{erlang,now,0}}).
-compile({nowarn_bif_clash,{spawn,1}}).
spawn(A) ->
- erlang:hash(A, 3000),
+ erlang:now(),
spawn(A).
">>,
- {[nowarn_unused_function,
- warn_deprecated_function,
+ {[nowarn_unused_function,
+ warn_deprecated_function,
warn_bif_clash]},
{errors,
[{2,erl_lint,disallowed_nowarn_bif_clash}],[]}},
{otp_5362_7,
<<"-export([spawn/1]).
- -compile({nowarn_deprecated_function,{erlang,hash,2}}).
+ -compile({nowarn_deprecated_function,{erlang,now,0}}).
-compile({nowarn_bif_clash,{spawn,1}}).
-compile({nowarn_bif_clash,{spawn,2}}). % bad
-compile([{nowarn_deprecated_function,
- [{erlang,hash,-1},{3,hash,-1}]}, % 2 bad
- {nowarn_deprecated_function, {{a,b,c},hash,-1}}]). % bad
+ [{erlang,now,-1},{3,now,-1}]}, % 2 bad
+ {nowarn_deprecated_function, {{a,b,c},now,-1}}]). % bad
spawn(A) ->
- erlang:hash(A, 3000),
+ erlang:now(),
spawn(A).
">>,
{[nowarn_unused_function]},
{error,[{3,erl_lint,disallowed_nowarn_bif_clash},
{4,erl_lint,disallowed_nowarn_bif_clash},
{4,erl_lint,{bad_nowarn_bif_clash,{spawn,2}}}],
- [{5,erl_lint,{bad_nowarn_deprecated_function,{3,hash,-1}}},
- {5,erl_lint,{bad_nowarn_deprecated_function,{erlang,hash,-1}}},
- {5,erl_lint,{bad_nowarn_deprecated_function,{{a,b,c},hash,-1}}}]}
+ [{5,erl_lint,{bad_nowarn_deprecated_function,{3,now,-1}}},
+ {5,erl_lint,{bad_nowarn_deprecated_function,{erlang,now,-1}}},
+ {5,erl_lint,{bad_nowarn_deprecated_function,{{a,b,c},now,-1}}}]}
},
{otp_5362_8,
@@ -2064,14 +2064,15 @@ otp_5362(Config) when is_list(Config) ->
-compile(warn_deprecated_function).
-compile(warn_bif_clash).
spawn(A) ->
- erlang:hash(A, 3000),
+ erlang:now(),
spawn(A).
">>,
{[nowarn_unused_function,
{nowarn_bif_clash,{spawn,1}}]}, % has no effect
{warnings,
- [{5,erl_lint,{deprecated,{erlang,hash,2},{erlang,phash2,2},
- "a future release"}}]}},
+ [{5,erl_lint,{deprecated,{erlang,now,0},
+ "Deprecated BIF. See the \"Time and Time Correction in Erlang\" "
+ "chapter of the ERTS User's Guide for more information."}}]}},
{otp_5362_9,
<<"-include_lib(\"stdlib/include/qlc.hrl\").
@@ -2083,11 +2084,11 @@ otp_5362(Config) when is_list(Config) ->
[]},
{otp_5362_10,
- <<"-compile({nowarn_deprecated_function,{erlang,hash,2}}).
+ <<"-compile({nowarn_deprecated_function,{erlang,now,0}}).
-compile({nowarn_bif_clash,{spawn,1}}).
-import(x,[spawn/1]).
spin(A) ->
- erlang:hash(A, 3000),
+ erlang:now(),
spawn(A).
">>,
{[nowarn_unused_function,
@@ -2097,11 +2098,11 @@ otp_5362(Config) when is_list(Config) ->
[{2,erl_lint,disallowed_nowarn_bif_clash}],[]}},
{call_deprecated_function,
- <<"t(X) -> erlang:hash(X, 2000).">>,
+ <<"t(X) -> crypto:md5(X).">>,
[],
{warnings,
- [{1,erl_lint,{deprecated,{erlang,hash,2},
- {erlang,phash2,2},"a future release"}}]}},
+ [{1,erl_lint,{deprecated,{crypto,md5,1},
+ {crypto,hash,2}, "a future release"}}]}},
{call_removed_function,
<<"t(X) -> regexp:match(X).">>,
@@ -3869,6 +3870,55 @@ record_errors(Config) when is_list(Config) ->
{3,erl_lint,{redefine_field,r,a}}],[]}}],
run(Config, Ts).
+otp_xxxxx(Config) ->
+ Ts = [{constraint1,
+ <<"-export([t/1]).
+ -spec t(X) -> X when is_subtype(integer()).
+ t(a) -> foo:bar().
+ ">>,
+ [],
+ {errors,
+ [{2,erl_parse,"unsupported constraint " ++ ["is_subtype"]}],
+ []}},
+ {constraint2,
+ <<"-export([t/1]).
+ -spec t(X) -> X when bad_atom(X, integer()).
+ t(a) -> foo:bar().
+ ">>,
+ [],
+ {errors,
+ [{2,erl_parse,"unsupported constraint " ++ ["bad_atom"]}],
+ []}},
+ {constraint3,
+ <<"-export([t/1]).
+ -spec t(X) -> X when is_subtype(bad_variable, integer()).
+ t(a) -> foo:bar().
+ ">>,
+ [],
+ {errors,[{2,erl_parse,"bad type variable"}],[]}},
+ {constraint4,
+ <<"-export([t/1]).
+ -spec t(X) -> X when is_subtype(atom(), integer()).
+ t(a) -> foo:bar().
+ ">>,
+ [],
+ {errors,[{2,erl_parse,"bad type variable"}],[]}},
+ {constraint5,
+ <<"-export([t/1]).
+ -spec t(X) -> X when is_subtype(X, integer()).
+ t(a) -> foo:bar().
+ ">>,
+ [],
+ []},
+ {constraint6,
+ <<"-export([t/1]).
+ -spec t(X) -> X when X :: integer().
+ t(a) -> foo:bar().
+ ">>,
+ [],
+ []}],
+ run(Config, Ts).
+
run(Config, Tests) ->
F = fun({N,P,Ws,E}, BadL) ->
case catch run_test(Config, P, Ws) of
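The constraint cases added above exercise the two spellings of spec constraints that erl_parse/erl_lint accept. A minimal sketch of the equivalent forms (the function names f/1 and g/1 are illustrative only, not taken from the suite):

    %% Both constraint forms below say that X must be an integer();
    %% the is_subtype/2 form is the older, still-accepted spelling.
    -spec f(X) -> X when X :: integer().
    f(X) -> X.

    -spec g(X) -> X when is_subtype(X, integer()).
    g(X) -> X.
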
diff --git a/lib/stdlib/test/erl_pp_SUITE.erl b/lib/stdlib/test/erl_pp_SUITE.erl
index 13c5662741..31ea3210a8 100644
--- a/lib/stdlib/test/erl_pp_SUITE.erl
+++ b/lib/stdlib/test/erl_pp_SUITE.erl
@@ -825,12 +825,13 @@ type_examples() ->
%% is_subtype(V, T) syntax, we need a few examples of the syntax.
{ex31,<<"-spec t1(FooBar :: t99()) -> t99();"
"(t2()) -> t2();"
- "('\\'t::4'()) -> '\\'t::4'() when is_subtype('\\'t::4'(), t24);"
- "(t23()) -> t23() when is_subtype(t23(), atom()),"
- " is_subtype(t23(), t14());"
- "(t24()) -> t24() when is_subtype(t24(), atom()),"
- " is_subtype(t24(), t14()),"
- " is_subtype(t24(), '\\'t::4'()).">>},
+ "('\\'t::4'()) -> {'\\'t::4'(), B}"
+ " when is_subtype(B, '\\'t::4'());"
+ "(t23()) -> C when is_subtype(C, atom()),"
+ " is_subtype(C, t14());"
+ "(t24()) -> D when is_subtype(D, atom()),"
+ " is_subtype(D, t14()),"
+ " is_subtype(D, '\\'t::4'()).">>},
{ex32,<<"-spec mod:t2() -> any(). ">>},
{ex33,<<"-opaque attributes_data() :: "
"[{'column', column()} | {'line', info_line()} |"
diff --git a/lib/stdlib/test/erl_scan_SUITE.erl b/lib/stdlib/test/erl_scan_SUITE.erl
index 4ae734eb65..7d0ba967f9 100644
--- a/lib/stdlib/test/erl_scan_SUITE.erl
+++ b/lib/stdlib/test/erl_scan_SUITE.erl
@@ -772,10 +772,9 @@ unicode() ->
erl_scan:string([1089]),
{error,{{1,1},erl_scan,{illegal,character}},{1,2}} =
erl_scan:string([1089], {1,1}),
- {error,{1,erl_scan,{illegal,atom}},1} =
- erl_scan:string("'a"++[1089]++"b'", 1),
- {error,{{1,1},erl_scan,{illegal,atom}},{1,6}} =
- erl_scan:string("'a"++[1089]++"b'", {1,1}),
+ {error,{{1,3},erl_scan,{illegal,character}},{1,4}} =
+ erl_scan:string("'a" ++ [999999999] ++ "c'", {1,1}),
+
test("\"a"++[1089]++"b\""),
{ok,[{char,1,1}],1} =
erl_scan_string([$$,$\\,$^,1089], 1),
@@ -786,8 +785,8 @@ unicode() ->
erl_scan:format_error(Error),
{error,{{1,1},erl_scan,_},{1,11}} =
erl_scan:string("\"qa\\x{aaa}",{1,1}),
- {error,{{1,1},erl_scan,{illegal,atom}},{1,12}} =
- erl_scan:string("'qa\\x{aaa}'",{1,1}),
+ {error,{{1,1},erl_scan,_},{1,11}} =
+ erl_scan:string("'qa\\x{aaa}",{1,1}),
{ok,[{char,1,1089}],1} =
erl_scan_string([$$,1089], 1),
@@ -904,9 +903,9 @@ more_chars() ->
%% OTP-10302. Unicode characters scanner/parser.
otp_10302(Config) when is_list(Config) ->
%% From unicode():
- {error,{1,erl_scan,{illegal,atom}},1} =
+ {ok,[{atom,1,'aсb'}],1} =
erl_scan:string("'a"++[1089]++"b'", 1),
- {error,{{1,1},erl_scan,{illegal,atom}},{1,12}} =
+ {ok,[{atom,{1,1},'qaપ'}],{1,12}} =
erl_scan:string("'qa\\x{aaa}'",{1,1}),
{ok,[{char,1,1089}],1} = erl_scan_string([$$,1089], 1),
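The scanner updates above reflect that quoted atoms may now contain Unicode code points. A small sketch of the new behaviour (the helper name is illustrative, and this assumes an OTP release with Unicode atom support):

    %% Scanning a quoted atom containing a Cyrillic letter now succeeds
    %% instead of returning {error,...,{illegal,atom},...}.
    scan_unicode_atom_example() ->
        {ok, [{atom, 1, Atom}], 1} = erl_scan:string("'a" ++ [1089] ++ "b'", 1),
        3 = length(atom_to_list(Atom)),
        ok.
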
diff --git a/lib/stdlib/test/ets_SUITE.erl b/lib/stdlib/test/ets_SUITE.erl
index f68d5eca3f..8581440d58 100644
--- a/lib/stdlib/test/ets_SUITE.erl
+++ b/lib/stdlib/test/ets_SUITE.erl
@@ -22,7 +22,7 @@
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_group/2,end_per_group/2]).
-export([default/1,setbag/1,badnew/1,verybadnew/1,named/1,keypos2/1,
- privacy/1,privacy_owner/2]).
+ privacy/1]).
-export([empty/1,badinsert/1]).
-export([time_lookup/1,badlookup/1,lookup_order/1]).
-export([delete_elem/1,delete_tab/1,delete_large_tab/1,
@@ -82,27 +82,6 @@
%% Convenience for manual testing
-export([random_test/0]).
-%% internal exports
--export([dont_make_worse_sub/0, make_better_sub1/0, make_better_sub2/0]).
--export([t_repair_continuation_do/1, t_bucket_disappears_do/1,
- select_fail_do/1, whitebox_1/1, whitebox_2/1, t_delete_all_objects_do/1,
- t_delete_object_do/1, t_init_table_do/1, t_insert_list_do/1,
- update_element_opts/1, update_element_opts/4, update_element/4, update_element_do/4,
- update_element_neg/1, update_element_neg_do/1, update_counter_do/1, update_counter_neg/1,
- evil_update_counter_do/1, fixtable_next_do/1, heir_do/1, give_away_do/1, setopts_do/1,
- rename_do/1, rename_unnamed_do/1, interface_equality_do/1, ordered_match_do/1,
- ordered_do/1, privacy_do/1, empty_do/1, badinsert_do/1, time_lookup_do/1,
- lookup_order_do/1, lookup_element_mult_do/1, delete_tab_do/1, delete_elem_do/1,
- match_delete_do/1, match_delete3_do/1, firstnext_do/1,
- slot_do/1, match1_do/1, match2_do/1, match_object_do/1, match_object2_do/1,
- misc1_do/1, safe_fixtable_do/1, info_do/1, dups_do/1, heavy_lookup_do/1,
- heavy_lookup_element_do/1, member_do/1, otp_5340_do/1, otp_7665_do/1, meta_wb_do/1,
- do_heavy_concurrent/1, tab2file2_do/2, exit_large_table_owner_do/2,
- types_do/1, sleeper/0, memory_do/1, update_counter_with_default_do/1,
- update_counter_table_growth_do/1,
- ms_tracee_dummy/1, ms_tracee_dummy/2, ms_tracee_dummy/3, ms_tracee_dummy/4
- ]).
-
-export([t_select_reverse/1]).
-include_lib("common_test/include/ct.hrl").
@@ -228,7 +207,7 @@ memory_check_summary(_Config) ->
%% Test that a disappearing bucket during select of a non-fixed table works.
t_bucket_disappears(Config) when is_list(Config) ->
- repeat_for_opts(t_bucket_disappears_do).
+ repeat_for_opts(fun t_bucket_disappears_do/1).
t_bucket_disappears_do(Opts) ->
EtsMem = etsmem(),
@@ -396,11 +375,16 @@ ms_tracer_collect(Tracee, Ref, Acc) ->
ms_tracee(Parent, CallArgList) ->
Parent ! {self(), ready},
receive start -> ok end,
- lists:foreach(fun(Args) ->
- erlang:apply(?MODULE, ms_tracee_dummy, tuple_to_list(Args))
- end, CallArgList).
-
-
+ F = fun({A1}) ->
+ ms_tracee_dummy(A1);
+ ({A1,A2}) ->
+ ms_tracee_dummy(A1, A2);
+ ({A1,A2,A3}) ->
+ ms_tracee_dummy(A1, A2, A3);
+ ({A1,A2,A3,A4}) ->
+ ms_tracee_dummy(A1, A2, A3, A4)
+ end,
+ lists:foreach(F, CallArgList).
ms_tracee_dummy(_) -> ok.
ms_tracee_dummy(_,_) -> ok.
@@ -418,7 +402,7 @@ assert_eq(A,B) ->
%% Test ets:repair_continuation/2.
t_repair_continuation(Config) when is_list(Config) ->
- repeat_for_opts(t_repair_continuation_do).
+ repeat_for_opts(fun t_repair_continuation_do/1).
t_repair_continuation_do(Opts) ->
@@ -564,7 +548,8 @@ default(Config) when is_list(Config) ->
%% Test that select fails even if nothing can match.
select_fail(Config) when is_list(Config) ->
EtsMem = etsmem(),
- repeat_for_opts(select_fail_do, [all_types,write_concurrency]),
+ repeat_for_opts(fun select_fail_do/1,
+ [all_types,write_concurrency]),
verify_etsmem(EtsMem).
select_fail_do(Opts) ->
@@ -594,7 +579,7 @@ select_fail_do(Opts) ->
%% Whitebox test of ets:info(X, memory).
memory(Config) when is_list(Config) ->
ok = chk_normal_tab_struct_size(),
- repeat_for_opts(memory_do,[compressed]),
+ repeat_for_opts(fun memory_do/1, [compressed]),
catch erts_debug:set_internal_state(available_internal_state, false).
memory_do(Opts) ->
@@ -704,12 +689,12 @@ adjust_xmem([_T1,_T2,_T3,_T4], {A0,B0,C0,D0} = _Mem0, EstCnt) ->
%% Misc. whitebox tests
t_whitebox(Config) when is_list(Config) ->
EtsMem = etsmem(),
- repeat_for_opts(whitebox_1),
- repeat_for_opts(whitebox_1),
- repeat_for_opts(whitebox_1),
- repeat_for_opts(whitebox_2),
- repeat_for_opts(whitebox_2),
- repeat_for_opts(whitebox_2),
+ repeat_for_opts(fun whitebox_1/1),
+ repeat_for_opts(fun whitebox_1/1),
+ repeat_for_opts(fun whitebox_1/1),
+ repeat_for_opts(fun whitebox_2/1),
+ repeat_for_opts(fun whitebox_2/1),
+ repeat_for_opts(fun whitebox_2/1),
verify_etsmem(EtsMem).
whitebox_1(Opts) ->
@@ -774,7 +759,7 @@ check_badarg({'EXIT', {badarg, [{M,F,A,_} | _]}}, M, F, Args) ->
%% Test ets:delete_all_objects/1.
t_delete_all_objects(Config) when is_list(Config) ->
EtsMem = etsmem(),
- repeat_for_opts(t_delete_all_objects_do),
+ repeat_for_opts(fun t_delete_all_objects_do/1),
verify_etsmem(EtsMem).
get_kept_objects(T) ->
@@ -808,7 +793,7 @@ t_delete_all_objects_do(Opts) ->
%% Test ets:delete_object/2.
t_delete_object(Config) when is_list(Config) ->
EtsMem = etsmem(),
- repeat_for_opts(t_delete_object_do),
+ repeat_for_opts(fun t_delete_object_do/1),
verify_etsmem(EtsMem).
t_delete_object_do(Opts) ->
@@ -881,7 +866,7 @@ make_init_fun(N) ->
%% Test ets:init_table/2.
t_init_table(Config) when is_list(Config)->
EtsMem = etsmem(),
- repeat_for_opts(t_init_table_do),
+ repeat_for_opts(fun t_init_table_do/1),
verify_etsmem(EtsMem).
t_init_table_do(Opts) ->
@@ -957,7 +942,7 @@ t_insert_new(Config) when is_list(Config) ->
%% Test ets:insert/2 with list of objects.
t_insert_list(Config) when is_list(Config) ->
EtsMem = etsmem(),
- repeat_for_opts(t_insert_list_do),
+ repeat_for_opts(fun t_insert_list_do/1),
verify_etsmem(EtsMem).
t_insert_list_do(Opts) ->
@@ -1187,7 +1172,7 @@ partly_bound(Config) when is_list(Config) ->
end.
dont_make_worse() ->
- seventyfive_percent_success({?MODULE,dont_make_worse_sub,[]},0,0,10).
+ seventyfive_percent_success(fun dont_make_worse_sub/0, 0, 0, 10).
dont_make_worse_sub() ->
T = build_table([a,b],[a,b],15000),
@@ -1199,8 +1184,9 @@ dont_make_worse_sub() ->
ok.
make_better() ->
- fifty_percent_success({?MODULE,make_better_sub2,[]},0,0,10),
- fifty_percent_success({?MODULE,make_better_sub1,[]},0,0,10).
+ fifty_percent_success(fun make_better_sub2/0, 0, 0, 10),
+ fifty_percent_success(fun make_better_sub1/0, 0, 0, 10).
+
make_better_sub1() ->
T = build_table2([a,b],[a,b],15000),
T1 = time_match_object(T,{'_',1500,a,a}, [{{1500,a,a},1500,a,a}]),
@@ -1485,7 +1471,7 @@ do_random_test() ->
%% Test various variants of update_element.
update_element(Config) when is_list(Config) ->
EtsMem = etsmem(),
- repeat_for_opts(update_element_opts),
+ repeat_for_opts(fun update_element_opts/1),
verify_etsmem(EtsMem).
update_element_opts(Opts) ->
@@ -1647,7 +1633,7 @@ update_element_neg_do(T) ->
%% test various variants of update_counter.
update_counter(Config) when is_list(Config) ->
EtsMem = etsmem(),
- repeat_for_opts(update_counter_do),
+ repeat_for_opts(fun update_counter_do/1),
verify_etsmem(EtsMem).
update_counter_do(Opts) ->
@@ -1868,7 +1854,7 @@ evil_update_counter(Config) when is_list(Config) ->
ordsets:module_info(),
rand:module_info(),
- repeat_for_opts(evil_update_counter_do).
+ repeat_for_opts(fun evil_update_counter_do/1).
evil_update_counter_do(Opts) ->
EtsMem = etsmem(),
@@ -1915,7 +1901,7 @@ evil_counter_1(Iter, T) ->
evil_counter_1(Iter-1, T).
update_counter_with_default(Config) when is_list(Config) ->
- repeat_for_opts(update_counter_with_default_do).
+ repeat_for_opts(fun update_counter_with_default_do/1).
update_counter_with_default_do(Opts) ->
T1 = ets_new(a, [set | Opts]),
@@ -1953,7 +1939,7 @@ update_counter_with_default_do(Opts) ->
ok.
update_counter_table_growth(_Config) ->
- repeat_for_opts(update_counter_table_growth_do).
+ repeat_for_opts(fun update_counter_table_growth_do/1).
update_counter_table_growth_do(Opts) ->
Set = ets_new(b, [set | Opts]),
@@ -1964,7 +1950,8 @@ update_counter_table_growth_do(Opts) ->
%% Check that a first-next sequence always works on a fixed table.
fixtable_next(Config) when is_list(Config) ->
- repeat_for_opts(fixtable_next_do, [write_concurrency,all_types]).
+ repeat_for_opts(fun fixtable_next_do/1,
+ [write_concurrency,all_types]).
fixtable_next_do(Opts) ->
EtsMem = etsmem(),
@@ -2104,7 +2091,7 @@ write_concurrency(Config) when is_list(Config) ->
%% The 'heir' option.
heir(Config) when is_list(Config) ->
- repeat_for_opts(heir_do).
+ repeat_for_opts(fun heir_do/1).
heir_do(Opts) ->
EtsMem = etsmem(),
@@ -2244,7 +2231,7 @@ heir_1(HeirData,Mode,Opts) ->
%% Test ets:give_away/3.
give_away(Config) when is_list(Config) ->
- repeat_for_opts(give_away_do).
+ repeat_for_opts(fun give_away_do/1).
give_away_do(Opts) ->
T = ets_new(foo,[named_table, private | Opts]),
@@ -2325,7 +2312,7 @@ give_away_receiver(T, Giver) ->
%% Test ets:setopts/2.
setopts(Config) when is_list(Config) ->
- repeat_for_opts(setopts_do,[write_concurrency,all_types]).
+ repeat_for_opts(fun setopts_do/1, [write_concurrency,all_types]).
setopts_do(Opts) ->
Self = self(),
@@ -2475,7 +2462,7 @@ bad_table_call(T,{F,Args,_,{return,Return}}) ->
%% Check rename of ets tables.
rename(Config) when is_list(Config) ->
- repeat_for_opts(rename_do, [write_concurrency, all_types]).
+ repeat_for_opts(fun rename_do/1, [write_concurrency, all_types]).
rename_do(Opts) ->
EtsMem = etsmem(),
@@ -2490,7 +2477,8 @@ rename_do(Opts) ->
%% Check rename of unnamed ets table.
rename_unnamed(Config) when is_list(Config) ->
- repeat_for_opts(rename_unnamed_do,[write_concurrency,all_types]).
+ repeat_for_opts(fun rename_unnamed_do/1,
+ [write_concurrency,all_types]).
rename_unnamed_do(Opts) ->
EtsMem = etsmem(),
@@ -2565,7 +2553,7 @@ evil_create_fixed_tab() ->
%% Tests that the return values and errors are equal for set and
%% ordered_set tables where applicable.
interface_equality(Config) when is_list(Config) ->
- repeat_for_opts(interface_equality_do).
+ repeat_for_opts(fun interface_equality_do/1).
interface_equality_do(Opts) ->
EtsMem = etsmem(),
@@ -2629,7 +2617,7 @@ maybe_sort(Any) ->
%% Test match, match_object and match_delete in ordered_set tables.
ordered_match(Config) when is_list(Config)->
- repeat_for_opts(ordered_match_do).
+ repeat_for_opts(fun ordered_match_do/1).
ordered_match_do(Opts) ->
EtsMem = etsmem(),
@@ -2675,7 +2663,7 @@ ordered_match_do(Opts) ->
%% Test basic functionality in ordered_set tables.
ordered(Config) when is_list(Config) ->
- repeat_for_opts(ordered_do).
+ repeat_for_opts(fun ordered_do/1).
ordered_do(Opts) ->
EtsMem = etsmem(),
@@ -2801,12 +2789,13 @@ keypos2(Config) when is_list(Config) ->
%% Privacy check. Check that a named(public/private/protected) table
%% cannot be read by the wrong process(es).
privacy(Config) when is_list(Config) ->
- repeat_for_opts(privacy_do).
+ repeat_for_opts(fun privacy_do/1).
privacy_do(Opts) ->
EtsMem = etsmem(),
process_flag(trap_exit,true),
- Owner = my_spawn_link(?MODULE,privacy_owner,[self(),Opts]),
+ Parent = self(),
+ Owner = my_spawn_link(fun() -> privacy_owner(Parent, Opts) end),
receive
{'EXIT',Owner,Reason} ->
exit({privacy_test,Reason});
@@ -2886,7 +2875,7 @@ rotate_tuple(Tuple, N) ->
%% Check lookup in an empty table and lookup of a non-existing key.
empty(Config) when is_list(Config) ->
- repeat_for_opts(empty_do).
+ repeat_for_opts(fun empty_do/1).
empty_do(Opts) ->
EtsMem = etsmem(),
@@ -2899,7 +2888,7 @@ empty_do(Opts) ->
%% Check proper return values for illegal insert operations.
badinsert(Config) when is_list(Config) ->
- repeat_for_opts(badinsert_do).
+ repeat_for_opts(fun badinsert_do/1).
badinsert_do(Opts) ->
EtsMem = etsmem(),
@@ -2923,7 +2912,7 @@ badinsert_do(Opts) ->
time_lookup(Config) when is_list(Config) ->
%% just for timing, really
EtsMem = etsmem(),
- Values = repeat_for_opts(time_lookup_do),
+ Values = repeat_for_opts(fun time_lookup_do/1),
verify_etsmem(EtsMem),
{comment,lists:flatten(io_lib:format(
"~p ets lookups/s",[Values]))}.
@@ -2957,7 +2946,8 @@ badlookup(Config) when is_list(Config) ->
%% Test that lookup returns objects in order of insertion for bag and dbag.
lookup_order(Config) when is_list(Config) ->
EtsMem = etsmem(),
- repeat_for_opts(lookup_order_do, [write_concurrency,[bag,duplicate_bag]]),
+ repeat_for_opts(fun lookup_order_do/1,
+ [write_concurrency,[bag,duplicate_bag]]),
verify_etsmem(EtsMem),
ok.
@@ -3048,7 +3038,7 @@ fill_tab(Tab,Val) ->
%% OTP-2386. Multiple return elements.
lookup_element_mult(Config) when is_list(Config) ->
- repeat_for_opts(lookup_element_mult_do).
+ repeat_for_opts(fun lookup_element_mult_do/1).
lookup_element_mult_do(Opts) ->
EtsMem = etsmem(),
@@ -3086,7 +3076,8 @@ lem_crash_3(T) ->
%% Check delete of an element inserted in a `filled' table.
delete_elem(Config) when is_list(Config) ->
- repeat_for_opts(delete_elem_do, [write_concurrency, all_types]).
+ repeat_for_opts(fun delete_elem_do/1,
+ [write_concurrency, all_types]).
delete_elem_do(Opts) ->
EtsMem = etsmem(),
@@ -3103,7 +3094,8 @@ delete_elem_do(Opts) ->
%% Check that ets:delete() works and releases the name of the
%% deleted table.
delete_tab(Config) when is_list(Config) ->
- repeat_for_opts(delete_tab_do,[write_concurrency,all_types]).
+ repeat_for_opts(fun delete_tab_do/1,
+ [write_concurrency,all_types]).
delete_tab_do(Opts) ->
Name = foo,
@@ -3301,10 +3293,14 @@ exit_large_table_owner(Config) when is_list(Config) ->
end, 1)
end,
EtsMem = etsmem(),
- repeat_for_opts({exit_large_table_owner_do,{FEData,Config}}),
+ repeat_for_opts(fun(Opts) ->
+ exit_large_table_owner_do(Opts,
+ FEData,
+ Config)
+ end),
verify_etsmem(EtsMem).
-exit_large_table_owner_do(Opts,{FEData,Config}) ->
+exit_large_table_owner_do(Opts, FEData, Config) ->
verify_rescheduling_exit(Config, FEData, [named_table | Opts], true, 1, 1),
verify_rescheduling_exit(Config, FEData, Opts, false, 1, 1).
@@ -3472,7 +3468,8 @@ baddelete(Config) when is_list(Config) ->
%% Check that match_delete works. Also tests tab2list function.
match_delete(Config) when is_list(Config) ->
EtsMem = etsmem(),
- repeat_for_opts(match_delete_do,[write_concurrency,all_types]),
+ repeat_for_opts(fun match_delete_do/1,
+ [write_concurrency,all_types]),
verify_etsmem(EtsMem).
match_delete_do(Opts) ->
@@ -3489,7 +3486,7 @@ match_delete_do(Opts) ->
%% OTP-3005: check match_delete with constant argument.
match_delete3(Config) when is_list(Config) ->
- repeat_for_opts(match_delete3_do).
+ repeat_for_opts(fun match_delete3_do/1).
match_delete3_do(Opts) ->
EtsMem = etsmem(),
@@ -3514,7 +3511,7 @@ match_delete3_do(Opts) ->
%% Test ets:first/1 & ets:next/2.
firstnext(Config) when is_list(Config) ->
- repeat_for_opts(firstnext_do).
+ repeat_for_opts(fun firstnext_do/1).
firstnext_do(Opts) ->
EtsMem = etsmem(),
@@ -3572,7 +3569,7 @@ dyn_lookup(T, K) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
slot(Config) when is_list(Config) ->
- repeat_for_opts(slot_do).
+ repeat_for_opts(fun slot_do/1).
slot_do(Opts) ->
EtsMem = etsmem(),
@@ -3597,7 +3594,7 @@ slot_loop(Tab,SlotNo,EltsSoFar) ->
match1(Config) when is_list(Config) ->
- repeat_for_opts(match1_do).
+ repeat_for_opts(fun match1_do/1).
match1_do(Opts) ->
EtsMem = etsmem(),
@@ -3633,7 +3630,7 @@ match1_do(Opts) ->
%% Test match with specified keypos bag table.
match2(Config) when is_list(Config) ->
- repeat_for_opts(match2_do).
+ repeat_for_opts(fun match2_do/1).
match2_do(Opts) ->
EtsMem = etsmem(),
@@ -3660,7 +3657,7 @@ match2_do(Opts) ->
%% Some ets:match_object tests.
match_object(Config) when is_list(Config) ->
- repeat_for_opts(match_object_do).
+ repeat_for_opts(fun match_object_do/1).
match_object_do(Opts) ->
EtsMem = etsmem(),
@@ -3760,7 +3757,7 @@ match_object_do(Opts) ->
%% Tests that db_match_object does not generate a `badarg' when
%% resuming a search with no previous matches.
match_object2(Config) when is_list(Config) ->
- repeat_for_opts(match_object2_do).
+ repeat_for_opts(fun match_object2_do/1).
match_object2_do(Opts) ->
EtsMem = etsmem(),
@@ -3796,7 +3793,7 @@ tab2list(Config) when is_list(Config) ->
%% Simple general small test. If this fails, ets is in really bad
%% shape.
misc1(Config) when is_list(Config) ->
- repeat_for_opts(misc1_do).
+ repeat_for_opts(fun misc1_do/1).
misc1_do(Opts) ->
EtsMem = etsmem(),
@@ -3814,7 +3811,7 @@ misc1_do(Opts) ->
%% Check the safe_fixtable function.
safe_fixtable(Config) when is_list(Config) ->
- repeat_for_opts(safe_fixtable_do).
+ repeat_for_opts(fun safe_fixtable_do/1).
safe_fixtable_do(Opts) ->
EtsMem = etsmem(),
@@ -3872,7 +3869,7 @@ safe_fixtable_do(Opts) ->
%% Tests ets:info result for required tuples.
info(Config) when is_list(Config) ->
- repeat_for_opts(info_do).
+ repeat_for_opts(fun info_do/1).
info_do(Opts) ->
EtsMem = etsmem(),
@@ -3904,7 +3901,7 @@ info_do(Opts) ->
%% Test various duplicate_bags stuff.
dups(Config) when is_list(Config) ->
- repeat_for_opts(dups_do).
+ repeat_for_opts(fun dups_do/1).
dups_do(Opts) ->
EtsMem = etsmem(),
@@ -3970,7 +3967,9 @@ tab2file_do(FName, Opts) ->
%% Check the ets:tab2file function on a filled set/bag type ets table.
tab2file2(Config) when is_list(Config) ->
- repeat_for_opts({tab2file2_do,Config}, [[set,bag],compressed]).
+ repeat_for_opts(fun(Opts) ->
+ tab2file2_do(Opts, Config)
+ end, [[set,bag],compressed]).
tab2file2_do(Opts, Config) ->
EtsMem = etsmem(),
@@ -4234,7 +4233,7 @@ make_sub_binary(List, Num) when is_list(List) ->
%% Perform multiple lookups for every key in a large table.
heavy_lookup(Config) when is_list(Config) ->
- repeat_for_opts(heavy_lookup_do).
+ repeat_for_opts(fun heavy_lookup_do/1).
heavy_lookup_do(Opts) ->
EtsMem = etsmem(),
@@ -4257,7 +4256,7 @@ do_lookup(Tab, N) ->
%% Perform multiple lookups for every element in a large table.
heavy_lookup_element(Config) when is_list(Config) ->
- repeat_for_opts(heavy_lookup_element_do).
+ repeat_for_opts(fun heavy_lookup_element_do/1).
heavy_lookup_element_do(Opts) ->
EtsMem = etsmem(),
@@ -4285,7 +4284,7 @@ do_lookup_element(Tab, N, M) ->
heavy_concurrent(Config) when is_list(Config) ->
ct:timetrap({minutes,30}), %% valgrind needs a lot of time
- repeat_for_opts(do_heavy_concurrent).
+ repeat_for_opts(fun do_heavy_concurrent/1).
do_heavy_concurrent(Opts) ->
Size = 10000,
@@ -4370,7 +4369,7 @@ foldr_ordered(Config) when is_list(Config) ->
%% Test ets:member BIF.
member(Config) when is_list(Config) ->
- repeat_for_opts(member_do, [write_concurrency, all_types]).
+ repeat_for_opts(fun member_do/1, [write_concurrency, all_types]).
member_do(Opts) ->
EtsMem = etsmem(),
@@ -4453,26 +4452,26 @@ time_match(Tab,Match) ->
seventyfive_percent_success(_,S,Fa,0) ->
true = (S > ((S + Fa) * 0.75));
-seventyfive_percent_success({M,F,A},S,Fa,N) ->
- case (catch apply(M,F,A)) of
- {'EXIT', _} ->
- seventyfive_percent_success({M,F,A},S,Fa+1,N-1);
- _ ->
- seventyfive_percent_success({M,F,A},S+1,Fa,N-1)
+seventyfive_percent_success(F, S, Fa, N) when is_function(F, 0) ->
+ try F() of
+ _ ->
+ seventyfive_percent_success(F, S+1, Fa, N-1)
+ catch error:_ ->
+ seventyfive_percent_success(F, S, Fa+1, N-1)
end.
fifty_percent_success(_,S,Fa,0) ->
true = (S > ((S + Fa) * 0.5));
-fifty_percent_success({M,F,A},S,Fa,N) ->
- case (catch apply(M,F,A)) of
- {'EXIT', _} ->
- fifty_percent_success({M,F,A},S,Fa+1,N-1);
- _ ->
- fifty_percent_success({M,F,A},S+1,Fa,N-1)
+fifty_percent_success(F, S, Fa, N) when is_function(F, 0) ->
+ try F() of
+ _ ->
+ fifty_percent_success(F, S+1, Fa, N-1)
+ catch
+ error:_ ->
+ fifty_percent_success(F, S, Fa+1, N-1)
end.
-
create_random_string(0) ->
[];
@@ -4811,7 +4810,7 @@ otp_6338(Config) when is_list(Config) ->
%% Elements could come in the wrong order in a bag if a rehash occurred.
otp_5340(Config) when is_list(Config) ->
- repeat_for_opts(otp_5340_do).
+ repeat_for_opts(fun otp_5340_do/1).
otp_5340_do(Opts) ->
N = 3000,
@@ -4847,7 +4846,7 @@ verify2(_Err, _) ->
%% delete_object followed by delete on fixed bag failed to delete objects.
otp_7665(Config) when is_list(Config) ->
- repeat_for_opts(otp_7665_do).
+ repeat_for_opts(fun otp_7665_do/1).
otp_7665_do(Opts) ->
Tab = ets_new(otp_7665,[bag | Opts]),
@@ -4877,7 +4876,7 @@ otp_7665_act(Tab,Min,Max,DelNr) ->
%% Whitebox testing of meta name table hashing.
meta_wb(Config) when is_list(Config) ->
EtsMem = etsmem(),
- repeat_for_opts(meta_wb_do),
+ repeat_for_opts(fun meta_wb_do/1),
verify_etsmem(EtsMem).
@@ -5446,7 +5445,7 @@ smp_select_delete(Config) when is_list(Config) ->
%% Test different types.
types(Config) when is_list(Config) ->
init_externals(),
- repeat_for_opts(types_do,[[set,ordered_set],compressed]).
+ repeat_for_opts(fun types_do/1, [[set,ordered_set],compressed]).
types_do(Opts) ->
EtsMem = etsmem(),
@@ -5848,12 +5847,8 @@ log_test_proc(Proc) when is_pid(Proc) ->
Proc.
my_spawn(Fun) -> log_test_proc(spawn(Fun)).
-%%my_spawn(M,F,A) -> log_test_proc(spawn(M,F,A)).
-%%my_spawn(N,M,F,A) -> log_test_proc(spawn(N,M,F,A)).
my_spawn_link(Fun) -> log_test_proc(spawn_link(Fun)).
-my_spawn_link(M,F,A) -> log_test_proc(spawn_link(M,F,A)).
-%%my_spawn_link(N,M,F,A) -> log_test_proc(spawn_link(N,M,F,A)).
my_spawn_opt(Fun,Opts) ->
case spawn_opt(Fun,Opts) of
@@ -6096,7 +6091,7 @@ make_port() ->
open_port({spawn, "efile"}, [eof]).
make_pid() ->
- spawn_link(?MODULE, sleeper, []).
+ spawn_link(fun sleeper/0).
sleeper() ->
receive after infinity -> ok end.
@@ -6232,11 +6227,7 @@ make_unaligned_sub_binary(List) ->
repeat_for_opts(F) ->
repeat_for_opts(F, [write_concurrency, read_concurrency, compressed]).
-repeat_for_opts(F, OptGenList) when is_atom(F) ->
- repeat_for_opts(fun(Opts) -> ?MODULE:F(Opts) end, OptGenList);
-repeat_for_opts({F,Args}, OptGenList) when is_atom(F) ->
- repeat_for_opts(fun(Opts) -> ?MODULE:F(Opts,Args) end, OptGenList);
-repeat_for_opts(F, OptGenList) ->
+repeat_for_opts(F, OptGenList) when is_function(F, 1) ->
repeat_for_opts(F, OptGenList, []).
repeat_for_opts(F, [], Acc) ->
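The ets_SUITE hunks above replace atom/{M,F,A} callbacks with funs, so repeat_for_opts no longer needs the long list of internal exports that was removed. A minimal sketch of the pattern (helper names are illustrative, not taken from the suite):

    %% Before: repeat_for_opts(some_test_do) called ?MODULE:some_test_do(Opts),
    %% which forced every *_do/1 helper to be exported.
    %% After: the caller passes a fun, so no extra exports are needed.
    repeat_for_opts_sketch(F, OptLists) when is_function(F, 1) ->
        [F(Opts) || Opts <- OptLists].

    caller_sketch() ->
        repeat_for_opts_sketch(fun(Opts) -> length(Opts) end, [[set], [bag]]).
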
diff --git a/lib/stdlib/test/ets_tough_SUITE.erl b/lib/stdlib/test/ets_tough_SUITE.erl
index 49aba7a529..0abce3200f 100644
--- a/lib/stdlib/test/ets_tough_SUITE.erl
+++ b/lib/stdlib/test/ets_tough_SUITE.erl
@@ -19,10 +19,15 @@
%%
-module(ets_tough_SUITE).
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
- init_per_group/2,end_per_group/2,ex1/1]).
--export([init/1,terminate/2,handle_call/3,handle_info/2]).
+ init_per_group/2,end_per_group/2,
+ ex1/1]).
-export([init_per_testcase/2, end_per_testcase/2]).
--compile([export_all]).
+
+%% gen_server behavior.
+-behavior(gen_server).
+-export([init/1,terminate/2,handle_call/3,handle_cast/2,
+ handle_info/2,code_change/3]).
+
-include_lib("common_test/include/ct.hrl").
suite() ->
@@ -235,33 +240,6 @@ random_element(T) ->
I = rand:uniform(tuple_size(T)),
element(I,T).
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-show_table(N) ->
- FileName = ["etsdump.",integer_to_list(N)],
- case file:open(FileName,read) of
- {ok,Fd} ->
- show_entries(Fd);
- _ ->
- error
- end.
-
-show_entries(Fd) ->
- case phys_read_len(Fd) of
- {ok,Len} ->
- case phys_read_entry(Fd,Len) of
- {ok,ok} ->
- ok;
- {ok,{Key,Val}} ->
- io:format("~w\n",[{Key,Val}]),
- show_entries(Fd);
- _ ->
- error
- end;
- _ ->
- error
- end.
-
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -378,20 +356,6 @@ dget_class(ServerPid,Class,Condition) ->
derase_class(ServerPid,Class) ->
gen_server:call(ServerPid,{handle_delete_class,Class}, infinity).
-%%% dmodify(ServerPid,Application) -> ok
-%%%
-%%% Applies a function on every instance in the database.
-%%% The user provided function must always return one of the
-%%% terms {ok,NewItem}, true, or false.
-%%% Aug 96, this is only used to reset all timestamp values
-%%% in the database.
-%%% The function is supplied as Application = {Mod, Fun, ExtraArgs},
-%%% where the instance will be prepended to ExtraArgs before each
-%%% call is made.
-
-dmodify(ServerPid,Application) ->
- gen_server:call(ServerPid,{handle_dmodify,Application}, infinity).
-
%%% ddump_first(ServerPid,DumpDir) -> {dump_more,Ticket} | already_dumping
%%%
%%% Starts dumping the database. This call redirects all database updates
@@ -643,9 +607,15 @@ handle_call(stop,_From,Admin) ->
?ets_delete(Admin), % Make sure table is gone before reply is sent.
{stop, normal, ok, []}.
+handle_cast(_Req, Admin) ->
+ {noreply, Admin}.
+
handle_info({'EXIT',_Pid,_Reason},Admin) ->
{stop,normal,Admin}.
+code_change(_OldVsn, StateData, _Extra) ->
+ {ok, StateData}.
+
handle_delete(Class, Key, Admin) ->
handle_call({handle_delete,Class,Key},from,Admin).
diff --git a/lib/stdlib/test/filelib_SUITE.erl b/lib/stdlib/test/filelib_SUITE.erl
index 4f8936edbf..87fba815d2 100644
--- a/lib/stdlib/test/filelib_SUITE.erl
+++ b/lib/stdlib/test/filelib_SUITE.erl
@@ -25,7 +25,8 @@
init_per_testcase/2,end_per_testcase/2,
wildcard_one/1,wildcard_two/1,wildcard_errors/1,
fold_files/1,otp_5960/1,ensure_dir_eexist/1,ensure_dir_symlink/1,
- wildcard_symlink/1, is_file_symlink/1, file_props_symlink/1]).
+ wildcard_symlink/1, is_file_symlink/1, file_props_symlink/1,
+ find_source/1]).
-import(lists, [foreach/2]).
@@ -45,7 +46,8 @@ suite() ->
all() ->
[wildcard_one, wildcard_two, wildcard_errors,
fold_files, otp_5960, ensure_dir_eexist, ensure_dir_symlink,
- wildcard_symlink, is_file_symlink, file_props_symlink].
+ wildcard_symlink, is_file_symlink, file_props_symlink,
+ find_source].
groups() ->
[].
@@ -503,3 +505,52 @@ file_props_symlink(Config) ->
FileSize = filelib:file_size(Alias, erl_prim_loader),
FileSize = filelib:file_size(Alias, prim_file)
end.
+
+find_source(Config) when is_list(Config) ->
+ BeamFile = code:which(lists),
+ BeamName = filename:basename(BeamFile),
+ BeamDir = filename:dirname(BeamFile),
+ SrcName = filename:basename(BeamFile, ".beam") ++ ".erl",
+
+ {ok, BeamFile} = filelib:find_file(BeamName, BeamDir),
+ {ok, BeamFile} = filelib:find_file(BeamName, BeamDir, []),
+ {ok, BeamFile} = filelib:find_file(BeamName, BeamDir, [{"",""},{"ebin","src"}]),
+ {error, not_found} = filelib:find_file(BeamName, BeamDir, [{"ebin","src"}]),
+
+ {ok, SrcFile} = filelib:find_file(SrcName, BeamDir),
+ {ok, SrcFile} = filelib:find_file(SrcName, BeamDir, []),
+ {ok, SrcFile} = filelib:find_file(SrcName, BeamDir, [{"foo","bar"},{"ebin","src"}]),
+ {error, not_found} = filelib:find_file(SrcName, BeamDir, [{"",""}]),
+
+ {ok, SrcFile} = filelib:find_source(BeamFile),
+ {ok, SrcFile} = filelib:find_source(BeamName, BeamDir),
+ {ok, SrcFile} = filelib:find_source(BeamName, BeamDir,
+ [{".erl",".yrl",[{"",""}]},
+ {".beam",".erl",[{"ebin","src"}]}]),
+ {error, not_found} = filelib:find_source(BeamName, BeamDir,
+ [{".erl",".yrl",[{"",""}]}]),
+
+ {ok, ParserErl} = filelib:find_source(code:which(erl_parse)),
+ {ok, ParserYrl} = filelib:find_source(ParserErl),
+ "lry." ++ _ = lists:reverse(ParserYrl),
+ {ok, ParserYrl} = filelib:find_source(ParserErl,
+ [{".beam",".erl",[{"ebin","src"}]},
+ {".erl",".yrl",[{"",""}]}]),
+
+ %% find_source automatically checks the local directory regardless of rules
+ {ok, ParserYrl} = filelib:find_source(ParserErl),
+ {ok, ParserYrl} = filelib:find_source(ParserErl,
+ [{".beam",".erl",[{"ebin","src"}]}]),
+
+ %% find_file does not check the local directory unless in the rules
+ ParserYrlName = filename:basename(ParserYrl),
+ ParserYrlDir = filename:dirname(ParserYrl),
+ {ok, ParserYrl} = filelib:find_file(ParserYrlName, ParserYrlDir,
+ [{"",""}]),
+ {error, not_found} = filelib:find_file(ParserYrlName, ParserYrlDir,
+ [{"ebin","src"}]),
+
+ %% local directory is in the default list for find_file
+ {ok, ParserYrl} = filelib:find_file(ParserYrlName, ParserYrlDir),
+ {ok, ParserYrl} = filelib:find_file(ParserYrlName, ParserYrlDir, []),
+ ok.
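A minimal usage sketch of the filelib API exercised by the new find_source/1 test above (the module lists is chosen only as an example of a beam file with a reachable source file):

    %% filelib:find_source/1 maps a .beam path back to its .erl source
    %% using the default ebin->src style rules.
    find_source_example() ->
        Beam = code:which(lists),
        {ok, Src} = filelib:find_source(Beam),
        ".erl" = filename:extension(Src),
        ok.
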
diff --git a/lib/stdlib/test/filename_SUITE.erl b/lib/stdlib/test/filename_SUITE.erl
index b7c4d3a6e5..dc3daa56c1 100644
--- a/lib/stdlib/test/filename_SUITE.erl
+++ b/lib/stdlib/test/filename_SUITE.erl
@@ -29,6 +29,7 @@
dirname_bin/1, extension_bin/1, join_bin/1, t_nativename_bin/1]).
-export([pathtype_bin/1,rootname_bin/1,split_bin/1]).
-export([t_basedir_api/1, t_basedir_xdg/1, t_basedir_windows/1]).
+-export([safe_relative_path/1]).
-include_lib("common_test/include/ct.hrl").
@@ -41,7 +42,8 @@ all() ->
find_src,
absname_bin, absname_bin_2,
{group,p},
- t_basedir_xdg, t_basedir_windows].
+ t_basedir_xdg, t_basedir_windows,
+ safe_relative_path].
groups() ->
[{p, [parallel],
@@ -421,8 +423,10 @@ t_nativename(Config) when is_list(Config) ->
find_src(Config) when is_list(Config) ->
{Source,_} = filename:find_src(file),
["file"|_] = lists:reverse(filename:split(Source)),
- {_,_} = filename:find_src(init, [{".","."}, {"ebin","src"}]),
-
+ {Source,_} = filename:find_src(file, [{"",""}, {"ebin","src"}]),
+ {Source,_} = filename:find_src(Source),
+ {Source,_} = filename:find_src(Source ++ ".erl"),
+
%% Try to find the source for a preloaded module.
{error,{preloaded,init}} = filename:find_src(init),
@@ -768,6 +772,71 @@ t_nativename_bin(Config) when is_list(Config) ->
filename:nativename(<<"/usr/tmp//arne/">>)
end.
+safe_relative_path(Config) ->
+ PrivDir = proplists:get_value(priv_dir, Config),
+ Root = filename:join(PrivDir, ?FUNCTION_NAME),
+ ok = file:make_dir(Root),
+ ok = file:set_cwd(Root),
+
+ ok = file:make_dir("a"),
+ ok = file:set_cwd("a"),
+ ok = file:make_dir("b"),
+ ok = file:set_cwd("b"),
+ ok = file:make_dir("c"),
+
+ ok = file:set_cwd(Root),
+
+ "a" = test_srp("a"),
+ "a/b" = test_srp("a/b"),
+ "a/b" = test_srp("a/./b"),
+ "a/b" = test_srp("a/./b/."),
+
+ "" = test_srp("a/.."),
+ "" = test_srp("a/./.."),
+ "" = test_srp("a/../."),
+ "a" = test_srp("a/b/.."),
+ "a" = test_srp("a/../a"),
+ "a" = test_srp("a/../a/../a"),
+ "a/b/c" = test_srp("a/../a/b/c"),
+
+ unsafe = test_srp("a/../.."),
+ unsafe = test_srp("a/../../.."),
+ unsafe = test_srp("a/./../.."),
+ unsafe = test_srp("a/././../../.."),
+ unsafe = test_srp("a/b/././../../.."),
+
+ unsafe = test_srp(PrivDir), %Absolute path.
+
+ ok.
+
+test_srp(RelPath) ->
+ Res = do_test_srp(RelPath),
+ Res = case do_test_srp(list_to_binary(RelPath)) of
+ Bin when is_binary(Bin) ->
+ binary_to_list(Bin);
+ Other ->
+ Other
+ end.
+
+do_test_srp(RelPath) ->
+ {ok,Root} = file:get_cwd(),
+ ok = file:set_cwd(RelPath),
+ {ok,Cwd} = file:get_cwd(),
+ ok = file:set_cwd(Root),
+ case filename:safe_relative_path(RelPath) of
+ unsafe ->
+ true = length(Cwd) < length(Root),
+ unsafe;
+ "" ->
+ "";
+ SafeRelPath ->
+ ok = file:set_cwd(SafeRelPath),
+ {ok,Cwd} = file:get_cwd(),
+ true = length(Cwd) >= length(Root),
+ ok = file:set_cwd(Root),
+ SafeRelPath
+ end.
+
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% basedirs
t_basedir_api(Config) when is_list(Config) ->
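The safe_relative_path cases above can be summarised in a short sketch of the function's contract (the expected values follow from the tests; this assumes the filename:safe_relative_path/1 API introduced by this change):

    %% "." and ".." components are normalised; paths that would climb out
    %% of the current directory, and absolute paths, are reported as unsafe.
    safe_relative_path_example() ->
        "a/b"  = filename:safe_relative_path("a/./b"),
        "a"    = filename:safe_relative_path("a/b/.."),
        ""     = filename:safe_relative_path("a/.."),
        unsafe = filename:safe_relative_path("a/../.."),
        unsafe = filename:safe_relative_path("/abs/path"),
        ok.
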
diff --git a/lib/stdlib/test/gen_statem_SUITE.erl b/lib/stdlib/test/gen_statem_SUITE.erl
index 8f2ba0cab2..ac27c9fc79 100644
--- a/lib/stdlib/test/gen_statem_SUITE.erl
+++ b/lib/stdlib/test/gen_statem_SUITE.erl
@@ -38,7 +38,7 @@ all() ->
{group, abnormal},
{group, abnormal_handle_event},
shutdown, stop_and_reply, state_enter, event_order,
- state_timeout, code_change,
+ state_timeout, event_types, code_change,
{group, sys},
hibernate, enter_loop].
@@ -600,15 +600,26 @@ state_enter(_Config) ->
(internal, Prev, N) ->
Self ! {internal,start,Prev,N},
{keep_state,N + 1};
+ ({call,From}, repeat, N) ->
+ {repeat_state,N + 1,
+ [{reply,From,{repeat,start,N}}]};
({call,From}, echo, N) ->
- {next_state,wait,N + 1,{reply,From,{echo,start,N}}};
+ {next_state,wait,N + 1,
+ {reply,From,{echo,start,N}}};
({call,From}, {stop,Reason}, N) ->
- {stop_and_reply,Reason,[{reply,From,{stop,N}}],N + 1}
+ {stop_and_reply,Reason,
+ [{reply,From,{stop,N}}],N + 1}
end,
wait =>
- fun (enter, Prev, N) ->
+ fun (enter, Prev, N) when N < 5 ->
+ {repeat_state,N + 1,
+ {reply,{Self,N},{enter,Prev}}};
+ (enter, Prev, N) ->
Self ! {enter,wait,Prev,N},
{keep_state,N + 1};
+ ({call,From}, repeat, N) ->
+ {repeat_state_and_data,
+ [{reply,From,{repeat,wait,N}}]};
({call,From}, echo, N) ->
{next_state,start,N + 1,
[{next_event,internal,wait},
@@ -620,11 +631,15 @@ state_enter(_Config) ->
[{enter,start,start,1}] = flush(),
{echo,start,2} = gen_statem:call(STM, echo),
- [{enter,wait,start,3}] = flush(),
- {wait,[4|_]} = sys:get_state(STM),
- {echo,wait,4} = gen_statem:call(STM, echo),
- [{enter,start,wait,5},{internal,start,wait,6}] = flush(),
- {stop,7} = gen_statem:call(STM, {stop,bye}),
+ [{3,{enter,start}},{4,{enter,start}},{enter,wait,start,5}] = flush(),
+ {wait,[6|_]} = sys:get_state(STM),
+ {repeat,wait,6} = gen_statem:call(STM, repeat),
+ [{enter,wait,wait,6}] = flush(),
+ {echo,wait,7} = gen_statem:call(STM, echo),
+ [{enter,start,wait,8},{internal,start,wait,9}] = flush(),
+ {repeat,start,10} = gen_statem:call(STM, repeat),
+ [{enter,start,start,11}] = flush(),
+ {stop,12} = gen_statem:call(STM, {stop,bye}),
[{'EXIT',STM,bye}] = flush(),
{noproc,_} =
@@ -801,6 +816,74 @@ state_timeout(_Config) ->
+%% Test that all event types can be sent with {next_event,EventType,_}
+event_types(_Config) ->
+ process_flag(trap_exit, true),
+
+ Machine =
+ %% Abusing the internal format of From...
+ #{init =>
+ fun () ->
+ {ok, start, undefined}
+ end,
+ start =>
+ fun ({call,_} = Call, Req, undefined) ->
+ {next_state, state1, undefined,
+ [{next_event,internal,1},
+ {next_event,state_timeout,2},
+ {next_event,timeout,3},
+ {next_event,info,4},
+ {next_event,cast,5},
+ {next_event,Call,Req}]}
+ end,
+ state1 =>
+ fun (internal, 1, undefined) ->
+ {next_state, state2, undefined}
+ end,
+ state2 =>
+ fun (state_timeout, 2, undefined) ->
+ {next_state, state3, undefined}
+ end,
+ state3 =>
+ fun (timeout, 3, undefined) ->
+ {next_state, state4, undefined}
+ end,
+ state4 =>
+ fun (info, 4, undefined) ->
+ {next_state, state5, undefined}
+ end,
+ state5 =>
+ fun (cast, 5, undefined) ->
+ {next_state, state6, undefined}
+ end,
+ state6 =>
+ fun ({call,From}, stop, undefined) ->
+ {stop_and_reply, shutdown,
+ [{reply,From,stopped}]}
+ end},
+ {ok,STM} =
+ gen_statem:start_link(
+ ?MODULE, {map_statem,Machine,[]}, [{debug,[trace]}]),
+
+ stopped = gen_statem:call(STM, stop),
+ receive
+ {'EXIT',STM,shutdown} ->
+ ok
+ after 500 ->
+ ct:fail(did_not_stop)
+ end,
+
+ {noproc,_} =
+ ?EXPECT_FAILURE(gen_statem:call(STM, hej), Reason),
+ case flush() of
+ [] ->
+ ok;
+ Other2 ->
+ ct:fail({unexpected,Other2})
+ end.
+
+
+
sys1(Config) ->
{ok,Pid} = gen_statem:start(?MODULE, start_arg(Config, []), []),
{status, Pid, {module,gen_statem}, _} = sys:get_status(Pid),
@@ -1722,6 +1805,10 @@ handle_event(
{keep_state,[NewData|Machine]};
{keep_state,NewData,Ops} ->
{keep_state,[NewData|Machine],Ops};
+ {repeat_state,NewData} ->
+ {repeat_state,[NewData|Machine]};
+ {repeat_state,NewData,Ops} ->
+ {repeat_state,[NewData|Machine],Ops};
Other ->
Other
end;
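The state_enter changes above use the repeat_state and repeat_state_and_data return values: unlike keep_state, they make gen_statem run the state enter call for the current state again. A minimal handle_event_function-style sketch (the state name ready and the event refresh are illustrative):

    %% keep_state: stay in the state, no new enter call.
    %% repeat_state: stay in the state and repeat the enter call.
    handle_event(enter, _OldState, ready, Data) ->
        {keep_state, Data};
    handle_event({call, From}, refresh, ready, Data) ->
        {repeat_state, Data, [{reply, From, ok}]};
    handle_event(_Type, _Event, _State, Data) ->
        {keep_state, Data}.
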
diff --git a/lib/stdlib/test/io_SUITE.erl b/lib/stdlib/test/io_SUITE.erl
index 7d48cbc97c..d546e8fad2 100644
--- a/lib/stdlib/test/io_SUITE.erl
+++ b/lib/stdlib/test/io_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1999-2016. All Rights Reserved.
+%% Copyright Ericsson AB 1999-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -30,7 +30,7 @@
io_lib_print_binary_depth_one/1, otp_10302/1, otp_10755/1,
otp_10836/1, io_lib_width_too_small/1,
io_with_huge_message_queue/1, format_string/1,
- maps/1, coverage/1]).
+ maps/1, coverage/1, otp_14178_unicode_atoms/1, otp_14175/1]).
-export([pretty/2]).
@@ -61,7 +61,7 @@ all() ->
printable_range, bad_printable_range,
io_lib_print_binary_depth_one, otp_10302, otp_10755, otp_10836,
io_lib_width_too_small, io_with_huge_message_queue,
- format_string, maps, coverage].
+ format_string, maps, coverage, otp_14178_unicode_atoms, otp_14175].
%% Error cases for output.
error_1(Config) when is_list(Config) ->
@@ -415,13 +415,13 @@ otp_6354(Config) when is_list(Config) ->
bt(<<"#rrrrr{\n"
" f1 = 1,\n"
" f2 = #rrrrr{f1 = a,f2 = b,f3 = c},\n"
- " f3 = \n"
+ " f3 =\n"
" #rrrrr{\n"
" f1 = h,f2 = i,\n"
- " f3 = \n"
+ " f3 =\n"
" #rrrrr{\n"
" f1 = aa,\n"
- " f2 = \n"
+ " f2 =\n"
" #rrrrr{\n"
" f1 = #rrrrr{f1 = a,f2 = b,f3 = c},\n"
" f2 = 2,f3 = 3},\n"
@@ -431,17 +431,17 @@ otp_6354(Config) when is_list(Config) ->
2,3},bb}}},
-1)),
bt(<<"#d{aaaaaaaaaaaaaaaaaaaa = 1,\n"
- " bbbbbbbbbbbbbbbbbbbb = \n"
+ " bbbbbbbbbbbbbbbbbbbb =\n"
" #d{aaaaaaaaaaaaaaaaaaaa = a,bbbbbbbbbbbbbbbbbbbb = b,\n"
" cccccccccccccccccccc = c,dddddddddddddddddddd = d,\n"
" eeeeeeeeeeeeeeeeeeee = e},\n"
" cccccccccccccccccccc = 3,\n"
- " dddddddddddddddddddd = \n"
+ " dddddddddddddddddddd =\n"
" #d{aaaaaaaaaaaaaaaaaaaa = h,bbbbbbbbbbbbbbbbbbbb = i,\n"
- " cccccccccccccccccccc = \n"
+ " cccccccccccccccccccc =\n"
" #d{aaaaaaaaaaaaaaaaaaaa = aa,"
"bbbbbbbbbbbbbbbbbbbb = bb,\n"
- " cccccccccccccccccccc = \n"
+ " cccccccccccccccccccc =\n"
" #d{aaaaaaaaaaaaaaaaaaaa = 1,"
"bbbbbbbbbbbbbbbbbbbb = 2,\n"
" cccccccccccccccccccc = 3,"
@@ -534,21 +534,21 @@ otp_6354(Config) when is_list(Config) ->
p({A,{A,{A,{A,{A,{A,{A,
{g,{h,{i,{j,{k,{l,{m,{n,{o,{a}}}}}}}}}}}}}}}}}, 100)),
bt(<<"#c{\n"
- " f1 = \n"
+ " f1 =\n"
" #c{\n"
- " f1 = \n"
+ " f1 =\n"
" #c{\n"
- " f1 = \n"
+ " f1 =\n"
" #c{\n"
- " f1 = \n"
+ " f1 =\n"
" #c{\n"
- " f1 = \n"
+ " f1 =\n"
" #c{\n"
- " f1 = \n"
+ " f1 =\n"
" #c{\n"
- " f1 = \n"
+ " f1 =\n"
" #c{\n"
- " f1 = \n"
+ " f1 =\n"
" #c{\n"
" f1 = #c{f1 = #c{f1 = #c{f1 = a,"
"f2 = b},f2 = b},f2 = b},\n"
@@ -564,13 +564,13 @@ otp_6354(Config) when is_list(Config) ->
p({c,{c,{c,{c,{c,{c,{c,{c,{c,{c,{c,{c,a,b},b},b},b},b},b},
b},b},b},b},b},b}, -1)),
bt(<<"#rrrrr{\n"
- " f1 = \n"
+ " f1 =\n"
" #rrrrr{\n"
- " f1 = \n"
+ " f1 =\n"
" #rrrrr{\n"
- " f1 = \n"
+ " f1 =\n"
" #rrrrr{\n"
- " f1 = \n"
+ " f1 =\n"
" {rrrrr,{rrrrr,a,#rrrrr{f1 = {rrrrr,1,2},f2 = a,"
"f3 = b}},b},\n"
" f2 = {rrrrr,c,d},\n"
@@ -2106,3 +2106,221 @@ coverage(_Config) ->
io:format("~s\n", [S2]),
ok.
+
+%% Test UTF-8 atoms.
+otp_14178_unicode_atoms(_Config) ->
+ "atom" = fmt("~ts", ['atom']),
+ "кирилли́ческий атом" = fmt("~ts", ['кирилли́ческий атом']),
+ [16#10FFFF] = fmt("~ts", ['\x{10FFFF}']),
+
+ %% ~s must not accept code points greater than 255.
+ bad_io_lib_format("~s", ['\x{100}']),
+ bad_io_lib_format("~s", ['кирилли́ческий атом']),
+
+ ok.
+
+bad_io_lib_format(F, S) ->
+ try io_lib:format(F, S) of
+ _ ->
+ ct:fail({should_fail,F,S})
+ catch
+ error:badarg ->
+ ok
+ end.
+
+otp_14175(_Config) ->
+ "..." = p(#{}, 0),
+ "#{}" = p(#{}, 1),
+ "#{...}" = p(#{a => 1}, 1),
+ "#{#{} => a}" = p(#{#{} => a}, 2),
+ "#{a => 1,...}" = p(#{a => 1, b => 2}, 2),
+ "#{a => 1,b => 2}" = p(#{a => 1, b => 2}, -1),
+
+ M = #{kaaaaaaaaaaaaaaaaaaa => v1,kbbbbbbbbbbbbbbbbbbb => v2,
+ kccccccccccccccccccc => v3,kddddddddddddddddddd => v4,
+ keeeeeeeeeeeeeeeeeee => v5},
+ "#{...}" = p(M, 1),
+ mt("#{kaaaaaaaaaaaaaaaaaaaa => v1,...}", p(M, 2)),
+ mt("#{kaaaaaaaaaaaaaaaaaaaa => 1,kbbbbbbbbbbbbbbbbbbbb => 2,...}",
+ p(M, 3)),
+
+ mt("#{kaaaaaaaaaaaaaaaaaaa => v1,kbbbbbbbbbbbbbbbbbbb => v2,\n"
+ " kccccccccccccccccccc => v3,...}", p(M, 4)),
+
+ mt("#{kaaaaaaaaaaaaaaaaaaa => v1,kbbbbbbbbbbbbbbbbbbb => v2,\n"
+ " kccccccccccccccccccc => v3,kddddddddddddddddddd => v4,...}",
+ p(M, 5)),
+
+ mt("#{kaaaaaaaaaaaaaaaaaaa => v1,kbbbbbbbbbbbbbbbbbbb => v2,\n"
+ " kccccccccccccccccccc => v3,kddddddddddddddddddd => v4,\n"
+ " keeeeeeeeeeeeeeeeeee => v5}", p(M, 6)),
+
+ weak("#{aaaaaaaaaaaaaaaaaaa => 1,bbbbbbbbbbbbbbbbbbbb => 2,\n"
+ " cccccccccccccccccccc => {3},\n"
+ " dddddddddddddddddddd => 4,eeeeeeeeeeeeeeeeeeee => 5}",
+ p(#{aaaaaaaaaaaaaaaaaaa => 1,bbbbbbbbbbbbbbbbbbbb => 2,
+ cccccccccccccccccccc => {3},
+ dddddddddddddddddddd => 4,eeeeeeeeeeeeeeeeeeee => 5}, -1)),
+
+ M2 = #{dddddddddddddddddddd => {1}, {aaaaaaaaaaaaaaaaaaaa} => 2,
+ {bbbbbbbbbbbbbbbbbbbb} => 3,{cccccccccccccccccccc} => 4,
+ {eeeeeeeeeeeeeeeeeeee} => 5},
+ "#{...}" = p(M2, 1),
+ weak("#{dddddddddddddddddddd => {...},...}", p(M2, 2)),
+ weak("#{dddddddddddddddddddd => {1},{...} => 2,...}", p(M2, 3)),
+
+ weak("#{dddddddddddddddddddd => {1},\n"
+ " {aaaaaaaaaaaaaaaaaaaa} => 2,\n"
+ " {...} => 3,...}", p(M2, 4)),
+
+ weak("#{dddddddddddddddddddd => {1},\n"
+ " {aaaaaaaaaaaaaaaaaaaa} => 2,\n"
+ " {bbbbbbbbbbbbbbbbbbbb} => 3,\n"
+ " {...} => 4,...}", p(M2, 5)),
+
+ weak("#{dddddddddddddddddddd => {1},\n"
+ " {aaaaaaaaaaaaaaaaaaaa} => 2,\n"
+ " {bbbbbbbbbbbbbbbbbbbb} => 3,\n"
+ " {cccccccccccccccccccc} => 4,\n"
+ " {...} => 5}", p(M2, 6)),
+
+ weak("#{dddddddddddddddddddd => {1},\n"
+ " {aaaaaaaaaaaaaaaaaaaa} => 2,\n"
+ " {bbbbbbbbbbbbbbbbbbbb} => 3,\n"
+ " {cccccccccccccccccccc} => 4,\n"
+ " {eeeeeeeeeeeeeeeeeeee} => 5}", p(M2, 7)),
+
+ M3 = #{kaaaaaaaaaaaaaaaaaaa => vuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuu,
+ kbbbbbbbbbbbbbbbbbbb => vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv,
+ kccccccccccccccccccc => vxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx,
+ kddddddddddddddddddd => vyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy,
+ keeeeeeeeeeeeeeeeeee => vzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz},
+
+ mt("#{aaaaaaaaaaaaaaaaaaaa =>\n"
+ " uuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuuu,\n"
+ " bbbbbbbbbbbbbbbbbbbb =>\n"
+ " vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv,\n"
+ " cccccccccccccccccccc =>\n"
+ " xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx,\n"
+ " dddddddddddddddddddd =>\n"
+ " yyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy,\n"
+ " eeeeeeeeeeeeeeeeeeee =>\n"
+ " zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz}", p(M3, -1)),
+
+ R4 = {c,{c,{c,{c,{c,{c,{c,{c,{c,{c,{c,{c,a,b},b},b},b},b},b},
+ b},b},b},b},b},b},
+ M4 = #{aaaaaaaaaaaaaaaaaaaa => R4,
+ bbbbbbbbbbbbbbbbbbbb => R4,
+ cccccccccccccccccccc => R4,
+ dddddddddddddddddddd => R4,
+ eeeeeeeeeeeeeeeeeeee => R4},
+
+ weak("#{aaaaaaaaaaaaaaaaaaaa =>\n"
+ " #c{f1 = #c{f1 = #c{...},f2 = b},f2 = b},\n"
+ " bbbbbbbbbbbbbbbbbbbb => #c{f1 = #c{f1 = {...},...},f2 = b},\n"
+ " cccccccccccccccccccc => #c{f1 = #c{...},f2 = b},\n"
+ " dddddddddddddddddddd => #c{f1 = {...},...},\n"
+ " eeeeeeeeeeeeeeeeeeee => #c{...}}", p(M4, 7)),
+
+ M5 = #{aaaaaaaaaaaaaaaaaaaa => R4},
+ mt("#{aaaaaaaaaaaaaaaaaaaa =>\n"
+ " #c{\n"
+ " f1 =\n"
+ " #c{\n"
+ " f1 =\n"
+ " #c{\n"
+ " f1 =\n"
+ " #c{\n"
+ " f1 =\n"
+ " #c{\n"
+ " f1 =\n"
+ " #c{\n"
+ " f1 =\n"
+ " #c{\n"
+ " f1 =\n"
+ " #c{\n"
+ " f1 =\n"
+ " #c{\n"
+ " f1 = #c{f1 = #c{f1 = #c{f1 = a,f2 = b},f2 = b},"
+ "f2 = b},\n"
+ " f2 = b},\n"
+ " f2 = b},\n"
+ " f2 = b},\n"
+ " f2 = b},\n"
+ " f2 = b},\n"
+ " f2 = b},\n"
+ " f2 = b},\n"
+ " f2 = b},\n"
+ " f2 = b}}", p(M5, -1)),
+ ok.
+
+%% Just check number of newlines and dots ('...').
+-define(WEAK, true).
+
+-ifdef(WEAK).
+
+weak(S, R) ->
+ (nl(S) =:= nl(R) andalso
+     dots(S) =:= dots(R)).
+
+nl(S) ->
+ [C || C <- S, C =:= $\n].
+
+dots(S) ->
+ [C || C <- S, C =:= $\.].
+
+-else. % WEAK
+
+weak(S, R) ->
+ mt(S, R).
+
+-endif. % WEAK
+
+%% If EXACT is defined: mt() matches strings exactly.
+%%
+%% if EXACT is not defined: do not match the strings exactly, but
+%% compare them assuming that all map keys and all map values are
+%% equal (by assuming all map keys and all map values have the same
+%% length and begin with $k and $v respectively).
+
+%-define(EXACT, true).
+
+-ifdef(EXACT).
+
+mt(S, R) ->
+ S =:= R.
+
+-else. % EXACT
+
+mt(S, R) ->
+ anon(S) =:= anon(R).
+
+anon(S) ->
+ {ok, Ts0, _} = erl_scan:string(S, 1, [text]),
+ Ts = anon1(Ts0),
+ text(Ts).
+
+anon1([]) -> [];
+anon1([{atom,Anno,Atom}=T|Ts]) ->
+ case erl_anno:text(Anno) of
+ "k" ++ _ ->
+ NewAnno = erl_anno:set_text("key", Anno),
+ [{atom,NewAnno,Atom}|anon1(Ts)];
+ "v" ++ _ ->
+ NewAnno = erl_anno:set_text("val", Anno),
+ [{atom,NewAnno,Atom}|anon1(Ts)];
+ _ ->
+ [T|anon1(Ts)]
+ end;
+anon1([T|Ts]) ->
+ [T|anon1(Ts)].
+
+text(Ts) ->
+ lists:append(text1(Ts)).
+
+text1([]) -> [];
+text1([T|Ts]) ->
+ Anno = element(2, T),
+ [erl_anno:text(Anno) | text1(Ts)].
+
+-endif. % EXACT
diff --git a/lib/stdlib/test/lists_SUITE.erl b/lib/stdlib/test/lists_SUITE.erl
index 531e97e8d6..5f2d8f0f4e 100644
--- a/lib/stdlib/test/lists_SUITE.erl
+++ b/lib/stdlib/test/lists_SUITE.erl
@@ -121,7 +121,7 @@ groups() ->
{zip, [parallel], [zip_unzip, zip_unzip3, zipwith, zipwith3]},
{misc, [parallel], [reverse, member, dropwhile, takewhile,
filter_partition, suffix, subtract, join,
- hof]}
+ hof, droplast]}
].
init_per_suite(Config) ->
diff --git a/lib/stdlib/test/qlc_SUITE.erl b/lib/stdlib/test/qlc_SUITE.erl
index c08e138ad3..2b5d52287e 100644
--- a/lib/stdlib/test/qlc_SUITE.erl
+++ b/lib/stdlib/test/qlc_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2004-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2004-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -886,11 +886,12 @@ eval_unique(Config) when is_list(Config) ->
[a] = qlc:e(Q2, {unique_all, true})
">>,
- <<"Q = qlc:q([SQV || SQV <- qlc:q([X || X <- [1,2,1]],unique)],
+ <<"Q = qlc:q([SQV || SQV <- qlc:q([X || X <- [1,2,1,#{a => 1}]],
+ unique)],
unique),
{call,_,_,[{lc,_,{var,_,'X'},[{generate,_,{var,_,'X'},_}]},_]} =
qlc:info(Q, [{format,abstract_code},unique_all]),
- [1,2] = qlc:e(Q)">>,
+ [1,2,#{a := 1}] = qlc:e(Q)">>,
<<"Q = qlc:q([X || X <- [1,2,1]]),
{call,_,_,[{lc,_,{var,_,'X'},[{generate,_,{var,_,'X'},_}]},_]} =
@@ -2637,7 +2638,16 @@ info(Config) when is_list(Config) ->
{cons, _, _, _}]},
{nil,_}}]}]} = i(QH, {format, abstract_code}),
[{5},{6}] = qlc:e(QH),
- [{4},{5},{6}] = qlc:e(F(3))">>
+ [{4},{5},{6}] = qlc:e(F(3))">>,
+
+ <<"Fun = fun ?MODULE:i/2,
+ L = [{#{k => #{v => Fun}}, Fun}],
+ H = qlc:q([Q || Q <- L, Q =:= {#{k => #{v => Fun}}, Fun}]),
+ L = qlc:e(H),
+ {call,_,_,[{lc,_,{var,_,'Q'},
+ [{generate,_,_,_},
+ {op,_,_,_,_}]}]} =
+ qlc:info(H, [{format,abstract_code}])">>
],
run(Config, Ts),
diff --git a/lib/stdlib/test/rand_SUITE.erl b/lib/stdlib/test/rand_SUITE.erl
index 8e7ac223a7..fe5eaccda5 100644
--- a/lib/stdlib/test/rand_SUITE.erl
+++ b/lib/stdlib/test/rand_SUITE.erl
@@ -283,13 +283,13 @@ gen(_, _, Acc) -> lists:reverse(Acc).
%% Check that the algorithms generate sound values.
basic_stats_uniform_1(Config) when is_list(Config) ->
- ct:timetrap({minutes,6}), %% valgrind needs a lot of time
+ ct:timetrap({minutes,15}), %% valgrind needs a lot of time
[basic_uniform_1(?LOOP, rand:seed_s(Alg), 0.0, array:new([{default, 0}]))
|| Alg <- algs()],
ok.
basic_stats_uniform_2(Config) when is_list(Config) ->
- ct:timetrap({minutes,6}), %% valgrind needs a lot of time
+ ct:timetrap({minutes,15}), %% valgrind needs a lot of time
[basic_uniform_2(?LOOP, rand:seed_s(Alg), 0, array:new([{default, 0}]))
|| Alg <- algs()],
ok.
@@ -396,7 +396,7 @@ crypto_uniform_n(N, State0) ->
%% Not a test but measures the time characteristics of the different algorithms
measure(Suite) when is_atom(Suite) -> [];
measure(_Config) ->
- ct:timetrap({minutes,6}), %% valgrind needs a lot of time
+ ct:timetrap({minutes,15}), %% valgrind needs a lot of time
Algos = [crypto64|algs()],
io:format("RNG uniform integer performance~n",[]),
_ = measure_1(random, fun(State) -> {int, random:uniform_s(10000, State)} end),
diff --git a/lib/stdlib/test/random_iolist.erl b/lib/stdlib/test/random_iolist.erl
index 555f063e0a..b62cf5b82b 100644
--- a/lib/stdlib/test/random_iolist.erl
+++ b/lib/stdlib/test/random_iolist.erl
@@ -24,17 +24,13 @@
-module(random_iolist).
--export([run/3, run2/3, standard_seed/0, compare/3, compare2/3,
+-export([run/3, standard_seed/0, compare/3,
random_iolist/1]).
run(Iter,Fun1,Fun2) ->
standard_seed(),
compare(Iter,Fun1,Fun2).
-run2(Iter,Fun1,Fun2) ->
- standard_seed(),
- compare2(Iter,Fun1,Fun2).
-
random_byte() ->
rand:uniform(256) - 1.
@@ -150,16 +146,6 @@ do_comp(List,F1,F2) ->
_ ->
true
end.
-
-do_comp(List,List2,F1,F2) ->
- X = F1(List,List2),
- Y = F2(List,List2),
- case X =:= Y of
- false ->
- exit({not_matching,List,List2,X,Y});
- _ ->
- true
- end.
compare(0,Fun1,Fun2) ->
do_comp(<<>>,Fun1,Fun2),
@@ -172,25 +158,3 @@ compare(N,Fun1,Fun2) ->
L = random_iolist(N),
do_comp(L,Fun1,Fun2),
compare(N-1,Fun1,Fun2).
-
-compare2(0,Fun1,Fun2) ->
- L = random_iolist(100),
- do_comp(<<>>,L,Fun1,Fun2),
- do_comp(L,<<>>,Fun1,Fun2),
- do_comp(<<>>,<<>>,Fun1,Fun2),
- do_comp([],L,Fun1,Fun2),
- do_comp(L,[],Fun1,Fun2),
- do_comp([],[],Fun1,Fun2),
- do_comp([[]|<<>>],L,Fun1,Fun2),
- do_comp(L,[[]|<<>>],Fun1,Fun2),
- do_comp([[]|<<>>],[[]|<<>>],Fun1,Fun2),
- do_comp([<<>>,[]|<<>>],L,Fun1,Fun2),
- do_comp(L,[<<>>,[]|<<>>],Fun1,Fun2),
- do_comp([<<>>,[]|<<>>],[<<>>,[]|<<>>],Fun1,Fun2),
- true;
-
-compare2(N,Fun1,Fun2) ->
- L = random_iolist(N),
- L2 = random_iolist(N),
- do_comp(L,L2,Fun1,Fun2),
- compare2(N-1,Fun1,Fun2).
diff --git a/lib/stdlib/test/random_unicode_list.erl b/lib/stdlib/test/random_unicode_list.erl
index 8db2fa8b56..2eeb28113d 100644
--- a/lib/stdlib/test/random_unicode_list.erl
+++ b/lib/stdlib/test/random_unicode_list.erl
@@ -24,7 +24,7 @@
-module(random_unicode_list).
--export([run/3, run/4, run2/3, standard_seed/0, compare/4, compare2/3,
+-export([run/3, run/4, standard_seed/0, compare/4,
random_unicode_list/2]).
run(I,F1,F2) ->
@@ -33,10 +33,6 @@ run(Iter,Fun1,Fun2,Enc) ->
standard_seed(),
compare(Iter,Fun1,Fun2,Enc).
-run2(Iter,Fun1,Fun2) ->
- standard_seed(),
- compare2(Iter,Fun1,Fun2).
-
int_to_utf8(I) when I =< 16#7F ->
<<I>>;
int_to_utf8(I) when I =< 16#7FF ->
@@ -225,16 +221,6 @@ do_comp(List,F1,F2) ->
_ ->
true
end.
-
-do_comp(List,List2,F1,F2) ->
- X = F1(List,List2),
- Y = F2(List,List2),
- case X =:= Y of
- false ->
- exit({not_matching,List,List2,X,Y});
- _ ->
- true
- end.
compare(0,Fun1,Fun2,_Enc) ->
do_comp(<<>>,Fun1,Fun2),
@@ -247,25 +233,3 @@ compare(N,Fun1,Fun2,Enc) ->
L = random_unicode_list(N,Enc),
do_comp(L,Fun1,Fun2),
compare(N-1,Fun1,Fun2,Enc).
-
-compare2(0,Fun1,Fun2) ->
- L = random_unicode_list(100,utf8),
- do_comp(<<>>,L,Fun1,Fun2),
- do_comp(L,<<>>,Fun1,Fun2),
- do_comp(<<>>,<<>>,Fun1,Fun2),
- do_comp([],L,Fun1,Fun2),
- do_comp(L,[],Fun1,Fun2),
- do_comp([],[],Fun1,Fun2),
- do_comp([[]|<<>>],L,Fun1,Fun2),
- do_comp(L,[[]|<<>>],Fun1,Fun2),
- do_comp([[]|<<>>],[[]|<<>>],Fun1,Fun2),
- do_comp([<<>>,[]|<<>>],L,Fun1,Fun2),
- do_comp(L,[<<>>,[]|<<>>],Fun1,Fun2),
- do_comp([<<>>,[]|<<>>],[<<>>,[]|<<>>],Fun1,Fun2),
- true;
-
-compare2(N,Fun1,Fun2) ->
- L = random_unicode_list(N,utf8),
- L2 = random_unicode_list(N,utf8),
- do_comp(L,L2,Fun1,Fun2),
- compare2(N-1,Fun1,Fun2).
diff --git a/lib/stdlib/test/re_testoutput1_replacement_test.erl b/lib/stdlib/test/re_testoutput1_replacement_test.erl
index a40800d760..563e0001e4 100644
--- a/lib/stdlib/test/re_testoutput1_replacement_test.erl
+++ b/lib/stdlib/test/re_testoutput1_replacement_test.erl
@@ -18,7 +18,7 @@
%% %CopyrightEnd%
%%
-module(re_testoutput1_replacement_test).
--compile(export_all).
+-export([run/0]).
-compile(no_native).
%% This file is generated by running run_pcre_tests:gen_repl_test("re_SUITE_data/testoutput1")
run() ->
diff --git a/lib/stdlib/test/re_testoutput1_split_test.erl b/lib/stdlib/test/re_testoutput1_split_test.erl
index 02987971fa..b39cb53a55 100644
--- a/lib/stdlib/test/re_testoutput1_split_test.erl
+++ b/lib/stdlib/test/re_testoutput1_split_test.erl
@@ -18,7 +18,7 @@
%% %CopyrightEnd%
%%
-module(re_testoutput1_split_test).
--compile(export_all).
+-export([run/0]).
-compile(no_native).
%% This file is generated by running run_pcre_tests:gen_split_test("re_SUITE_data/testoutput1")
join([]) -> [];
diff --git a/lib/stdlib/test/run_pcre_tests.erl b/lib/stdlib/test/run_pcre_tests.erl
index ae56db59d6..b62674d6e0 100644
--- a/lib/stdlib/test/run_pcre_tests.erl
+++ b/lib/stdlib/test/run_pcre_tests.erl
@@ -18,8 +18,7 @@
%% %CopyrightEnd%
%%
-module(run_pcre_tests).
-
--compile(export_all).
+-export([test/1,gen_split_test/1,gen_repl_test/1]).
test(RootDir) ->
put(verbose,false),
@@ -119,49 +118,6 @@ test([{RE0,Line,Options0,Tests}|T],PreCompile,XMode,REAsList) ->
end
end.
-loopexec(_,_,X,Y,_,_) when X > Y ->
- {match,[]};
-loopexec(P,Chal,X,Y,Unicode,Xopt) ->
- case re:run(Chal,P,[{offset,X}]++Xopt) of
- nomatch ->
- {match,[]};
- {match,[{A,B}|More]} ->
- {match,Rest} =
- case B>0 of
- true ->
- loopexec(P,Chal,A+B,Y,Unicode,Xopt);
- false ->
- {match,M} = case re:run(Chal,P,[{offset,X},notempty,anchored]++Xopt) of
- nomatch ->
- {match,[]};
- {match,Other} ->
- {match,fixup(Chal,Other,0)}
- end,
- NewA = forward(Chal,A,1,Unicode),
- {match,MM} = loopexec(P,Chal,NewA,Y,Unicode,Xopt),
- {match,M ++ MM}
- end,
- {match,fixup(Chal,[{A,B}|More],0)++Rest}
- end.
-
-forward(_Chal,A,0,_) ->
- A;
-forward(_Chal,A,N,false) ->
- A+N;
-forward(Chal,A,N,true) ->
- <<_:A/binary,Tl/binary>> = Chal,
- Forw = case Tl of
- <<1:1,1:1,0:1,_:5,_/binary>> ->
- 2;
- <<1:1,1:1,1:1,0:1,_:4,_/binary>> ->
- 3;
- <<1:1,1:1,1:1,1:1,0:1,_:3,_/binary>> ->
- 4;
- _ ->
- 1
- end,
- forward(Chal,A+Forw,N-1,true).
-
contains_eightbit(<<>>) ->
false;
contains_eightbit(<<X:8,_/binary>>) when X >= 128 ->
@@ -201,23 +157,6 @@ clean_duplicates([X|T],L) ->
end.
-global_fixup(_,nomatch) ->
- nomatch;
-global_fixup(P,{match,M}) ->
- {match,lists:flatten(global_fixup2(P,M))}.
-
-global_fixup2(_,[]) ->
- [];
-global_fixup2(P,[H|T]) ->
- [gfixup_one(P,0,H)|global_fixup2(P,T)].
-
-gfixup_one(_,_,[]) ->
- [];
-gfixup_one(P,I,[{Start,Len}|T]) ->
- <<_:Start/binary,R:Len/binary,_/binary>> = P,
- [{I,R}|gfixup_one(P,I+1,T)].
-
-
press([]) ->
[];
press([H|T]) ->
@@ -981,7 +920,7 @@ gen_split_test(OneFile) ->
ErlFileName = ErlModule++".erl",
{ok,F}= file:open(ErlFileName,[write]),
io:format(F,"-module(~s).~n",[ErlModule]),
- io:format(F,"-compile(export_all).~n",[]),
+ io:format(F,"-export([run/0]).~n",[]),
io:format(F,"-compile(no_native).~n",[]),
io:format(F,"%% This file is generated by running ~w:gen_split_test(~p)~n",
[?MODULE,OneFile]),
@@ -1024,7 +963,7 @@ dumponesplit(F,{RE,Line,O,TS}) ->
"$x =~~ s/\\\\/\\\\\\\\/g; $x =~~ s/\\\"/\\\\\"/g; "
"print \" <<\\\"$x\\\">> = "
"iolist_to_binary(join(re:split(\\\"~s\\\","
- "\\\"~s\\\",~p))), \\n\";'~n",
+ "\\\"~s\\\",~p))),\\n\";'~n",
[zsafe(safe(RE)),
SSS,
ysafe(safe(Str)),
@@ -1035,7 +974,7 @@ dumponesplit(F,{RE,Line,O,TS}) ->
"$x =~~ s/\\\\/\\\\\\\\/g; $x =~~ s/\\\"/\\\\\"/g; "
"print \" <<\\\"$x\\\">> = "
"iolist_to_binary(join(re:split(\\\"~s\\\","
- "\\\"~s\\\",~p))), \\n\";'~n",
+ "\\\"~s\\\",~p))),\\n\";'~n",
[zsafe(safe(RE)),
SSS,
ysafe(safe(Str)),
@@ -1046,7 +985,7 @@ dumponesplit(F,{RE,Line,O,TS}) ->
"$x =~~ s/\\\\/\\\\\\\\/g; $x =~~ s/\\\"/\\\\\"/g; "
"print \" <<\\\"$x\\\">> = "
"iolist_to_binary(join(re:split(\\\"~s\\\","
- "\\\"~s\\\",~p))), \\n\";'~n",
+ "\\\"~s\\\",~p))),\\n\";'~n",
[zsafe(safe(RE)),
SSS,
ysafe(safe(Str)),
@@ -1071,7 +1010,7 @@ gen_repl_test(OneFile) ->
ErlFileName = ErlModule++".erl",
{ok,F}= file:open(ErlFileName,[write]),
io:format(F,"-module(~s).~n",[ErlModule]),
- io:format(F,"-compile(export_all).~n",[]),
+ io:format(F,"-export([run/0]).~n",[]),
io:format(F,"-compile(no_native).~n",[]),
io:format(F,"%% This file is generated by running ~w:gen_repl_test(~p)~n",
[?MODULE,OneFile]),
diff --git a/lib/stdlib/test/shell_SUITE.erl b/lib/stdlib/test/shell_SUITE.erl
index 15ccdea284..4864bc3d72 100644
--- a/lib/stdlib/test/shell_SUITE.erl
+++ b/lib/stdlib/test/shell_SUITE.erl
@@ -282,7 +282,7 @@ restricted_local(Config) when is_list(Config) ->
comm_err(<<"begin F=fun() -> hello end, foo(F) end.">>),
"exception error: undefined shell command banan/1" =
comm_err(<<"begin F=fun() -> hello end, banan(F) end.">>),
- "{error,"++_ = t(<<"begin F=fun() -> hello end, c(F) end.">>),
+ "Recompiling "++_ = t(<<"c(shell_SUITE).">>),
"exception exit: restricted shell does not allow l(" ++ _ =
comm_err(<<"begin F=fun() -> hello end, l(F) end.">>),
"exception error: variable 'F' is unbound" =
diff --git a/lib/stdlib/test/sofs_SUITE.erl b/lib/stdlib/test/sofs_SUITE.erl
index 13c12ad2f2..f67bf16f0f 100644
--- a/lib/stdlib/test/sofs_SUITE.erl
+++ b/lib/stdlib/test/sofs_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2001-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2001-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -1837,11 +1837,8 @@ digraph(Conf) when is_list(Conf) ->
ok.
digraph_fail(ExitReason, Fail) ->
- {'EXIT', {ExitReason, [{sofs,family_to_digraph,A,_}|_]}} = Fail,
- case {test_server:is_native(sofs),A} of
- {false,[_,_]} -> ok;
- {true,2} -> ok
- end.
+ {'EXIT', {ExitReason, [{sofs,family_to_digraph,2,_}|_]}} = Fail,
+ ok.
constant_function(Conf) when is_list(Conf) ->
E = empty_set(),
diff --git a/lib/stdlib/test/tar_SUITE.erl b/lib/stdlib/test/tar_SUITE.erl
index 6f3979bb77..d6b6d3f80c 100644
--- a/lib/stdlib/test/tar_SUITE.erl
+++ b/lib/stdlib/test/tar_SUITE.erl
@@ -22,9 +22,10 @@
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_group/2,end_per_group/2, borderline/1, atomic/1, long_names/1,
create_long_names/1, bad_tar/1, errors/1, extract_from_binary/1,
- extract_from_binary_compressed/1,
+ extract_from_binary_compressed/1, extract_filtered/1,
extract_from_open_file/1, symlinks/1, open_add_close/1, cooked_compressed/1,
- memory/1,unicode/1]).
+ memory/1,unicode/1,read_other_implementations/1,
+ sparse/1, init/1]).
-include_lib("common_test/include/ct.hrl").
-include_lib("kernel/include/file.hrl").
@@ -35,7 +36,10 @@ all() ->
[borderline, atomic, long_names, create_long_names,
bad_tar, errors, extract_from_binary,
extract_from_binary_compressed, extract_from_open_file,
- symlinks, open_add_close, cooked_compressed, memory, unicode].
+ extract_filtered,
+ symlinks, open_add_close, cooked_compressed, memory, unicode,
+ read_other_implementations,
+ sparse,init].
groups() ->
[].
@@ -84,17 +88,30 @@ borderline(Config) when is_list(Config) ->
ok.
borderline_test(Size, TempDir) ->
- Archive = filename:join(TempDir, "ar_"++integer_to_list(Size)++".tar"),
- Name = filename:join(TempDir, "file_"++integer_to_list(Size)),
io:format("Testing size ~p", [Size]),
+ borderline_test(Size, TempDir, true),
+ borderline_test(Size, TempDir, false),
+ ok.
+
+borderline_test(Size, TempDir, IsUstar) ->
+ Prefix = case IsUstar of
+ true ->
+ "file_";
+ false ->
+ lists:duplicate(100, $f) ++ "ile_"
+ end,
+ SizeList = integer_to_list(Size),
+ Archive = filename:join(TempDir, "ar_"++ SizeList ++".tar"),
+ Name = filename:join(TempDir, Prefix++SizeList),
%% Create a file and archive it.
X0 = erlang:monotonic_time(),
- file:write_file(Name, random_byte_list(X0, Size)),
+ ok = file:write_file(Name, random_byte_list(X0, Size)),
ok = erl_tar:create(Archive, [Name]),
ok = file:delete(Name),
%% Verify listing and extracting.
+ IsUstar = is_ustar(Archive),
{ok, [Name]} = erl_tar:table(Archive),
ok = erl_tar:extract(Archive, [verbose]),
@@ -103,7 +120,12 @@ borderline_test(Size, TempDir) ->
true = match_byte_list(X0, binary_to_list(Bin)),
%% Verify that Unix tar can read it.
- tar_tf(Archive, Name),
+ case IsUstar of
+ true ->
+ tar_tf(Archive, Name);
+ false ->
+ ok
+ end,
ok.
@@ -336,6 +358,7 @@ create_long_names() ->
ok = erl_tar:tt(TarName),
%% Extract and verify.
+ true = is_ustar(TarName),
ExtractDir = "extract_dir",
ok = file:make_dir(ExtractDir),
ok = erl_tar:extract(TarName, [{cwd,ExtractDir}]),
@@ -357,7 +380,7 @@ make_dirs([], Dir) ->
%% Try erl_tar:table/2 and erl_tar:extract/2 on some corrupted tar files.
bad_tar(Config) when is_list(Config) ->
try_bad("bad_checksum", bad_header, Config),
- try_bad("bad_octal", bad_header, Config),
+ try_bad("bad_octal", invalid_tar_checksum, Config),
try_bad("bad_too_short", eof, Config),
try_bad("bad_even_shorter", eof, Config),
ok.
@@ -370,8 +393,10 @@ try_bad(Name0, Reason, Config) ->
Name = Name0 ++ ".tar",
io:format("~nTrying ~s", [Name]),
Full = filename:join(DataDir, Name),
- Opts = [verbose, {cwd, PrivDir}],
+ Dest = filename:join(PrivDir, Name0),
+ Opts = [verbose, {cwd, Dest}],
Expected = {error, Reason},
+ io:fwrite("Expected: ~p\n", [Expected]),
case {erl_tar:table(Full, Opts), erl_tar:extract(Full, Opts)} of
{Expected, Expected} ->
io:format("Result: ~p", [Expected]),
@@ -493,6 +518,27 @@ extract_from_binary_compressed(Config) when is_list(Config) ->
ok.
+%% Test extracting a subset of the files in a tar archive.
+extract_filtered(Config) when is_list(Config) ->
+ DataDir = proplists:get_value(data_dir, Config),
+ PrivDir = proplists:get_value(priv_dir, Config),
+ Long = filename:join(DataDir, "no_fancy_stuff.tar"),
+ ExtractDir = filename:join(PrivDir, "extract_from_binary"),
+ ok = file:make_dir(ExtractDir),
+
+ ok = erl_tar:extract(Long, [{cwd,ExtractDir},{files,["no_fancy_stuff/EPLICENCE"]}]),
+
+ %% Verify.
+ Dir = filename:join(ExtractDir, "no_fancy_stuff"),
+ true = filelib:is_dir(Dir),
+ false = filelib:is_file(filename:join(Dir, "a_dir_list")),
+ true = filelib:is_file(filename:join(Dir, "EPLICENCE")),
+
+ %% Clean up.
+ delete_files([ExtractDir]),
+
+ ok.
+
%% Test extracting a tar archive from an open file.
extract_from_open_file(Config) when is_list(Config) ->
DataDir = proplists:get_value(data_dir, Config),
@@ -573,6 +619,7 @@ symlinks(Dir, BadSymlink, PointsTo) ->
ok = file:write_file(AFile, ALine),
ok = file:make_symlink(AFile, GoodSymlink),
ok = erl_tar:create(Tar, [BadSymlink, GoodSymlink, AFile], [verbose]),
+ true = is_ustar(Tar),
%% List contents of tar file.
@@ -581,6 +628,7 @@ symlinks(Dir, BadSymlink, PointsTo) ->
%% Also create another archive with the dereference flag.
ok = erl_tar:create(DerefTar, [AFile, GoodSymlink], [dereference, verbose]),
+ true = is_ustar(DerefTar),
%% Extract files to a new directory.
@@ -619,13 +667,50 @@ long_symlink(Dir) ->
ok = file:set_cwd(Dir),
AFile = "long_symlink",
- FarTooLong = "/tmp/aarrghh/this/path/is/far/longer/than/one/hundred/characters/which/is/the/maximum/number/of/characters/allowed",
- ok = file:make_symlink(FarTooLong, AFile),
- {error,Error} = erl_tar:create(Tar, [AFile], [verbose]),
- io:format("Error: ~s\n", [erl_tar:format_error(Error)]),
- {FarTooLong,symbolic_link_too_long} = Error,
+ RequiresPAX = "/tmp/aarrghh/this/path/is/far/longer/than/one/hundred/characters/which/is/the/maximum/number/of/characters/allowed",
+ ok = file:make_symlink(RequiresPAX, AFile),
+ ok = erl_tar:create(Tar, [AFile], [verbose]),
+ false = is_ustar(Tar),
+ NewDir = filename:join(Dir, "extracted"),
+ _ = file:make_dir(NewDir),
+ ok = erl_tar:extract(Tar, [{cwd, NewDir}, verbose]),
+ ok = file:set_cwd(NewDir),
+ {ok, #file_info{type=symlink}} = file:read_link_info(AFile),
+ {ok, RequiresPAX} = file:read_link(AFile),
+ ok.
+
+init(Config) when is_list(Config) ->
+ PrivDir = proplists:get_value(priv_dir, Config),
+ ok = file:set_cwd(PrivDir),
+ Dir = filename:join(PrivDir, "init"),
+ ok = file:make_dir(Dir),
+
+ [{FileOne,_,_}|_] = oac_files(),
+ TarOne = filename:join(Dir, "archive1.tar"),
+ {ok,Fd} = file:open(TarOne, [write]),
+
+ %% If the arity of the fun is wrong, badarg should be returned
+ {error, badarg} = erl_tar:init(Fd, write, fun file_op_bad/1),
+
+ %% Otherwise we should be good to go
+ {ok, Tar} = erl_tar:init(Fd, write, fun file_op/2),
+ ok = erl_tar:add(Tar, FileOne, []),
+ ok = erl_tar:close(Tar),
+ {ok, [FileOne]} = erl_tar:table(TarOne),
ok.
+file_op_bad(_) ->
+ throw({error, should_never_be_called}).
+
+file_op(write, {Fd, Data}) ->
+ file:write(Fd, Data);
+file_op(position, {Fd, Pos}) ->
+ file:position(Fd, Pos);
+file_op(read2, {Fd, Size}) ->
+ file:read(Fd, Size);
+file_op(close, Fd) ->
+ file:close(Fd).
+
open_add_close(Config) when is_list(Config) ->
PrivDir = proplists:get_value(priv_dir, Config),
ok = file:set_cwd(PrivDir),
@@ -643,17 +728,26 @@ open_add_close(Config) when is_list(Config) ->
TarOne = filename:join(Dir, "archive1.tar"),
{ok,AD} = erl_tar:open(TarOne, [write]),
ok = erl_tar:add(AD, FileOne, []),
- ok = erl_tar:add(AD, FileTwo, "second file", []),
- ok = erl_tar:add(AD, FileThree, [verbose]),
+
+ %% Add with {NameInArchive,Name}
+ ok = erl_tar:add(AD, {"second file", FileTwo}, []),
+
+ %% Add with {binary, Bin}
+ {ok,FileThreeBin} = file:read_file(FileThree),
+ ok = erl_tar:add(AD, {FileThree, FileThreeBin}, [verbose]),
+
+ %% Add with Name
ok = erl_tar:add(AD, FileThree, "chunked", [{chunks,11411},verbose]),
ok = erl_tar:add(AD, ADir, [verbose]),
ok = erl_tar:add(AD, AnotherDir, [verbose]),
ok = erl_tar:close(AD),
+ true = is_ustar(TarOne),
ok = erl_tar:t(TarOne),
ok = erl_tar:tt(TarOne),
- {ok,[FileOne,"second file",FileThree,"chunked",ADir,SomeContent]} = erl_tar:table(TarOne),
+ Expected = {ok,[FileOne,"second file",FileThree,"chunked",ADir,SomeContent]},
+ Expected = erl_tar:table(TarOne),
delete_files(["oac_file","oac_small","oac_big",Dir,AnotherDir,ADir]),
@@ -718,6 +812,41 @@ memory(Config) when is_list(Config) ->
ok = delete_files([Name1,Name2]),
ok.
+read_other_implementations(Config) when is_list(Config) ->
+ DataDir = proplists:get_value(data_dir, Config),
+ Files = ["v7.tar", "gnu.tar", "bsd.tar",
+ "star.tar", "pax_mtime.tar"],
+ do_read_other_implementations(Files, DataDir).
+
+do_read_other_implementations([], _DataDir) ->
+ ok;
+do_read_other_implementations([File|Rest], DataDir) ->
+ io:format("~nTrying ~s", [File]),
+ Full = filename:join(DataDir, File),
+ {ok, _} = erl_tar:table(Full),
+ {ok, _} = erl_tar:extract(Full, [memory]),
+ do_read_other_implementations(Rest, DataDir).
+
+
+%% Test handling of sparse files
+sparse(Config) when is_list(Config) ->
+ DataDir = proplists:get_value(data_dir, Config),
+ PrivDir = proplists:get_value(priv_dir, Config),
+ Sparse01Empty = "sparse01_empty.tar",
+ Sparse01 = "sparse01.tar",
+ Sparse10Empty = "sparse10_empty.tar",
+ Sparse10 = "sparse10.tar",
+ do_sparse([Sparse01Empty, Sparse01, Sparse10Empty, Sparse10], DataDir, PrivDir).
+
+do_sparse([], _DataDir, _PrivDir) ->
+ ok;
+do_sparse([Name|Rest], DataDir, PrivDir) ->
+ io:format("~nTrying sparse file ~s", [Name]),
+ Full = filename:join(DataDir, Name),
+ {ok, [_]} = erl_tar:table(Full),
+ {ok, _} = erl_tar:extract(Full, [memory]),
+ do_sparse(Rest, DataDir, PrivDir).
+
%% Test filenames with characters outside the US ASCII range.
unicode(Config) when is_list(Config) ->
run_unicode_node(Config, "+fnu"),
@@ -753,6 +882,9 @@ do_unicode(PrivDir) ->
Names = lists:sort(unicode_create_files()),
Tar = "unicöde.tar",
ok = erl_tar:create(Tar, ["unicöde"], []),
+
+ %% Unicode filenames require PAX format.
+ false = is_ustar(Tar),
{ok,Names0} = erl_tar:table(Tar, []),
Names = lists:sort(Names0),
_ = [ok = file:delete(Name) || Name <- Names],
@@ -850,3 +982,15 @@ start_node(Name, Args) ->
ct:log("Node ~p started~n", [Node]),
Node
end.
+
+%% Check whether the given tar file is a plain USTAR archive
+%% without any PAX extensions.
+is_ustar(File) ->
+ {ok,Bin} = file:read_file(File),
+ <<_:257/binary,"ustar",0,_/binary>> = Bin,
+ <<_:156/binary,Type:8,_/binary>> = Bin,
+ case Type of
+ $x -> false;
+ $g -> false;
+ _ -> true
+ end.
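The new init/1 test case above exercises erl_tar:init/3, which lets the caller supply the file operations instead of having erl_tar open the file itself. Below is a minimal sketch (not part of the patch) of that usage; it shows only the callback clauses the test implements (write, position, read2, close), and the module and function names are illustrative.

    -module(tar_init_sketch).
    -export([create/2]).

    %% Sketch only: drive erl_tar through a caller-supplied
    %% file-operation fun, mirroring the init/1 test case above.
    create(TarFile, FileToAdd) ->
        {ok, Fd} = file:open(TarFile, [write]),
        FileOp = fun(write, {F, Data})   -> file:write(F, Data);
                    (position, {F, Pos}) -> file:position(F, Pos);
                    (read2, {F, Size})   -> file:read(F, Size);
                    (close, F)           -> file:close(F)
                 end,
        {ok, Tar} = erl_tar:init(Fd, write, FileOp),
        ok = erl_tar:add(Tar, FileToAdd, []),
        ok = erl_tar:close(Tar).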
diff --git a/lib/stdlib/test/tar_SUITE_data/bsd.tar b/lib/stdlib/test/tar_SUITE_data/bsd.tar
new file mode 100644
index 0000000000..8c31864be0
--- /dev/null
+++ b/lib/stdlib/test/tar_SUITE_data/bsd.tar
Binary files differ
diff --git a/lib/stdlib/test/tar_SUITE_data/gnu.tar b/lib/stdlib/test/tar_SUITE_data/gnu.tar
new file mode 100644
index 0000000000..60268065c1
--- /dev/null
+++ b/lib/stdlib/test/tar_SUITE_data/gnu.tar
Binary files differ
diff --git a/lib/stdlib/test/tar_SUITE_data/pax_mtime.tar b/lib/stdlib/test/tar_SUITE_data/pax_mtime.tar
new file mode 100644
index 0000000000..1b6e80ffac
--- /dev/null
+++ b/lib/stdlib/test/tar_SUITE_data/pax_mtime.tar
Binary files differ
diff --git a/lib/stdlib/test/tar_SUITE_data/sparse00.tar b/lib/stdlib/test/tar_SUITE_data/sparse00.tar
new file mode 100644
index 0000000000..61a04de90b
--- /dev/null
+++ b/lib/stdlib/test/tar_SUITE_data/sparse00.tar
Binary files differ
diff --git a/lib/stdlib/test/tar_SUITE_data/sparse01.tar b/lib/stdlib/test/tar_SUITE_data/sparse01.tar
new file mode 100644
index 0000000000..61a04de90b
--- /dev/null
+++ b/lib/stdlib/test/tar_SUITE_data/sparse01.tar
Binary files differ
diff --git a/lib/stdlib/test/tar_SUITE_data/sparse01_empty.tar b/lib/stdlib/test/tar_SUITE_data/sparse01_empty.tar
new file mode 100644
index 0000000000..efa6d060f4
--- /dev/null
+++ b/lib/stdlib/test/tar_SUITE_data/sparse01_empty.tar
Binary files differ
diff --git a/lib/stdlib/test/tar_SUITE_data/sparse10.tar b/lib/stdlib/test/tar_SUITE_data/sparse10.tar
new file mode 100644
index 0000000000..61a04de90b
--- /dev/null
+++ b/lib/stdlib/test/tar_SUITE_data/sparse10.tar
Binary files differ
diff --git a/lib/stdlib/test/tar_SUITE_data/sparse10_empty.tar b/lib/stdlib/test/tar_SUITE_data/sparse10_empty.tar
new file mode 100644
index 0000000000..efa6d060f4
--- /dev/null
+++ b/lib/stdlib/test/tar_SUITE_data/sparse10_empty.tar
Binary files differ
diff --git a/lib/stdlib/test/tar_SUITE_data/star.tar b/lib/stdlib/test/tar_SUITE_data/star.tar
new file mode 100644
index 0000000000..b0631e3b13
--- /dev/null
+++ b/lib/stdlib/test/tar_SUITE_data/star.tar
Binary files differ
diff --git a/lib/stdlib/test/tar_SUITE_data/v7.tar b/lib/stdlib/test/tar_SUITE_data/v7.tar
new file mode 100644
index 0000000000..9918e006bb
--- /dev/null
+++ b/lib/stdlib/test/tar_SUITE_data/v7.tar
Binary files differ
diff --git a/lib/stdlib/test/zip_SUITE.erl b/lib/stdlib/test/zip_SUITE.erl
index 7d90795c9e..f0feda217a 100644
--- a/lib/stdlib/test/zip_SUITE.erl
+++ b/lib/stdlib/test/zip_SUITE.erl
@@ -27,7 +27,7 @@
openzip_api/1, zip_api/1, open_leak/1, unzip_jar/1,
unzip_traversal_exploit/1,
compress_control/1,
- foldl/1]).
+ foldl/1,fd_leak/1]).
-include_lib("common_test/include/ct.hrl").
-include_lib("kernel/include/file.hrl").
@@ -40,7 +40,7 @@ all() ->
unzip_to_binary, zip_to_binary, unzip_options,
zip_options, list_dir_options, aliases, openzip_api,
zip_api, open_leak, unzip_jar, compress_control, foldl,
- unzip_traversal_exploit].
+ unzip_traversal_exploit,fd_leak].
groups() ->
[].
@@ -882,3 +882,35 @@ foldl(Config) ->
{error, enoent} = zip:foldl(ZipFun, [], File),
ok.
+
+fd_leak(Config) ->
+ ok = file:set_cwd(proplists:get_value(priv_dir, Config)),
+ DataDir = proplists:get_value(data_dir, Config),
+ Name = filename:join(DataDir, "bad_file_header.zip"),
+ BadExtract = fun() ->
+ {error,bad_file_header} = zip:extract(Name),
+ ok
+ end,
+ do_fd_leak(BadExtract, 1),
+
+ BadCreate = fun() ->
+ {error,enoent} = zip:zip("failed.zip",
+ ["none"]),
+ ok
+ end,
+ do_fd_leak(BadCreate, 1),
+
+ ok.
+
+do_fd_leak(_Bad, 10000) ->
+ ok;
+do_fd_leak(Bad, N) ->
+ try Bad() of
+ ok ->
+ do_fd_leak(Bad, N + 1)
+ catch
+ C:R ->
+ Stk = erlang:get_stacktrace(),
+ io:format("Bad error after ~p attempts\n", [N]),
+ erlang:raise(C, R, Stk)
+ end.
diff --git a/lib/syntax_tools/src/igor.erl b/lib/syntax_tools/src/igor.erl
index 72170ec5da..b92cd8d607 100644
--- a/lib/syntax_tools/src/igor.erl
+++ b/lib/syntax_tools/src/igor.erl
@@ -417,7 +417,7 @@ merge_files(Name, Files, Options) ->
%%
%% <dd>Specifies a list of rules for associating object files with
%% source files, to be passed to the function
-%% `filename:find_src/2'. This can be used to change the
+%% `filelib:find_source/2'. This can be used to change the
%% way Igor looks for source files. If this option is not specified,
%% the default system rules are used. The first occurrence of this
%% option completely overrides any later in the option list.</dd>
@@ -462,7 +462,7 @@ merge_files(Name, Files, Options) ->
%% @see merge/3
%% @see merge_files/3
%% @see merge_sources/3
-%% @see //stdlib/filename:find_src/2
+%% @see //stdlib/filelib:find_source/2
%% @see epp_dodger
-spec merge_files(atom(), erl_syntax:forms(), [file:filename()], [option()]) ->
@@ -2746,8 +2746,8 @@ read_module(Name, Options) ->
%% It seems that we have no file - go on anyway,
%% just to get a decent error message.
read_module_1(Name, Options);
- {Name1, _} ->
- read_module_1(Name1 ++ ".erl", Options)
+ {ok, Name1} ->
+ read_module_1(Name1, Options)
end
end.
@@ -2807,9 +2807,9 @@ check_forms([], _) ->
ok.
find_src(Name, undefined) ->
- filename:find_src(filename(Name));
+ filelib:find_source(filename(Name));
find_src(Name, Rules) ->
- filename:find_src(filename(Name), Rules).
+ filelib:find_source(filename(Name), Rules).
%% file_type(filename()) -> {value, Type} | none
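The igor.erl hunks above replace filename:find_src/1,2 with filelib:find_source/1,2, whose success return is {ok, SrcFile} rather than {SrcFile, Options}. A small sketch of the call shape the updated read_module code matches on; the module name and the assumption that code:which/1 returns a beam path are illustrative.

    -module(find_src_sketch).
    -export([source_of/1]).

    %% Sketch only: locate the source file for a loaded module's beam
    %% file. Returns {ok, SrcFile} or {error, not_found}; preloaded
    %% modules, for which code:which/1 returns an atom, are not handled.
    source_of(Mod) ->
        Beam = code:which(Mod),
        filelib:find_source(Beam).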
diff --git a/lib/tools/doc/src/make.xml b/lib/tools/doc/src/make.xml
index fddf5ebd7b..6b878f72fb 100644
--- a/lib/tools/doc/src/make.xml
+++ b/lib/tools/doc/src/make.xml
@@ -43,15 +43,15 @@
<fsummary>Compile a set of modules.</fsummary>
<type>
<v>Options = [Option]</v>
- <v>&nbsp;Option = noexec | load | netload | &lt;compiler option&gt;</v>
+ <v>&nbsp;Option = noexec | load | netload | {emake, Emake} | &lt;compiler option&gt;</v>
</type>
<desc>
- <p>This function first looks in the current working directory
- for a file named <c>Emakefile</c> (see below) specifying the
- set of modules to compile and the compile options to use. If
- no such file is found, the set of modules to compile
- defaults to all modules in the current working
- directory.</p>
+    <p>This function determines the set of modules to compile and the
+      compile options to use by first looking for the <c>emake</c> make
+      option and, if it is not present, by reading the configuration from
+      a file named <c>Emakefile</c> (see below). If no such file is found,
+      the set of modules to compile defaults to all modules in the
+      current working directory.</p>
<p>Traversing the set of modules, it then recompiles every module for
which at least one of the following conditions apply:</p>
<list type="bulleted">
@@ -77,6 +77,9 @@
<item><c>netload</c> <br></br>
Net load mode. Loads all recompiled modules on all known nodes.</item>
+      <item><c>{emake, Emake}</c> <br></br>
+
+        Specifies the configuration explicitly, instead of reading it from an <c>Emakefile</c>.</item>
</list>
<p>All items in <c>Options</c> that are not make options are assumed
to be compiler options and are passed as-is to
@@ -108,9 +111,10 @@
<section>
<title>Emakefile</title>
- <p><c>make:all/0,1</c> and <c>make:files/1,2</c> looks in the
- current working directory for a file named <c>Emakefile</c>. If
- it exists, <c>Emakefile</c> should contain elements like this:</p>
+  <p><c>make:all/0,1</c> and <c>make:files/1,2</c> first look for
+    <c>{emake, Emake}</c> in the options, and then in the current working
+    directory for a file named <c>Emakefile</c>. If present, <c>Emake</c>
+    should contain elements like this:</p>
<code type="none">
Modules.
{Modules,Options}. </code>
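The documentation change above adds the {emake, Emake} option, where Emake carries the same elements an Emakefile would. A minimal usage sketch; the wildcard module specification and the compiler options below are illustrative, not taken from the patch.

    %% Sketch only: pass the Emakefile contents directly to make:all/1.
    1> Emake = [{'src/*', [debug_info, {outdir, "ebin"}]}].
    2> make:all([{emake, Emake}]).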
diff --git a/lib/tools/emacs/erlang-edoc.el b/lib/tools/emacs/erlang-edoc.el
index 2801aa8ae7..d0dcc81028 100644
--- a/lib/tools/emacs/erlang-edoc.el
+++ b/lib/tools/emacs/erlang-edoc.el
@@ -36,7 +36,7 @@
"Tags that can be used anywhere within a module.")
(defvar erlang-edoc-overview-tags
- '("author" "copyright" "reference" "see" "since" "title" "version")
+ '("author" "copyright" "doc" "reference" "see" "since" "title" "version")
"Tags that can be used in an overview file.")
(defvar erlang-edoc-module-tags
@@ -45,8 +45,8 @@
"Tags that can be used before a module declaration.")
(defvar erlang-edoc-function-tags
- '("deprecated" "doc" "equiv" "hidden" "private" "see" "since" "spec"
- "throws" "type")
+ '("deprecated" "doc" "equiv" "hidden" "param" "private" "returns"
+ "see" "since" "spec" "throws" "type")
"Tags that can be used before a function definition.")
(defvar erlang-edoc-predefined-macros
@@ -169,4 +169,10 @@
(jit-lock-refontify))
(provide 'erlang-edoc)
+
+;; Local variables:
+;; coding: utf-8
+;; indent-tabs-mode: nil
+;; End:
+
;;; erlang-edoc.el ends here
diff --git a/lib/tools/emacs/erlang-eunit.el b/lib/tools/emacs/erlang-eunit.el
index 3b85e6680a..38c40927f4 100644
--- a/lib/tools/emacs/erlang-eunit.el
+++ b/lib/tools/emacs/erlang-eunit.el
@@ -68,7 +68,7 @@ buffer and vice versa"
;;;
(defun erlang-eunit-open-src-file-other-window (test-file-path)
"Open the src file which corresponds to the an EUnit test file"
- (find-file-other-window (erlang-eunit-src-filename test-file-path)))
+ (find-file-other-window (erlang-eunit-src-filename test-file-path)))
;;; Return the name and path of the EUnit test file
;;, (input may be either the source filename itself or the EUnit test filename)
@@ -154,7 +154,7 @@ buffer and vice versa"
;;; Join filenames
(defun filename-join (dir file)
(if (or (= (elt file 0) ?/)
- (= (car (last (append dir nil))) ?/))
+ (= (car (last (append dir nil))) ?/))
(concat dir file)
(concat dir "/" file)))
@@ -299,7 +299,7 @@ With prefix arg, compiles for debug and runs tests with the verbose flag set."
;;; Compile source and EUnit test file and finally run EUnit tests for
;;; the current module
(defun erlang-eunit-compile-and-test (test-fun test-args &optional under-cover)
- "Compile the source and test files and run the EUnit test suite.
+ "Compile the source and test files and run the EUnit test suite.
If under-cover is set to t, the module under test is compile for
code coverage analysis. If under-cover is left out or not set,
@@ -311,7 +311,7 @@ and the number of times each line is covered).
With prefix arg, compiles for debug and runs tests with the verbose flag set."
(erlang-eunit-record-recent-compile under-cover)
(let ((src-filename (erlang-eunit-src-filename buffer-file-name))
- (test-filename (erlang-eunit-test-filename buffer-file-name)))
+ (test-filename (erlang-eunit-test-filename buffer-file-name)))
;; The purpose of out-maneuvering `save-some-buffers', as is done
;; below, is to ask the question about saving buffers only once,
@@ -326,9 +326,9 @@ With prefix arg, compiles for debug and runs tests with the verbose flag set."
;; be placed in the source file instead. Any compilation error
;; will prevent the subsequent steps to be run (hence the `and')
(and (erlang-eunit-compile-file src-filename under-cover)
- (if (file-readable-p test-filename)
- (erlang-eunit-compile-file test-filename)
- t)
+ (if (file-readable-p test-filename)
+ (erlang-eunit-compile-file test-filename)
+ t)
(apply test-fun test-args)
(if under-cover
(save-excursion
@@ -381,16 +381,16 @@ With prefix arg, compiles for debug and runs tests with the verbose flag set."
(goto-char compilation-parsing-end)
(erlang-eunit-all-list-elems-fulfill-p
(lambda (re) (let ((continue t)
- (result t))
- (while continue ; ignore warnings, stop at errors
- (if (re-search-forward re (point-max) t)
- (if (erlang-eunit-is-compilation-warning)
- t
- (setq result nil)
- (setq continue nil))
- (setq result t)
- (setq continue nil)))
- result))
+ (result t))
+ (while continue ; ignore warnings, stop at errors
+ (if (re-search-forward re (point-max) t)
+ (if (erlang-eunit-is-compilation-warning)
+ t
+ (setq result nil)
+ (setq continue nil))
+ (setq result t)
+ (setq continue nil)))
+ result))
(mapcar (lambda (e) (car e)) erlang-error-regexp-alist))))
(defun erlang-eunit-is-compilation-warning ()
@@ -402,7 +402,7 @@ With prefix arg, compiles for debug and runs tests with the verbose flag set."
(let ((matches-p t))
(while (and list matches-p)
(if (not (funcall pred (car list)))
- (setq matches-p nil))
+ (setq matches-p nil))
(setq list (cdr list)))
matches-p))
@@ -439,15 +439,21 @@ With prefix arg, compiles for debug and runs tests with the verbose flag set."
(defun erlang-eunit-ensure-keymap-for-key (key-seq)
(let ((prefix-keys (butlast (append key-seq nil)))
- (prefix-seq ""))
+ (prefix-seq ""))
(while prefix-keys
(setq prefix-seq (concat prefix-seq (make-string 1 (car prefix-keys))))
(setq prefix-keys (cdr prefix-keys))
(if (not (keymapp (lookup-key (current-local-map) prefix-seq)))
- (local-set-key prefix-seq (make-sparse-keymap))))))
+ (local-set-key prefix-seq (make-sparse-keymap))))))
(add-hook 'erlang-mode-hook 'erlang-eunit-add-key-bindings)
(provide 'erlang-eunit)
-;; erlang-eunit ends here
+
+;; Local variables:
+;; coding: utf-8
+;; indent-tabs-mode: nil
+;; End:
+
+;; erlang-eunit.el ends here
diff --git a/lib/tools/emacs/erlang-pkg.el b/lib/tools/emacs/erlang-pkg.el
index 4d0aa6fcd3..02d6bebbf4 100644
--- a/lib/tools/emacs/erlang-pkg.el
+++ b/lib/tools/emacs/erlang-pkg.el
@@ -1,3 +1,3 @@
(define-package "erlang" "2.7.0"
- "Erlang major mode"
- '())
+ "Erlang major mode"
+ '((emacs "24.1")))
diff --git a/lib/tools/emacs/erlang-skels.el b/lib/tools/emacs/erlang-skels.el
index eeba7f34e9..bdb3d9ad4a 100644
--- a/lib/tools/emacs/erlang-skels.el
+++ b/lib/tools/emacs/erlang-skels.el
@@ -1,7 +1,7 @@
;;
;; %CopyrightBegin%
;;
-;; Copyright Ericsson AB 2010-2016. All Rights Reserved.
+;; Copyright Ericsson AB 2010-2017. All Rights Reserved.
;;
;; Licensed under the Apache License, Version 2.0 (the "License");
;; you may not use this file except in compliance with the License.
@@ -915,11 +915,7 @@ Please see the function `tempo-define-template'.")
"%% process to initialize." n
(erlang-skel-separator-end 2)
"-spec init(Args :: term()) ->" n>
- "{ok, State :: term(), Data :: term()} |" n>
- "{ok, State :: term(), Data :: term()," n>
- "[gen_statem:action()] | gen_statem:action()} |" n>
- "ignore |" n>
- "{stop, Reason :: term()}." n
+ "gen_statem:init_result(atom())." n
"init([]) ->" n>
"process_flag(trap_exit, true)," n>
"{ok, state_name, #data{}}." n
@@ -1028,11 +1024,7 @@ Please see the function `tempo-define-template'.")
"%% process to initialize." n
(erlang-skel-separator-end 2)
"-spec init(Args :: term()) ->" n>
- "{ok, State :: term(), Data :: term()} |" n>
- "{ok, State :: term(), Data :: term()," n>
- "[gen_statem:action()] | gen_statem:action()} |" n>
- "ignore |" n>
- "{stop, Reason :: term()}." n
+ "gen_statem:init_result(term())." n
"init([]) ->" n>
"process_flag(trap_exit, true)," n>
"{ok, state_name, #data{}}." n
diff --git a/lib/tools/emacs/erlang-start.el b/lib/tools/emacs/erlang-start.el
index 160057e179..c35f280bf4 100644
--- a/lib/tools/emacs/erlang-start.el
+++ b/lib/tools/emacs/erlang-start.el
@@ -39,7 +39,7 @@
;;
;; Please state as exactly as possible:
;; - Version number of Erlang Mode (see the menu), Emacs, Erlang,
-;; and of any other relevant software.
+;; and of any other relevant software.
;; - What the expected result was.
;; - What you did, preferably in a repeatable step-by-step form.
;; - A description of the unexpected result.
@@ -60,7 +60,7 @@
;;
(autoload 'erlang-mode "erlang" "Major mode for editing Erlang code." t)
-(autoload 'erlang-version "erlang"
+(autoload 'erlang-version "erlang"
"Return the current version of Erlang mode." t)
(autoload 'erlang-shell "erlang" "Start a new Erlang shell." t)
(autoload 'run-erlang "erlang" "Start a new Erlang shell." t)
@@ -68,7 +68,7 @@
(autoload 'erlang-compile "erlang"
"Compile Erlang module in current buffer." t)
-(autoload 'erlang-man-module "erlang"
+(autoload 'erlang-man-module "erlang"
"Find manual page for MODULE." t)
(autoload 'erlang-man-function "erlang"
"Find manual page for NAME, where NAME is module:function." t)
@@ -108,25 +108,22 @@ A function suitable for `eldoc-documentation-function'.\n\n(fn)" nil nil)
;;
;; Associate files using interpreter "escript" with Erlang mode.
-;;
+;;
;;;###autoload
(add-to-list 'interpreter-mode-alist (cons "escript" 'erlang-mode))
;;
;; Ignore files ending in ".jam", ".vee", and ".beam" when performing
-;; file completion.
+;; file completion and in dired omit mode.
;;
;;;###autoload
(let ((erl-ext '(".jam" ".vee" ".beam")))
(while erl-ext
- (let ((cie completion-ignored-extensions))
- (while (and cie (not (string-equal (car cie) (car erl-ext))))
- (setq cie (cdr cie)))
- (if (null cie)
- (setq completion-ignored-extensions
- (cons (car erl-ext) completion-ignored-extensions))))
+ (add-to-list 'completion-ignored-extensions (car erl-ext))
+ (when (boundp 'dired-omit-extensions)
+ (add-to-list 'dired-omit-extensions (car erl-ext)))
(setq erl-ext (cdr erl-ext))))
@@ -136,4 +133,9 @@ A function suitable for `eldoc-documentation-function'.\n\n(fn)" nil nil)
(provide 'erlang-start)
+;; Local variables:
+;; coding: utf-8
+;; indent-tabs-mode: nil
+;; End:
+
;; erlang-start.el ends here.
diff --git a/lib/tools/emacs/erlang-test.el b/lib/tools/emacs/erlang-test.el
index ba6190d194..ea5d637199 100644
--- a/lib/tools/emacs/erlang-test.el
+++ b/lib/tools/emacs/erlang-test.el
@@ -2,7 +2,7 @@
;;; Unit tests for erlang.el.
-;; Author: Johan Claesson
+;; Author: Johan Claesson
;; Created: 2016-05-07
;; Keywords: erlang, languages
@@ -28,6 +28,27 @@
;;; Commentary:
;; This library require GNU Emacs 25 or later.
+;;
+;; There are two ways to run emacs unit tests.
+;;
+;; 1. Within a running emacs process. Load this file. Then to run
+;; all defined test cases:
+;;
+;; M-x ert RET t RET
+;;
+;; To run only the erlang test cases:
+;;
+;; M-x ert RET "^erlang" RET
+;;
+;;
+;; 2. In a new stand-alone emacs process. This process exits
+;; when it has executed the tests. For example:
+;;
+;; emacs -Q -batch -L . -l erlang.el -l erlang-test.el \
+;; -f ert-run-tests-batch-and-exit
+;;
+;; The -L option adds a directory to the load-path. It should be the
+;; directory containing erlang.el and erlang-test.el.
;;; Code:
@@ -59,11 +80,12 @@ concatenated to form an erlang file to test on.")
tags-file-name
tags-table-list
tags-table-set-list
+ tags-add-tables
+ tags-completion-table
erlang-buffer
erlang-mode-hook
prog-mode-hook
- erlang-shell-mode-hook
- tags-add-tables)
+ erlang-shell-mode-hook)
(unwind-protect
(progn
(setq-default tags-file-name nil)
@@ -71,11 +93,14 @@ concatenated to form an erlang file to test on.")
(erlang-test-create-erlang-file erlang-file)
(erlang-test-compile-tags erlang-file tags-file)
(setq erlang-buffer (find-file-noselect erlang-file))
- (with-current-buffer erlang-buffer
- (setq-local tags-file-name tags-file))
- ;; Setting global tags-file-name is a workaround for
- ;; GNU Emacs bug#23164.
- (setq tags-file-name tags-file)
+ (if (< emacs-major-version 26)
+ (progn
+ (with-current-buffer erlang-buffer
+ (setq-local tags-file-name tags-file))
+ ;; Setting global tags-file-name is a workaround for
+ ;; GNU Emacs bug#23164.
+ (setq tags-file-name tags-file))
+ (visit-tags-table tags-file t))
(erlang-test-complete-at-point tags-file)
(erlang-test-completion-table)
(erlang-test-xref-find-definitions erlang-file erlang-buffer))
@@ -117,12 +142,20 @@ concatenated to form an erlang file to test on.")
for line = 1 then (1+ line)
do (when tagname
(switch-to-buffer erlang-buffer)
- (xref-find-definitions tagname)
- (erlang-test-verify-pos erlang-file line)
- (xref-find-definitions (concat "erlang_test:" tagname))
- (erlang-test-verify-pos erlang-file line)))
- (xref-find-definitions "erlang_test:")
- (erlang-test-verify-pos erlang-file 1))
+ (erlang-test-xref-jump tagname erlang-file line)
+ (erlang-test-xref-jump (concat "erlang_test:" tagname)
+ erlang-file line)))
+ (erlang-test-xref-jump "erlang_test:" erlang-file 1))
+
+(defun erlang-test-xref-jump (id expected-file expected-line)
+ (goto-char (point-max))
+ (insert "\n%% " id)
+ (save-buffer)
+ (if (fboundp 'xref-find-definitions)
+ (xref-find-definitions (erlang-id-to-string
+ (erlang-get-identifier-at-point)))
+ (error "xref-find-definitions not defined (too old emacs?)"))
+ (erlang-test-verify-pos expected-file expected-line))
(defun erlang-test-verify-pos (expected-file expected-line)
(should (string-equal (file-truename expected-file)
@@ -136,13 +169,13 @@ concatenated to form an erlang file to test on.")
(setq-local tags-file-name tags-file)
(insert "\nerlang_test:fun")
(erlang-complete-tag)
- (should (looking-back "erlang_test:function"))
+ (should (looking-back "erlang_test:function" (point-at-bol)))
(insert "\nfun")
(erlang-complete-tag)
- (should (looking-back "function"))
+ (should (looking-back "function" (point-at-bol)))
(insert "\nerlang_")
(erlang-complete-tag)
- (should (looking-back "erlang_test:"))))
+ (should (looking-back "erlang_test:" (point-at-bol)))))
(ert-deftest erlang-test-compile-options ()
@@ -179,6 +212,30 @@ concatenated to form an erlang file to test on.")
erlang))
+(ert-deftest erlang-test-parse-id ()
+ (cl-loop for id-string in '("fun/10"
+ "qualified-function module:fun/10"
+ "record reko"
+ "macro _SYMBOL"
+ "macro MACRO/10"
+ "module modula"
+ "macro"
+ nil)
+ for id-list in '((nil nil "fun" 10)
+ (qualified-function "module" "fun" 10)
+ (record nil "reko" nil)
+ (macro nil "_SYMBOL" nil)
+ (macro nil "MACRO" 10)
+ (module nil "modula" nil)
+ (nil nil "macro" nil)
+ nil)
+ for id-list2 = (erlang-id-to-list id-string)
+ do (should (equal id-list id-list2))
+ for id-string2 = (erlang-id-to-string id-list)
+ do (should (equal id-string id-string2))
+ collect id-list2))
+
+
(provide 'erlang-test)
;;; erlang-test.el ends here
diff --git a/lib/tools/emacs/erlang.el b/lib/tools/emacs/erlang.el
index 51f7e8e26c..59b20c552e 100644
--- a/lib/tools/emacs/erlang.el
+++ b/lib/tools/emacs/erlang.el
@@ -4,6 +4,8 @@
;; Author: Anders Lindgren
;; Keywords: erlang, languages, processes
;; Date: 2011-12-11
+;; Version: 2.7.0
+;; Package-Requires: ((emacs "24.1"))
;; %CopyrightBegin%
;;
@@ -24,7 +26,7 @@
;; %CopyrightEnd%
;;
-;; Lars Thors�n's modifications of 2000-06-07 included.
+;; Lars Thorsén's modifications of 2000-06-07 included.
;; The original version of this package was written by Robert Virding.
;;
;;; Commentary:
@@ -85,30 +87,15 @@
(defconst erlang-version "2.7"
"The version number of Erlang mode.")
-(defvar erlang-root-dir nil
+(defcustom erlang-root-dir nil
"The directory where the Erlang system is installed.
The name should not contain the trailing slash.
Should this variable be nil, no manual pages will show up in the
-Erlang mode menu.")
-
-(eval-and-compile
- (defconst erlang-emacs-major-version
- (if (boundp 'emacs-major-version)
- emacs-major-version
- (string-match "\\([0-9]+\\)\\.\\([0-9]+\\)" emacs-version)
- (erlang-string-to-int (substring emacs-version
- (match-beginning 1) (match-end 1))))
- "Major version number of Emacs."))
-
-(eval-and-compile
- (defconst erlang-emacs-minor-version
- (if (boundp 'emacs-minor-version)
- emacs-minor-version
- (string-match "\\([0-9]+\\)\\.\\([0-9]+\\)" emacs-version)
- (erlang-string-to-int (substring emacs-version
- (match-beginning 2) (match-end 2))))
- "Minor version number of Emacs."))
+Erlang mode menu."
+ :group 'erlang
+ :type '(restricted-sexp :match-alternatives (stringp 'nil))
+ :safe (lambda (val) (or (eq nil val) (stringp val))))
(defconst erlang-xemacs-p (string-match "Lucid\\|XEmacs" emacs-version)
"Non-nil when running under XEmacs or Lucid Emacs.")
@@ -129,7 +116,7 @@ Never EVER set this variable!")
erlang-menu-man-items
erlang-menu-personal-items
erlang-menu-version-items)
- "*List of menu item list to combine to create Erlang mode menu.
+ "List of menu item list to combine to create Erlang mode menu.
External programs which temporarily add menu items to the Erlang mode
menu may use this variable. Please use the function `add-hook' to add
@@ -238,7 +225,7 @@ This variable is added to the list of Erlang menus stored in
The menu is in the form described by the variable `erlang-menu-base-items'.")
(defvar erlang-mode-hook nil
- "*Functions to run when Erlang mode is activated.
+ "Functions to run when Erlang mode is activated.
This hook is used to change the behaviour of Erlang mode. It is
normally used by the user to personalise the programming environment.
@@ -272,7 +259,7 @@ To use the example, copy the following lines to your `~/.emacs' file:
(imenu-add-to-menubar \"Imenu\")))")
(defvar erlang-load-hook nil
- "*Functions to run when Erlang mode is loaded.
+ "Functions to run when Erlang mode is loaded.
This hook is used to change the behaviour of Erlang mode. It is
normally used by the user to personalise the programming environment.
@@ -304,17 +291,20 @@ manual pages can be retrieved (note that you must set the value of
A useful function is `tempo-template-erlang-normal-header'.
\(This function only exists when the `tempo' package is available.)")
-(defvar erlang-check-module-name 'ask
- "*Non-nil means check that module name and file name agrees when saving.
+(defcustom erlang-check-module-name 'ask
+ "Non-nil means check that module name and file name agrees when saving.
-If the value of this variable is the atom `ask', the user is
-prompted. If the value is t the source is silently changed.")
+If the value of this variable is the symbol `ask', the user is
+prompted. If the value is t the source is silently changed."
+ :group 'erlang
+ :type '(choice (const :tag "Check on save" 'ask)
+ (const :tag "Don't check on save" t)))
(defvar erlang-electric-commands
'(erlang-electric-comma
erlang-electric-semicolon
erlang-electric-gt)
- "*List of activated electric commands.
+ "List of activated electric commands.
The list should contain the electric commands which should be active.
Currently, the available electric commands are:
@@ -328,8 +318,8 @@ are activated.
To deactivate all electric commands, set this variable to nil.")
-(defvar erlang-electric-newline-inhibit t
- "*Set to non-nil to inhibit newline after electric command.
+(defcustom erlang-electric-newline-inhibit t
+ "Set to non-nil to inhibit newline after electric command.
This is useful since a lot of people press return after executing an
electric command.
@@ -339,28 +329,32 @@ list `erlang-electric-newline-inhibit-list'.
Note that commands in this list are required to set the variable
`erlang-electric-newline-inhibit' to nil when the newline shouldn't be
-inhibited.")
+inhibited."
+ :group 'erlang
+ :type 'boolean
+ :safe 'booleanp)
(defvar erlang-electric-newline-inhibit-list
'(erlang-electric-semicolon
erlang-electric-comma
erlang-electric-gt)
- "*Commands which can inhibit the next newline.")
+ "Commands which can inhibit the next newline.")
-(defvar erlang-electric-semicolon-insert-blank-lines nil
- "*Number of blank lines inserted before header, or nil.
+(defcustom erlang-electric-semicolon-insert-blank-lines nil
+ "Number of blank lines inserted before header, or nil.
This variable controls the behaviour of `erlang-electric-semicolon'
when a new function header is generated. When nil, no blank line is
inserted between the current line and the new header. When bound to a
number it represents the number of blank lines which should be
-inserted.")
+inserted."
+ :group 'erlang)
(defvar erlang-electric-semicolon-criteria
'(erlang-next-lines-empty-p
erlang-at-keyword-end-p
erlang-at-end-of-function-p)
- "*List of functions controlling `erlang-electric-semicolon'.
+ "List of functions controlling `erlang-electric-semicolon'.
The functions in this list are called, in order, whenever a semicolon
is typed. Each function in the list is called with no arguments,
and should return one of the following values:
@@ -381,7 +375,7 @@ The test is performed by the function `erlang-test-criteria-list'.")
erlang-at-keyword-end-p
erlang-at-end-of-clause-p
erlang-at-end-of-function-p)
- "*List of functions controlling `erlang-electric-comma'.
+ "List of functions controlling `erlang-electric-comma'.
The functions in this list are called, in order, whenever a comma
is typed. Each function in the list is called with no arguments,
and should return one of the following values:
@@ -399,7 +393,7 @@ The test is performed by the function `erlang-test-criteria-list'.")
'(erlang-stop-when-in-type-spec
erlang-next-lines-empty-p
erlang-at-end-of-function-p)
- "*List of functions controlling the arrow aspect of `erlang-electric-gt'.
+ "List of functions controlling the arrow aspect of `erlang-electric-gt'.
The functions in this list are called, in order, whenever a `>'
is typed. Each function in the list is called with no arguments,
and should return one of the following values:
@@ -415,7 +409,7 @@ The test is performed by the function `erlang-test-criteria-list'.")
(defvar erlang-electric-newline-criteria
'(t)
- "*List of functions controlling `erlang-electric-newline'.
+ "List of functions controlling `erlang-electric-newline'.
The electric newline commands indents the next line. Should the
current line begin with a comment the comment start is copied to
@@ -435,8 +429,8 @@ list, it is treated as a function triggering the electric command.
The test is performed by the function `erlang-test-criteria-list'.")
-(defvar erlang-next-lines-empty-threshold 2
- "*Number of blank lines required to activate an electric command.
+(defcustom erlang-next-lines-empty-threshold 2
+ "Number of blank lines required to activate an electric command.
Actually, this value controls the behaviour of the function
`erlang-next-lines-empty-p' which normally is a member of the
@@ -457,46 +451,67 @@ function `erlang-next-lines-empty-p' would be removed from the criteria
lists.
Note that even if `erlang-next-lines-empty-p' should not trigger an
-electric command, other functions in the criteria list could.")
+electric command, other functions in the criteria list could."
+ :group 'erlang
+ :type '(restricted-sexp :match-alternatives (integerp 'nil))
+ :safe (lambda (val) (or (eq val nil) (integerp val))))
-(defvar erlang-new-clause-with-arguments nil
- "*Non-nil means that the arguments are cloned when a clause is generated.
+(defcustom erlang-new-clause-with-arguments nil
+ "Non-nil means that the arguments are cloned when a clause is generated.
A new function header can be generated by calls to the function
-`erlang-generate-new-clause' and by use of the electric semicolon.")
+`erlang-generate-new-clause' and by use of the electric semicolon."
+ :group 'erlang
+ :type 'boolean
+ :safe 'booleanp)
-(defvar erlang-compile-use-outdir t
- "*When nil, go to the directory containing source file when compiling.
+(defcustom erlang-compile-use-outdir t
+ "When nil, go to the directory containing source file when compiling.
This is a workaround for a bug in the `outdir' option of compile. If the
outdir is not in the current load path, Erlang doesn't load the object
module after it has been compiled.
To activate the workaround, place the following in your `~/.emacs' file:
- (setq erlang-compile-use-outdir nil)")
-
-(defvar erlang-indent-level 4
- "*Indentation of Erlang calls/clauses within blocks.")
-(put 'erlang-indent-level 'safe-local-variable 'integerp)
-
-(defvar erlang-icr-indent nil
- "*Indentation of Erlang if/case/receive/ patterns. `nil' means
- keeping default behavior. When non-nil, indent to th column of
- if/case/receive.")
-
-(defvar erlang-indent-guard 2
- "*Indentation of Erlang guards.")
-(put 'erlang-indent-guard 'safe-local-variable 'integerp)
-
-(defvar erlang-argument-indent 2
- "*Indentation of the first argument in a function call.
+ (setq erlang-compile-use-outdir nil)"
+ :group 'erlang
+ :type 'boolean
+ :safe 'booleanp)
+
+(defcustom erlang-indent-level 4
+ "Indentation of Erlang calls/clauses within blocks."
+ :group 'erlang
+ :type 'integer
+ :safe 'integerp)
+
+(defcustom erlang-icr-indent nil
+ "Indentation of Erlang if/case/receive patterns.
+nil means keeping default behavior. When non-nil, indent to the column of
+if/case/receive."
+ :group 'erlang
+ :type 'boolean
+ :safe 'booleanp)
+
+(defcustom erlang-indent-guard 2
+ "Indentation of Erlang guards."
+ :group 'erlang
+ :type 'integer
+ :safe 'integerp)
+
+(defcustom erlang-argument-indent 2
+ "Indentation of the first argument in a function call.
When nil, indent to the column after the `(' of the
-function.")
-(put 'erlang-argument-indent 'safe-local-variable '(lambda (val) (or (null val) (integerp val))))
-
-(defvar erlang-tab-always-indent t
- "*Non-nil means TAB in Erlang mode should always re-indent the current line,
-regardless of where in the line point is when the TAB command is used.")
+function."
+ :group 'erlang
+ :type '(restricted-sexp :match-alternatives (integerp 'nil))
+ :safe (lambda (val) (or (eq val nil) (integerp val))))
+
+(defcustom erlang-tab-always-indent t
+ "Non-nil means TAB in Erlang mode should always re-indent the current line,
+regardless of where in the line point is when the TAB command is used."
+ :group 'erlang
+ :type 'boolean
+ :safe 'booleanp)
(defvar erlang-man-inhibit (eq system-type 'windows-nt)
"Inhibit the creation of the Erlang Manual Pages menu.
@@ -509,7 +524,7 @@ there is no attempt to create the menu.")
("Man - Modules" "/man/man3" t)
("Man - Files" "/man/man4" t)
("Man - Applications" "/man/man6" t))
- "*The man directories displayed in the Erlang menu.
+ "The man directories displayed in the Erlang menu.
Each item in the list should be a list with three elements, the first
the name of the menu, the second the directory, and the last a flag.
@@ -517,17 +532,17 @@ Should the flag the nil, the directory is absolute, should it be non-nil
the directory is relative to the variable `erlang-root-dir'.")
(defvar erlang-man-max-menu-size 35
- "*The maximum number of menu items in one menu allowed.")
+ "The maximum number of menu items in one menu allowed.")
(defvar erlang-man-display-function 'erlang-man-display
- "*Function used to display man page.
+ "Function used to display man page.
The function is called with one argument, the name of the file
containing the man page. Use this variable when the default
function, `erlang-man-display', does not work on your system.")
(defvar erlang-compile-extra-opts '()
- "*Additional options to the compilation command.
+ "Additional options to the compilation command.
This is an elisp list of options. Each option can be either:
- an atom
- a dotted pair
@@ -539,7 +554,7 @@ Example: '(bin_opt_info (i . \"/path1/include\") (i . \"/path2/include\"))")
(".xrl\\'" . inferior-erlang-compute-leex-compile-command)
(".yrl\\'" . inferior-erlang-compute-yecc-compile-command)
("." . inferior-erlang-compute-erl-compile-command))
- "*Alist of filename patterns vs corresponding compilation functions.
+ "Alist of filename patterns vs corresponding compilation functions.
Each element looks like (REGEXP . FUNCTION). Compiling a file whose name
matches REGEXP specifies FUNCTION to use to compute the compilation
command. The FUNCTION will be called with two arguments: module name and
@@ -547,14 +562,14 @@ default compilation options, like output directory. The FUNCTION
is expected to return a string.")
(defvar erlang-leex-compile-opts '()
- "*Options to pass to leex when compiling xrl files.
+ "Options to pass to leex when compiling xrl files.
This is an elisp list of options. Each option can be either:
- an atom
- a dotted pair
- a string")
(defvar erlang-yecc-compile-opts '()
- "*Options to pass to yecc when compiling yrl files.
+ "Options to pass to yecc when compiling yrl files.
This is an elisp list of options. Each option can be either:
- an atom
- a dotted pair
@@ -562,7 +577,7 @@ This is an elisp list of options. Each option can be either:
(eval-and-compile
(defvar erlang-regexp-modern-p
- (if (> erlang-emacs-major-version 21) t nil)
+ (if (> emacs-major-version 21) t nil)
"Non-nil when this version of Emacs uses a modern version of regexp.
Supporting \_< and \_> This is determined by checking the version of Emacs used."))
@@ -608,6 +623,24 @@ The regexp must be surrounded with a pair of regexp parentheses."))
This is used to determine matches in complex regexps which contains
`erlang-variable-regexp'."))
+(defconst erlang-module-function-regexp
+ (eval-when-compile
+ (concat erlang-atom-regexp ":" erlang-atom-regexp))
+ "Regexp matching an erlang module:function.")
+
+(defconst erlang-name-regexp
+ (concat "\\("
+ "\\(?:\\sw\\|\\s_\\)+"
+ "\\|"
+ erlang-atom-quoted-regexp
+ "\\)")
+ "Matches a name of a function, macro or record")
+
+(defconst erlang-id-regexp
+ (concat "\\(?:\\(qualified-function\\|record\\|macro\\|module\\) \\)?"
+ "\\(?:" erlang-atom-regexp ":\\)?"
+ erlang-name-regexp "?"
+ "\\(?:/\\([0-9]+\\)\\)?"))
(eval-and-compile
(defun erlang-regexp-opt (strings &optional paren)
@@ -983,7 +1016,7 @@ resulting regexp is surrounded by \\_< and \\_>."
"Regexp which should match beginning of a clause.")
(defvar erlang-file-name-extension-regexp "\\.erl$"
- "*Regexp which should match an Erlang file name.
+ "Regexp which should match an Erlang file name.
This regexp is used when an Erlang module name is extracted from the
name of an Erlang source file.
@@ -997,7 +1030,7 @@ tags system should interpret tags on the form `module:tag' for
files written in other languages than Erlang.")
(defvar erlang-inferior-shell-split-window t
- "*If non-nil, when starting an inferior shell, split windows.
+ "If non-nil, when starting an inferior shell, split windows.
If nil, the inferior shell replaces the window. This is the traditional
behaviour.")
@@ -1043,7 +1076,7 @@ behaviour.")
(unless inferior-erlang-use-cmm
(define-key map "\C-x`" 'erlang-next-error))
map)
- "*Keymap used in Erlang mode.")
+ "Keymap used in Erlang mode.")
(defvar erlang-mode-abbrev-table nil
"Abbrev table in use in Erlang-mode buffers.")
(defvar erlang-mode-syntax-table nil
@@ -1310,29 +1343,6 @@ replaced by `erlang-etags-tags-completion-table'.")
;;; Avoid errors while compiling this file.
-;; `eval-when-compile' is not defined in Emacs 18. We define it as a
-;; no-op.
-(or (fboundp 'eval-when-compile)
- (defmacro eval-when-compile (&rest rest) nil))
-
-;; These umm...functions are new in Emacs 20. And, yes, until version
-;; 19.27 Emacs backquotes were this ugly.
-
-(or (fboundp 'unless)
- (defmacro unless (condition &rest body)
- "(unless CONDITION BODY...): If CONDITION is false, do BODY, else return nil."
- `((if (, condition) nil ,@body))))
-
-(or (fboundp 'when)
- (defmacro when (condition &rest body)
- "(when CONDITION BODY...): If CONDITION is true, do BODY, else return nil."
- `((if (, condition) (progn ,@body) nil))))
-
-(or (fboundp 'char-before)
- (defmacro char-before (&optional pos)
- "Return the character in the current buffer just before POS."
- `( (char-after (1- (or ,pos (point)))))))
-
;; defvar some obsolete variables, which we still support for
;; backwards compatibility reasons.
(eval-when-compile
@@ -1360,20 +1370,11 @@ replaced by `erlang-etags-tags-completion-table'.")
(defun erlang-version ()
"Return the current version of Erlang mode."
(interactive)
- (if (erlang-interactive-p)
+ (if (called-interactively-p 'interactive)
(message "Erlang mode version %s, written by Anders Lindgren"
erlang-version))
erlang-version)
-(defun erlang-interactive-p ()
- (if (fboundp 'called-interactively-p)
- (called-interactively-p 'interactive)
- (funcall (symbol-function 'interactive-p))))
-
-(unless (fboundp 'prog-mode)
- (defun prog-mode ()
- (use-local-map (make-keymap))))
-
;;;###autoload
(define-derived-mode erlang-mode prog-mode "Erlang"
"Major mode for editing Erlang source files in Emacs.
@@ -1462,40 +1463,43 @@ Other commands:
(add-to-list 'auto-mode-alist (cons r 'erlang-mode)))
(defun erlang-syntax-table-init ()
- (if (null erlang-mode-syntax-table)
- (let ((table (make-syntax-table)))
- (modify-syntax-entry ?\n ">" table)
- (modify-syntax-entry ?\" "\"" table)
- (modify-syntax-entry ?# "." table)
- ;; (modify-syntax-entry ?$ "\\" table) ;; Creates problems with indention afterwards
- ;; (modify-syntax-entry ?$ "'" table) ;; Creates syntax highlighting and indention problems
- (modify-syntax-entry ?$ "/" table) ;; Misses the corner case "string that ends with $"
- ;; we have to live with that for now..it is the best alternative
- ;; that can be worked around with "string hat ends with \$"
- (modify-syntax-entry ?% "<" table)
- (modify-syntax-entry ?& "." table)
- (modify-syntax-entry ?\' "\"" table)
- (modify-syntax-entry ?* "." table)
- (modify-syntax-entry ?+ "." table)
- (modify-syntax-entry ?- "." table)
- (modify-syntax-entry ?/ "." table)
- (modify-syntax-entry ?: "." table)
- (modify-syntax-entry ?< "." table)
- (modify-syntax-entry ?= "." table)
- (modify-syntax-entry ?> "." table)
- (modify-syntax-entry ?\\ "\\" table)
- (modify-syntax-entry ?_ "_" table)
- (modify-syntax-entry ?| "." table)
- (modify-syntax-entry ?^ "'" table)
-
- ;; Pseudo bit-syntax: Latin1 double angle quotes as parens.
- ;;(modify-syntax-entry ?\253 "(?\273" table)
- ;;(modify-syntax-entry ?\273 ")?\253" table)
-
- (setq erlang-mode-syntax-table table)))
-
+ (erlang-ensure-syntax-table-is-initialized)
(set-syntax-table erlang-mode-syntax-table))
+(defun erlang-ensure-syntax-table-is-initialized ()
+ (unless erlang-mode-syntax-table
+ (let ((table (make-syntax-table)))
+ (modify-syntax-entry ?\n ">" table)
+ (modify-syntax-entry ?\" "\"" table)
+ (modify-syntax-entry ?# "." table)
+ ;; (modify-syntax-entry ?$ "\\" table) ;; Creates problems with indention afterwards
+ ;; (modify-syntax-entry ?$ "'" table) ;; Creates syntax highlighting and indention problems
+ (modify-syntax-entry ?$ "/" table) ;; Misses the corner case "string that ends with $"
+ ;; we have to live with that for now..it is the best alternative
+ ;; that can be worked around with "string that ends with \$"
+ (modify-syntax-entry ?% "<" table)
+ (modify-syntax-entry ?& "." table)
+ (modify-syntax-entry ?\' "\"" table)
+ (modify-syntax-entry ?* "." table)
+ (modify-syntax-entry ?+ "." table)
+ (modify-syntax-entry ?- "." table)
+ (modify-syntax-entry ?/ "." table)
+ (modify-syntax-entry ?: "." table)
+ (modify-syntax-entry ?< "." table)
+ (modify-syntax-entry ?= "." table)
+ (modify-syntax-entry ?> "." table)
+ (modify-syntax-entry ?\\ "\\" table)
+ (modify-syntax-entry ?_ "_" table)
+ (modify-syntax-entry ?| "." table)
+ (modify-syntax-entry ?^ "'" table)
+
+ ;; Pseudo bit-syntax: Latin1 double angle quotes as parens.
+ ;;(modify-syntax-entry ?\253 "(?\273" table)
+ ;;(modify-syntax-entry ?\273 ")?\253" table)
+
+ (setq erlang-mode-syntax-table table))))
+
+
(defun erlang-electric-init ()
;; Set up electric character functions to work with
@@ -1541,7 +1545,7 @@ Other commands:
(make-local-variable 'indent-region-function)
(setq indent-region-function 'erlang-indent-region)
(set (make-local-variable 'comment-indent-function) 'erlang-comment-indent)
- (if (<= erlang-emacs-major-version 18)
+ (if (<= emacs-major-version 18)
(set (make-local-variable 'comment-indent-hook) 'erlang-comment-indent))
(set (make-local-variable 'parse-sexp-ignore-comments) t)
(set (make-local-variable 'dabbrev-case-fold-search) nil)
@@ -1778,7 +1782,7 @@ Please see the variable `erlang-menu-base-items'."
(if (and popup (boundp 'mode-popup-menu))
(funcall (symbol-function 'set)
'mode-popup-menu erlang-xemacs-popup-menu))))
- ((>= erlang-emacs-major-version 19)
+ ((>= emacs-major-version 19)
(define-key keymap (vector 'menu-bar (intern name))
(erlang-menu-make-keymap name items)))
(t nil)))
@@ -1961,7 +1965,9 @@ menu is left unchanged."
The variable `erlang-man-dirs' contains entries describing
the location of the manual pages."
(interactive)
- (if erlang-man-inhibit
+ (if (or erlang-man-inhibit
+ (and (boundp 'menu-bar-mode)
+ (not menu-bar-mode)))
()
(setq erlang-menu-man-items
'(nil
@@ -2000,7 +2006,7 @@ The format is described in the documentation of `erlang-man-dirs'."
(setq dir (cond ((nth 2 (car dir-list))
;; Relative to `erlang-root-dir'.
(and (stringp erlang-root-dir)
- (concat erlang-root-dir (nth 1 (car dir-list)))))
+ (erlang-man-dir (nth 1 (car dir-list)))))
(t
;; Absolute
(nth 1 (car dir-list)))))
@@ -2018,6 +2024,8 @@ The format is described in the documentation of `erlang-man-dirs'."
'(("Man Pages"
(("Error! Why?" erlang-man-describe-error)))))))
+(defun erlang-man-dir (subdir)
+ (concat erlang-root-dir "/lib/erlang/" subdir))
;; Should the menu be to long, let's split it into a number of
;; smaller menus. Warning, this code contains beautiful
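
A minimal sketch of what the new `erlang-man-dir' helper resolves to; the install prefix and subdirectory below are made up:

(let ((erlang-root-dir "/usr/local/otp"))   ; hypothetical prefix
  (erlang-man-dir "man/man3"))              ; => "/usr/local/otp/lib/erlang/man/man3"
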
@@ -2080,7 +2088,7 @@ menus is created."
"Find manual page for MODULE, defaults to module of function under point.
This function is aware of imported functions."
(interactive
- (list (let* ((mod (car-safe (erlang-get-function-under-point)))
+ (list (let* ((mod (erlang-default-module))
(input (read-string
(format "Manual entry for module%s: "
(if (or (null mod) (string= mod ""))
@@ -2089,26 +2097,36 @@ This function is aware of imported functions."
(if (string= input "")
mod
input))))
- (or module (setq module (car (erlang-get-function-under-point))))
- (if (or (null module) (string= module ""))
- (error "No Erlang module name given"))
+ (setq module (or module
+ (erlang-default-module)))
+ (when (or (null module) (string= module ""))
+ (error "No Erlang module name given"))
(let ((dir-list erlang-man-dirs)
- (pat (concat "/" (regexp-quote module) "\\.\\([124-9]\\|3\\(erl\\)?\\)\\(\\.gz\\)?$"))
+ (pat (concat "/" (regexp-quote module)
+ "\\.\\([124-9]\\|3\\(erl\\)?\\)\\(\\.gz\\)?$"))
(file nil)
file-list)
(while (and dir-list (null file))
- (setq file-list (erlang-man-get-files
- (if (nth 2 (car dir-list))
- (concat erlang-root-dir (nth 1 (car dir-list)))
- (nth 1 (car dir-list)))))
- (while (and file-list (null file))
- (if (string-match pat (car file-list))
- (setq file (car file-list)))
- (setq file-list (cdr file-list)))
- (setq dir-list (cdr dir-list)))
+ (let ((dir (if (nth 2 (car dir-list))
+ (erlang-man-dir (nth 1 (car dir-list)))
+ (nth 1 (car dir-list)))))
+ (when (file-directory-p dir)
+ (setq file-list (erlang-man-get-files dir))
+ (while (and file-list (null file))
+ (if (string-match pat (car file-list))
+ (setq file (car file-list)))
+ (setq file-list (cdr file-list))))
+ (setq dir-list (cdr dir-list))))
(if file
(funcall erlang-man-display-function file)
- (error "No manual page for module %s found" module))))
+      ;; Did not find the manual file. Fall back to manual-entry.
+ (manual-entry module))))
+
+(defun erlang-default-module ()
+ (let ((id (erlang-get-identifier-at-point)))
+ (if (eq (erlang-id-kind id) 'qualified-function)
+ (erlang-id-module id)
+ (erlang-id-name id))))
;; Warning, the function `erlang-man-function' is a hack!
@@ -2128,37 +2146,28 @@ The entry for `function' is displayed.
This function is aware of imported functions."
(interactive
- (list (let* ((mod-func (erlang-get-function-under-point))
- (mod (car-safe mod-func))
- (func (nth 1 mod-func))
+ (list (let* ((default (erlang-default-function-or-module))
(input (read-string
(format
"Manual entry for `module:func' or `module'%s: "
- (if (or (null mod) (string= mod ""))
- ""
- (format " (default %s:%s)" mod func))))))
+ (if default
+ (format " (default %s)" default)
+ "")))))
(if (string= input "")
- (if (and mod func)
- (concat mod ":" func)
- mod)
+ default
input))))
- ;; Emacs 18 doesn't provide `man'...
- (condition-case nil
- (require 'man)
- (error nil))
+ (require 'man)
+ (setq name (or name
+ (erlang-default-function-or-module)))
(let ((modname nil)
(funcname nil))
- (cond ((null name)
- (let ((mod-func (erlang-get-function-under-point)))
- (setq modname (car-safe mod-func))
- (setq funcname (nth 1 mod-func))))
- ((string-match ":" name)
+ (cond ((string-match ":" name)
(setq modname (substring name 0 (match-beginning 0)))
(setq funcname (substring name (match-end 0) nil)))
((stringp name)
(setq modname name)))
- (if (or (null modname) (string= modname ""))
- (error "No Erlang module name given"))
+ (when (or (null modname) (string= modname ""))
+ (error "No Erlang module name given"))
(cond ((fboundp 'Man-notify-when-ready)
;; Emacs 19: The man command could possibly start an
;; asynchronous process, i.e. we must hook ourselves into
@@ -2168,16 +2177,20 @@ This function is aware of imported functions."
()
(erlang-man-patch-notify)
(setq erlang-man-function-name funcname))
- (condition-case nil
+ (condition-case err
(erlang-man-module modname)
- (error (setq erlang-man-function-name nil))))
+ (error (setq erlang-man-function-name nil)
+ (signal (car err) (cdr err)))))
(t
(erlang-man-module modname)
- (if funcname
- (erlang-man-find-function
- (or (get-buffer "*Manual Entry*") ; Emacs 18
- (current-buffer)) ; XEmacs
- funcname))))))
+ (when funcname
+ (erlang-man-find-function (current-buffer) funcname))))))
+
+(defun erlang-default-function-or-module ()
+ (let ((id (erlang-get-identifier-at-point)))
+ (if (eq (erlang-id-kind id) 'qualified-function)
+ (format "%s:%s" (erlang-id-module id) (erlang-id-name id))
+ (erlang-id-name id))))
;; Should the defadvice be at the top level, the package `advice' would
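
As a rough illustration of the two new default helpers (the buffer text is made up): with point somewhere on a qualified call such as `lists:reverse(L)' they would return:

(erlang-default-module)              ; => "lists"
(erlang-default-function-or-module)  ; => "lists:reverse"
;; On an unqualified call such as `reverse(L)' both return "reverse".
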
@@ -2222,36 +2235,22 @@ command is executed asynchronously."
(set-window-point win (point)))
(message "Could not find function `%s'" func)))))))
+(defvar erlang-man-file-regexp
+ "\\(.*\\)/man[^/]*/\\([^.]+\\)\\.\\([124-9]\\|3\\(erl\\)?\\)\\(\\.gz\\)?$")
(defun erlang-man-display (file)
"Display FILE as a `man' file.
This is the default manual page display function.
The variables `erlang-man-display-function' contains the function
to be used."
- ;; Emacs 18 doesn't `provide' man.
- (condition-case nil
- (require 'man)
- (error nil))
+ (require 'man)
(if file
(let ((process-environment (copy-sequence process-environment)))
- (if (string-match "\\(.*\\)/man[^/]*/\\([^.]+\\)\\.\\([124-9]\\|3\\(erl\\)?\\)\\(\\.gz\\)?$" file)
+ (if (string-match erlang-man-file-regexp file)
(let ((dir (substring file (match-beginning 1) (match-end 1)))
(page (substring file (match-beginning 2) (match-end 2))))
- (if (fboundp 'setenv)
- (setenv "MANPATH" dir)
- ;; Emacs 18
- (setq process-environment (cons (concat "MANPATH=" dir)
- process-environment)))
- (cond ((not (and (not erlang-xemacs-p)
- (= erlang-emacs-major-version 19)
- (< erlang-emacs-minor-version 29)))
- (manual-entry page))
- (t
- ;; Emacs 19.28 and earlier versions of 19:
- ;; The manual-entry command unconditionally prompts
- ;; the user :-(
- (funcall (symbol-function 'Man-getpage-in-background)
- page))))
+ (setenv "MANPATH" dir)
+ (manual-entry page))
(error "Can't find man page for %s\n" file)))))
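
A small sketch of how `erlang-man-display' now splits a manual page path with `erlang-man-file-regexp'; the path below is invented. Group 1 becomes MANPATH and group 2 is the page handed to `manual-entry':

(let ((file "/usr/local/otp/lib/erlang/man/man3/lists.3.gz"))
  (when (string-match erlang-man-file-regexp file)
    (list (match-string 1 file)     ; => "/usr/local/otp/lib/erlang/man"
          (match-string 2 file))))  ; => "lists"
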
@@ -2394,7 +2393,7 @@ can contain other `tempo' attributes. Please see the function
The first character of DD is space if the value is less than 10."
(let ((date (current-time-string)))
(format "%2d %s %s"
- (erlang-string-to-int (substring date 8 10))
+ (string-to-number (substring date 8 10))
(substring date 4 7)
(substring date -4))))
@@ -2956,10 +2955,10 @@ Return nil if inside string, t if in a comment."
((eq (car stack-top) '->)
;; If in fun definition use standard indent level not double
;;(if (not (eq (car (car (cdr stack))) 'fun))
- ;; Removed it made multi clause fun's look to bad
+ ;; Removed it made multi clause fun's look too bad
(setq off (+ erlang-indent-level (if (not erlang-icr-indent)
erlang-indent-level
- erlang-icr-indent)))))
+ erlang-icr-indent)))))
(let ((base (erlang-indent-find-base stack indent-point off skip)))
;; Special cases
(goto-char indent-point)
@@ -3597,7 +3596,7 @@ corresponds to the order of the parsed Erlang list."
(erlang-remove-quotes
(erlang-buffer-substring
(match-beginning 1) (match-end 1)))
- (erlang-string-to-int
+ (string-to-number
(erlang-buffer-substring
(match-beginning
(+ 1 erlang-atom-regexp-matches))
@@ -3696,34 +3695,50 @@ Normally used in conjunction with `erlang-beginning-of-clause', e.g.:
(defun erlang-get-function-arity ()
"Return the number of arguments of function at point, or nil."
- (and (looking-at (eval-when-compile
- (concat "^" erlang-atom-regexp "\\s *(")))
- (save-excursion
- (goto-char (match-end 0))
- (condition-case nil
- (let ((res 0)
- (cont t))
- (while cont
- (cond ((eobp)
- (setq res nil)
- (setq cont nil))
- ((looking-at "\\s *)")
- (setq cont nil))
- ((looking-at "\\s *\\($\\|%\\)")
- (forward-line 1))
- ((looking-at "\\s *<<[^>]*?>>")
- (when (zerop res)
- (setq res (+ 1 res)))
- (goto-char (match-end 0)))
- ((looking-at "\\s *,")
- (setq res (+ 1 res))
- (goto-char (match-end 0)))
- (t
- (when (zerop res)
- (setq res (+ 1 res)))
- (forward-sexp 1))))
- res)
- (error nil)))))
+ (erlang-get-arity-after-regexp (concat "^" erlang-atom-regexp "\\s *(")))
+
+(defun erlang-get-argument-list-arity ()
+ "Return the number of arguments in argument list at point, or nil.
+The point should be before the opening parenthesis of the
+argument list before calling this function."
+ (erlang-get-arity-after-regexp "\\s *("))
+
+(defun erlang-get-arity-after-regexp (regexp)
+ "Return the number of arguments in argument list after REGEXP, or nil."
+ (when (looking-at regexp)
+ (save-excursion
+ (goto-char (match-end 0))
+ (erlang-get-arity))))
+
+(defun erlang-get-arity ()
+ "Return the number of arguments in argument list at point, or nil.
+The point should be after the opening parenthesis of the argument
+list before calling this function."
+ (condition-case nil
+ (let ((res 0)
+ (cont t))
+ (while cont
+ (cond ((eobp)
+ (setq res nil)
+ (setq cont nil))
+ ((looking-at "\\s *)")
+ (setq cont nil))
+ ((looking-at "\\s *\\($\\|%\\)")
+ (forward-line 1))
+ ((looking-at "\\s *<<[^>]*?>>")
+ (when (zerop res)
+ (setq res (+ 1 res)))
+ (goto-char (match-end 0)))
+ ((looking-at "\\s *,")
+ (setq res (+ 1 res))
+ (goto-char (match-end 0)))
+ (t
+ (when (zerop res)
+ (setq res (+ 1 res)))
+ (forward-sexp 1))))
+ res)
+ (error nil)))
+
(defun erlang-get-function-name-and-arity ()
"Return the name and arity of the function at point, or nil.
@@ -3746,6 +3761,8 @@ The return value is a string of the form \"foo/1\"."
(error nil)))))
+;; Keeping erlang-get-function-under-point for backward compatibility.
+;; It is used by erldoc.el and maybe other code out there.
(defun erlang-get-function-under-point ()
"Return the module and function under the point, or nil.
@@ -3755,44 +3772,141 @@ list of imported functions is searched.
The following could be returned:
(\"module\" \"function\") -- Both module and function name found.
(nil \"function\") -- No module name was found.
- nil -- No function name found
+ nil -- No function name found.
+
+See also `erlang-get-identifier-at-point'."
+ (let* ((id (erlang-get-identifier-at-point))
+ (kind (erlang-id-kind id))
+ (module (erlang-id-module id))
+ (name (erlang-id-name id)))
+ (cond ((eq kind 'qualified-function)
+ (list module name))
+ (name
+ (list nil name)))))
+
+(defun erlang-get-identifier-at-point ()
+ "Return the erlang identifier at point, or nil.
+
+Should no explicit module name be present at the point, the
+list of imported functions is searched.
+
+When an identifier is found, return a list with 4 elements:
+
+1. Kind - One of the symbols qualified-function, record, macro,
+module or nil.
+
+2. Module - Module name string or nil. In case of a
+qualified-function the search fails if no entries with the correct
+module are found. For other kinds the module is just a
+preference. If no matching entries are found, the search is
+retried without regard to module.
+
+3. Name - String name of function, module, record or macro.
-In the future the list may contain more elements."
+4. Arity - Integer in case of functions and macros if the number
+of arguments could be found, otherwise nil."
(save-excursion
- (let ((md (match-data))
- (res nil))
+ (save-match-data
(if (eq (char-syntax (following-char)) ? )
(skip-chars-backward " \t"))
- (skip-chars-backward "a-zA-Z0-9_:'")
- (cond ((looking-at (eval-when-compile
- (concat erlang-atom-regexp ":" erlang-atom-regexp)))
- (setq res (list
- (erlang-remove-quotes
- (erlang-buffer-substring
- (match-beginning 1) (match-end 1)))
- (erlang-remove-quotes
- (erlang-buffer-substring
- (match-beginning (1+ erlang-atom-regexp-matches))
- (match-end (1+ erlang-atom-regexp-matches)))))))
- ((looking-at erlang-atom-regexp)
- (let ((fk (erlang-remove-quotes
- (erlang-buffer-substring
- (match-beginning 0) (match-end 0))))
- (mod nil)
- (imports (erlang-get-import)))
- (while (and imports (null mod))
- (if (assoc fk (cdr (car imports)))
- (setq mod (car (car imports)))
- (setq imports (cdr imports))))
- (cond ((eq (preceding-char) ?#)
- (setq fk (concat "-record(" fk)))
- ((eq (preceding-char) ??)
- (setq fk (concat "-define(" fk)))
- ((and (null mod) (not (member fk erlang-int-bifs)))
- (setq mod (erlang-get-module))))
- (setq res (list mod fk)))))
- (store-match-data md)
- res)))
+ (skip-chars-backward "[:word:]_:'")
+ (cond ((looking-at erlang-module-function-regexp)
+ (erlang-get-qualified-function-id-at-point))
+ ((looking-at (concat erlang-atom-regexp ":"))
+ (erlang-get-module-id-at-point))
+ ((looking-at erlang-name-regexp)
+ (erlang-get-some-other-id-at-point))))))
+
+(defun erlang-get-qualified-function-id-at-point ()
+ (let ((kind 'qualified-function)
+ (module (erlang-remove-quotes
+ (erlang-buffer-substring
+ (match-beginning 1) (match-end 1))))
+ (name (erlang-remove-quotes
+ (erlang-buffer-substring
+ (match-beginning (1+ erlang-atom-regexp-matches))
+ (match-end (1+ erlang-atom-regexp-matches)))))
+ (arity (progn
+ (goto-char (match-end 0))
+ (erlang-get-argument-list-arity))))
+ (list kind module name arity)))
+
+(defun erlang-get-module-id-at-point ()
+ (let ((kind 'module)
+ (module nil)
+ (name (erlang-remove-quotes
+ (erlang-buffer-substring (match-beginning 1)
+ (match-end 1))))
+ (arity nil))
+ (list kind module name arity)))
+
+(defun erlang-get-some-other-id-at-point ()
+ (let ((name (erlang-remove-quotes
+ (erlang-buffer-substring
+ (match-beginning 0) (match-end 0))))
+ (imports (erlang-get-import))
+ kind module arity)
+ (while (and imports (null module))
+ (if (assoc name (cdr (car imports)))
+ (setq module (car (car imports)))
+ (setq imports (cdr imports))))
+ (cond ((eq (preceding-char) ?#)
+ (setq kind 'record))
+ ((eq (preceding-char) ??)
+ (setq kind 'macro))
+ ((and (null module) (not (member name erlang-int-bifs)))
+ (setq module (erlang-get-module))))
+ (setq arity (progn
+ (goto-char (match-end 0))
+ (erlang-get-argument-list-arity)))
+ (list kind module name arity)))
+
+(defmacro erlang-with-id (slots id-string &rest body)
+ (declare (indent 2))
+ (let ((id-var (make-symbol "id")))
+ `(let* ((,id-var (erlang-id-to-list ,id-string))
+ ,@(mapcar (lambda (slot)
+ (list slot
+ (list (intern (format "erlang-id-%s" slot))
+ id-var)))
+ slots))
+ ,@body)))
+
+(defun erlang-id-to-string (id)
+ (when id
+ (erlang-with-id (kind module name arity) id
+ (format "%s%s%s%s"
+ (if kind (format "%s " kind) "")
+ (if module (format "%s:" module) "")
+ name
+ (if arity (format "/%s" arity) "")))))
+
+(defun erlang-id-to-list (id)
+ (if (listp id)
+ id
+ (save-match-data
+ (erlang-ensure-syntax-table-is-initialized)
+ (with-syntax-table erlang-mode-syntax-table
+ (let (case-fold-search)
+ (when (string-match erlang-id-regexp id)
+ (list (when (match-string 1 id)
+ (intern (match-string 1 id)))
+ (match-string 2 id)
+ (match-string 3 id)
+ (when (match-string 4 id)
+ (string-to-number (match-string 4 id))))))))))
+
+(defun erlang-id-kind (id)
+ (car (erlang-id-to-list id)))
+
+(defun erlang-id-module (id)
+ (nth 1 (erlang-id-to-list id)))
+
+(defun erlang-id-name (id)
+ (nth 2 (erlang-id-to-list id)))
+
+(defun erlang-id-arity (id)
+ (nth 3 (erlang-id-to-list id)))
;; TODO: Escape single quotes inside the string without
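
To make the new identifier representation concrete, here is a minimal sketch (the identifier below is made up). `erlang-get-identifier-at-point' returns a list of this shape; the `erlang-id-*' accessors and `erlang-with-id' operate on it, and `erlang-id-to-string' produces the string form handed to the xref backend:

(let ((id '(qualified-function "lists" "reverse" 1)))
  (list (erlang-id-to-string id)          ; => "qualified-function lists:reverse/1"
        (erlang-id-module id)             ; => "lists"
        (erlang-id-arity id)              ; => 1
        (erlang-with-id (module name) id
          (format "%s:%s" module name)))) ; => "lists:reverse"
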
@@ -3822,10 +3936,10 @@ In the future the list may contain more elements."
"Returns non-nil if there is an exported function in the current
buffer between point and MAX."
(block nil
- (while (and (not erlang-inhibit-exported-function-name-face)
- (erlang-match-next-function max))
- (when (erlang-last-match-exported-p)
- (return (match-data))))))
+ (while (and (not erlang-inhibit-exported-function-name-face)
+ (erlang-match-next-function max))
+ (when (erlang-last-match-exported-p)
+ (return (match-data))))))
(defun erlang-match-next-function (max)
"Searches forward in current buffer for the next erlang function,
@@ -4084,7 +4198,7 @@ non-whitespace characters following the point on the current line."
nil)))
-(defun erlang-electric-arrow\ off (&optional arg)
+(defun erlang-electric-arrow (&optional arg)
"Insert a '>'-sign and possibly a new indented line.
This command is only `electric' when the `>' is part of an `->' arrow.
@@ -4310,8 +4424,8 @@ This function is designed to be a member of a criteria list."
(looking-at "end[^_a-zA-Z0-9]")))
-;; Erlang tags support which is aware of erlang modules.
-;;
+;;; Erlang tags support which is aware of erlang modules.
+
;; Not yet implemented under XEmacs. (Hint: The Emacs 19 etags
;; package works under XEmacs.)
@@ -4369,7 +4483,7 @@ This function only works under Emacs 18 and Emacs 19. Currently, It
is not implemented under XEmacs. (Hint: The Emacs 19 etags module
works under XEmacs.)"
(interactive)
- (cond ((= erlang-emacs-major-version 18)
+ (cond ((= emacs-major-version 18)
(require 'tags)
(erlang-tags-define-keys (current-local-map))
(setq erlang-tags-installed t))
@@ -4409,20 +4523,6 @@ works under XEmacs.)"
(erlang-menu-substitute erlang-menu-base-items erlang-tags-function-alist)
(erlang-menu-init))
-
-(defun erlang-find-tag-default ()
- "Return the default tag.
-Search `-import' list of imported functions.
-Single quotes are been stripped away."
- (let ((mod-func (erlang-get-function-under-point)))
- (cond ((null mod-func)
- nil)
- ((null (car mod-func))
- (nth 1 mod-func))
- (t
- (concat (car mod-func) ":" (nth 1 mod-func))))))
-
-
;; Return `t' since it is used inside `tags-loop-form'.
;;;###autoload
(defun erlang-find-tag (modtagname &optional next-p regexp-p)
@@ -4609,7 +4709,7 @@ Tags can be given on the forms `tag', `module:', `module:tag'."
(list nil (if (< (prefix-numeric-value current-prefix-arg) 0)
'-
t))
- (let* ((default (erlang-find-tag-default))
+ (let* ((default (erlang-default-function-or-module))
(prompt (if default
(format "%s(default %s) " prompt default)
prompt))
@@ -4633,7 +4733,7 @@ Tags can be given on the forms `tag', `module:', `module:tag'."
;; Make sure our functions are installed in TAGS files loaded
;; into Emacs while searching.
(cond
- ((>= erlang-emacs-major-version 20)
+ ((>= emacs-major-version 20)
(setq erlang-tags-orig-format-functions
(symbol-value 'tags-table-format-functions))
(funcall (symbol-function 'set) 'tags-table-format-functions
@@ -4711,7 +4811,7 @@ Tags can be given on the forms `tag', `module:', `module:tag'."
(defun erlang-tags-remove-module-check ()
"Remove our own tags search functions."
(cond
- ((>= erlang-emacs-major-version 20)
+ ((>= emacs-major-version 20)
(funcall (symbol-function 'set)
'tags-table-format-functions
erlang-tags-orig-format-functions)
@@ -4961,6 +5061,14 @@ about Erlang modules."
;; It adds awareness of the module:tag syntax in a similar way that is
;; done above for the old etags commands.
+(defvar erlang-current-arity nil
+ "The arity of the function currently being searched.
+
+There is no information about arity in the TAGS file.
+Consecutive functions with the same name but different arities
+get only one entry in the TAGS file. Matching TAGS entries are
+therefore selected without regard to arity. The arity is not
+considered until it is time to jump to the definition.")
(defun erlang-etags--xref-backend () 'erlang-etags)
@@ -4970,13 +5078,14 @@ about Erlang modules."
(and (erlang-soft-require 'xref)
(erlang-soft-require 'cl-generic)
+ (erlang-soft-require 'eieio)
;; The purpose of using eval here is to avoid compilation
- ;; warnings in emacsen without cl-defmethod.
+ ;; warnings in emacsen without cl-defmethod etc.
(eval
'(progn
(cl-defmethod xref-backend-identifier-at-point
((_backend (eql erlang-etags)))
- (erlang-find-tag-default))
+ (erlang-id-to-string (erlang-get-identifier-at-point)))
(cl-defmethod xref-backend-definitions
((_backend (eql erlang-etags)) identifier)
@@ -4989,42 +5098,99 @@ about Erlang modules."
(cl-defmethod xref-backend-identifier-completion-table
((_backend (eql erlang-etags)))
(let ((erlang-replace-etags-tags-completion-table t))
- (tags-completion-table))))))
-
-
+ (tags-completion-table)))
+
+ (defclass erlang-xref-location (xref-etags-location) ())
+
+ (defun erlang-convert-xrefs (xrefs)
+ (mapcar (lambda (xref)
+ (oset xref location (erlang-make-location
+ (oref xref location)))
+ xref)
+ xrefs))
+
+ (defun erlang-make-location (etags-location)
+ (with-slots (tag-info file) etags-location
+ (make-instance 'erlang-xref-location :tag-info tag-info
+ :file file)))
+
+ (cl-defmethod xref-location-marker ((locus erlang-xref-location))
+ (with-slots (tag-info file) locus
+ (with-current-buffer (find-file-noselect file)
+ (save-excursion
+ (or (erlang-goto-tag-location-by-arity tag-info)
+ (etags-goto-tag-location tag-info))
+                    ;; Reset erlang-current-arity. We want to jump to
+                    ;; the correct arity on the first attempt. That is now
+ ;; done. Possible remaining jumps will be from
+ ;; entries in the *xref* buffer and then we want to
+ ;; ignore the arity. (Alternatively we could remove
+ ;; all but one xref entry per file when we know the
+ ;; arity).
+ (setq erlang-current-arity nil)
+ (point-marker)))))
+
+ (defun erlang-xref-context (xref)
+ (with-slots (tag-info) (xref-item-location xref)
+ (car tag-info))))))
+
+
+(defun erlang-goto-tag-location-by-arity (tag-info)
+ (when erlang-current-arity
+ (let* ((tag-text (car tag-info))
+ (tag-pos (cdr (cdr tag-info)))
+ (tag-line (car (cdr tag-info)))
+ (regexp (erlang-tag-info-regexp tag-text))
+ (startpos (or tag-pos
+ (when tag-line
+ (goto-char (point-min))
+ (forward-line (1- tag-line))
+ (point))
+ (point-min))))
+ (setq startpos (max (- startpos 2000)
+ (point-min)))
+ (goto-char startpos)
+ (let ((pos (or (erlang-search-by-arity regexp)
+ (unless (eq startpos (point-min))
+ (goto-char (point-min))
+ (erlang-search-by-arity regexp)))))
+ (when pos
+ (goto-char pos)
+ t)))))
+
+(defun erlang-tag-info-regexp (tag-text)
+ (concat "^"
+ (regexp-quote tag-text)
+          ;; Erlang function entries in TAGS include the opening
+          ;; parenthesis for the argument list. Erlang macro entries
+          ;; do not. Add it here in order to end up in the correct
+          ;; position for erlang-get-arity.
+ (if (string-prefix-p "-define" tag-text)
+ "\\s-*("
+ "")))
+
+(defun erlang-search-by-arity (regexp)
+ (let (pos)
+ (while (and (null pos)
+ (re-search-forward regexp nil t))
+ (when (eq erlang-current-arity (save-excursion (erlang-get-arity)))
+ (setq pos (point-at-bol))))
+ pos))
(defun erlang-xref-find-definitions (identifier &optional is-regexp)
- (let ((id-list (split-string identifier ":")))
- (cond
- ;; Handle "tag"
- ((null (cdr id-list))
- (erlang-xref-find-definitions-tag identifier is-regexp))
- ;; Handle "module:"
- ((string-equal (cadr id-list) "")
- (erlang-xref-find-definitions-module (car id-list)))
- ;; Handle "module:tag"
- (t
- (erlang-xref-find-definitions-module-tag (car id-list)
- (cadr id-list)
- is-regexp)))))
-
-(defun erlang-xref-find-definitions-tag (tag is-regexp)
- "Find all definitions of TAG and reorder them so that
-definitions in the currently visited file comes first."
- (when (fboundp 'etags--xref-find-definitions)
- (let* ((current-file (and (buffer-file-name)
- (file-truename (buffer-file-name))))
- (xrefs (etags--xref-find-definitions tag is-regexp))
- local-xrefs non-local-xrefs)
- (while xrefs
- (if (string-equal (erlang-xref-truename-file (car xrefs))
- current-file)
- (push (car xrefs) local-xrefs)
- (push (car xrefs) non-local-xrefs))
- (setq xrefs (cdr xrefs)))
- (append (reverse local-xrefs)
- (reverse non-local-xrefs)))))
+ (erlang-with-id (kind module name arity) identifier
+ (setq erlang-current-arity arity)
+ (cond ((eq kind 'module)
+ (erlang-xref-find-definitions-module name))
+ (module
+ (erlang-xref-find-definitions-module-tag module
+ name
+ (eq kind
+ 'qualified-function)
+ is-regexp))
+ (t
+ (erlang-xref-find-definitions-tag kind name is-regexp)))))
(defun erlang-xref-find-definitions-module (module)
(and (fboundp 'xref-make)
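
A sketch of the arity-aware jump: `erlang-tag-info-regexp' anchors the search at the tag text, adding the opening parenthesis only for `-define' entries, and `erlang-search-by-arity' then keeps only matches whose `erlang-get-arity' equals `erlang-current-arity'. The tag texts below are made up:

(erlang-tag-info-regexp "handle_call(")     ; => "^handle_call("
(erlang-tag-info-regexp "-define(TIMEOUT")  ; => "^-define(TIMEOUT\\s-*("
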
@@ -5048,17 +5214,58 @@ definitions in the currently visited file comes first."
(setq files (cdr files))))))
(nreverse xrefs))))
-(defun erlang-xref-find-definitions-module-tag (module tag is-regexp)
- "Find all definitions of TAG and filter away definitions
-outside of MODULE."
- (when (fboundp 'etags--xref-find-definitions)
- (let ((xrefs (etags--xref-find-definitions tag is-regexp))
- xrefs-in-module)
- (while xrefs
- (when (string-equal module (erlang-xref-module (car xrefs)))
- (push (car xrefs) xrefs-in-module))
- (setq xrefs (cdr xrefs)))
- xrefs-in-module)))
+
+(defun erlang-xref-find-definitions-module-tag (module
+ tag
+ is-qualified
+ is-regexp)
+ "Find definitions of TAG and filter away definitions outside of
+MODULE. If IS-QUALIFIED is nil and no definitions were found inside
+the MODULE then return any definitions found outside. If
+IS-REGEXP is non-nil then TAG is a regexp."
+ (and (fboundp 'etags--xref-find-definitions)
+ (fboundp 'erlang-convert-xrefs)
+ (let ((xrefs (erlang-convert-xrefs
+ (etags--xref-find-definitions tag is-regexp)))
+ xrefs-in-module)
+ (dolist (xref xrefs)
+ (when (string-equal module (erlang-xref-module xref))
+ (push xref xrefs-in-module)))
+ (cond (is-qualified xrefs-in-module)
+ (xrefs-in-module xrefs-in-module)
+ (t xrefs)))))
+
+(defun erlang-xref-find-definitions-tag (kind tag is-regexp)
+ "Find all definitions of TAG and reorder them so that
+definitions in the currently visited file come first."
+ (and (fboundp 'etags--xref-find-definitions)
+ (fboundp 'erlang-convert-xrefs)
+ (let* ((current-file (and (buffer-file-name)
+ (file-truename (buffer-file-name))))
+ (regexp (erlang-etags-regexp kind tag is-regexp))
+ (xrefs (erlang-convert-xrefs
+ (etags--xref-find-definitions regexp t)))
+ local-xrefs non-local-xrefs)
+ (while xrefs
+ (let ((xref (car xrefs)))
+ (if (string-equal (erlang-xref-truename-file xref)
+ current-file)
+ (push xref local-xrefs)
+ (push xref non-local-xrefs))
+ (setq xrefs (cdr xrefs))))
+ (append (reverse local-xrefs)
+ (reverse non-local-xrefs)))))
+
+(defun erlang-etags-regexp (kind tag is-regexp)
+ (let ((tag-regexp (if is-regexp
+ tag
+ (regexp-quote tag))))
+ (cond ((eq kind 'record)
+ (concat "-record\\s-*(\\s-*" tag-regexp))
+ ((eq kind 'macro)
+ (concat "-define\\s-*(\\s-*" tag-regexp))
+ (t tag-regexp))))
+
(defun erlang-xref-module (xref)
(erlang-get-module-from-file-name (erlang-xref-file xref)))
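
For reference, a small sketch of the regexps that `erlang-etags-regexp' builds for each identifier kind; the tag names are made up:

(erlang-etags-regexp 'record "state" nil)    ; => "-record\\s-*(\\s-*state"
(erlang-etags-regexp 'macro "TIMEOUT" nil)   ; => "-define\\s-*(\\s-*TIMEOUT"
(erlang-etags-regexp nil "init" nil)         ; => "init"
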
@@ -5174,7 +5381,7 @@ future, a new shell on an already running host will be started."
(defvar erlang-shell-mode-hook nil
- "*User functions to run when an Erlang shell is started.
+ "User functions to run when an Erlang shell is started.
This hook is used to change the behaviour of Erlang mode. It is
normally used by the user to personalise the programming environment.
@@ -5190,7 +5397,7 @@ Erlang source file is loaded into Emacs.")
(defvar erlang-input-ring-file-name "~/.erlang_history"
- "*When non-nil, file name used to store Erlang shell history information.")
+ "When non-nil, file name used to store Erlang shell history information.")
(defun erlang-shell-mode ()
@@ -5290,7 +5497,7 @@ Selects Comint or Compilation mode command as appropriate."
;;;
(defvar inferior-erlang-display-buffer-any-frame nil
- "*When nil, `inferior-erlang-display-buffer' use only selected frame.
+  "When nil, `inferior-erlang-display-buffer' uses only the selected frame.
When t, all frames are searched. When 'raise, the frame is raised.")
(defvar inferior-erlang-shell-type 'newshell
@@ -5303,10 +5510,10 @@ nil, the default shell is used.
This variable influence the setting of other variables.")
(defvar inferior-erlang-machine "erl"
- "*The name of the Erlang shell.")
+ "The name of the Erlang shell.")
(defvar inferior-erlang-machine-options '()
- "*The options used when activating the Erlang shell.
+ "The options used when activating the Erlang shell.
This must be a list of strings.")
@@ -5317,7 +5524,7 @@ This must be a list of strings.")
"The name of the inferior Erlang buffer.")
(defvar inferior-erlang-prompt-timeout 60
- "*Number of seconds before `inferior-erlang-wait-prompt' timeouts.
+  "Number of seconds before `inferior-erlang-wait-prompt' times out.
The time specified is waited after every output made by the inferior
Erlang shell. When this variable is t, we assume that we always have
@@ -5383,7 +5590,7 @@ editing control characters:
(setq inferior-erlang-process
(get-buffer-process inferior-erlang-buffer))
- (if (> 21 erlang-emacs-major-version) ; funcalls to avoid compiler warnings
+ (if (> 21 emacs-major-version) ; funcalls to avoid compiler warnings
(funcall (symbol-function 'set-process-query-on-exit-flag)
inferior-erlang-process nil)
(funcall (symbol-function 'process-kill-without-query) inferior-erlang-process))
@@ -5454,7 +5661,7 @@ frame will become deselected before the next command."
(defun inferior-erlang-window (&optional all-frames)
"Return the window containing the inferior Erlang, or nil."
(and (inferior-erlang-running-p)
- (if (and all-frames (>= erlang-emacs-major-version 19))
+ (if (and all-frames (>= emacs-major-version 19))
(get-buffer-window inferior-erlang-buffer t)
(get-buffer-window inferior-erlang-buffer))))
@@ -5551,7 +5758,7 @@ Return the position after the newly inserted command."
(boundp 'comint-last-output-start))
(save-excursion
(goto-char
- (if (erlang-interactive-p)
+ (if (called-interactively-p 'interactive)
(symbol-value 'comint-last-input-end)
(symbol-value 'comint-last-output-start)))
(while (progn (skip-chars-forward "^\C-h")
@@ -5570,7 +5777,7 @@ Return the position after the newly inserted command."
(let ((pmark (process-mark (get-buffer-process (current-buffer)))))
(save-excursion
(goto-char
- (if (erlang-interactive-p)
+ (if (called-interactively-p 'interactive)
(symbol-value 'comint-last-input-end)
(symbol-value 'comint-last-output-start)))
(while (re-search-forward "\r+$" pmark t)
@@ -5938,12 +6145,6 @@ it assumes that NEWDEF is loaded."
(ad-unadvise 'Man-notify-when-ready)
(ad-unadvise 'set-visited-file-name)))))
-
-(defun erlang-string-to-int (string)
- (if (fboundp 'string-to-number)
- (string-to-number string)
- (funcall (symbol-function 'string-to-int) string)))
-
;; The end...
(provide 'erlang)
@@ -5951,7 +6152,7 @@ it assumes that NEWDEF is loaded."
(run-hooks 'erlang-load-hook)
;; Local variables:
-;; coding: iso-8859-1
+;; coding: utf-8
;; indent-tabs-mode: nil
;; End:
diff --git a/lib/tools/emacs/erldoc.el b/lib/tools/emacs/erldoc.el
index cb355374d9..348800f880 100644
--- a/lib/tools/emacs/erldoc.el
+++ b/lib/tools/emacs/erldoc.el
@@ -23,8 +23,8 @@
;; Crawl Erlang/OTP HTML documentation and generate lookup tables.
;;
;; This package depends on `cl-lib', `pcase' and
-;; `libxml-parse-html-region'; emacs 24+ compiled with libxml2 should
-;; work. On emacs 24.1 and 24.2 do `M-x package-install RET cl-lib
+;; `libxml-parse-html-region'. Emacs 24+ compiled with libxml2 should
+;; work. On Emacs 24.1 and 24.2 do `M-x package-install RET cl-lib
;; RET' to install `cl-lib'.
;;
;; Please customise `erldoc-man-index' to point to your local OTP
@@ -407,7 +407,7 @@ up the indexing."
(defvar erldoc-user-guides nil)
(defvar erldoc-missing-user-guides
- '("compiler" "hipe" "kernel" "os_mon" "parsetools" "typer")
+ '("compiler" "hipe" "kernel" "os_mon" "parsetools")
"List of standard Erlang applications with no user guides.")
;; Search in `code:lib_dir/0' using find LIB_DIR -type f -name
@@ -417,7 +417,7 @@ up the indexing."
"runtime_tools" "sasl" "snmp"
"ssl" "test_server"
("ssh" . "SSH") ("stdlib" . "STDLIB")
- ("hipe" . "HiPE") ("typer" . "TypEr"))
+ ("hipe" . "HiPE"))
"List of applications that come with a manual.")
(defun erldoc-user-guide-chapters (user-guide)
@@ -505,4 +505,10 @@ up the indexing."
(browse-url (cdr (assoc topic (erldoc-user-guides)))))
(provide 'erldoc)
+
+;; Local variables:
+;; coding: utf-8
+;; indent-tabs-mode: nil
+;; End:
+
;;; erldoc.el ends here
diff --git a/lib/tools/examples/xref_examples.erl b/lib/tools/examples/xref_examples.erl
index 4c082195a2..f7e71c9708 100644
--- a/lib/tools/examples/xref_examples.erl
+++ b/lib/tools/examples/xref_examples.erl
@@ -7,7 +7,7 @@
%% ${HOME}/unused_locals.txt.
script() ->
Root = code:root_dir(),
- Dir = os:getenv("HOME"),
+ {ok,[[Dir]]} = init:get_argument(home),
Server = s,
xref:start(Server),
{ok, _Relname} = xref:add_release(Server, code:lib_dir(), {name,otp}),
diff --git a/lib/tools/src/make.erl b/lib/tools/src/make.erl
index 37e67cbe34..60695febb4 100644
--- a/lib/tools/src/make.erl
+++ b/lib/tools/src/make.erl
@@ -29,7 +29,7 @@
-include_lib("kernel/include/file.hrl").
--define(MakeOpts,[noexec,load,netload,noload]).
+-define(MakeOpts,[noexec,load,netload,noload,emake]).
all_or_nothing() ->
case all() of
@@ -43,29 +43,30 @@ all() ->
all([]).
all(Options) ->
- {MakeOpts,CompileOpts} = sort_options(Options,[],[]),
- case read_emakefile('Emakefile',CompileOpts) of
- Files when is_list(Files) ->
- do_make_files(Files,MakeOpts);
- error ->
- error
- end.
+ run_emake(undefined, Options).
files(Fs) ->
files(Fs, []).
files(Fs0, Options) ->
Fs = [filename:rootname(F,".erl") || F <- Fs0],
+ run_emake(Fs, Options).
+
+run_emake(Mods, Options) ->
{MakeOpts,CompileOpts} = sort_options(Options,[],[]),
- case get_opts_from_emakefile(Fs,'Emakefile',CompileOpts) of
+ Emake = get_emake(Options),
+ case normalize_emake(Emake, Mods, CompileOpts) of
Files when is_list(Files) ->
- do_make_files(Files,MakeOpts);
- error -> error
+ do_make_files(Files,MakeOpts);
+ error ->
+ error
end.
do_make_files(Fs, Opts) ->
process(Fs, lists:member(noexec, Opts), load_opt(Opts)).
+sort_options([{emake, _}=H|T],Make,Comp) ->
+ sort_options(T,[H|Make],Comp);
sort_options([H|T],Make,Comp) ->
case lists:member(H,?MakeOpts) of
@@ -89,20 +90,35 @@ sort_options([],Make,Comp) ->
%%%
%%% These elements are converted to [{ModList,OptList},...]
%%% ModList is a list of modulenames (strings)
-read_emakefile(Emakefile,Opts) ->
- case file:consult(Emakefile) of
- {ok,Emake} ->
+
+normalize_emake(EmakeRaw, Mods, Opts) ->
+ case EmakeRaw of
+ {ok, Emake} when Mods =:= undefined ->
transform(Emake,Opts,[],[]);
- {error,enoent} ->
+ {ok, Emake} when is_list(Mods) ->
+ ModsOpts = transform(Emake,Opts,[],[]),
+ ModStrings = [coerce_2_list(M) || M <- Mods],
+ get_opts_from_emakefile(ModsOpts,ModStrings,Opts,[]);
+ {error,enoent} when Mods =:= undefined ->
%% No Emakefile found - return all modules in current
%% directory and the options given at command line
- Mods = [filename:rootname(F) || F <- filelib:wildcard("*.erl")],
+ CwdMods = [filename:rootname(F) || F <- filelib:wildcard("*.erl")],
+ [{CwdMods, Opts}];
+ {error,enoent} when is_list(Mods) ->
[{Mods, Opts}];
- {error,Other} ->
- io:format("make: Trouble reading 'Emakefile':~n~tp~n",[Other]),
+ {error, Error} ->
+ io:format("make: Trouble reading 'Emakefile':~n~tp~n",[Error]),
error
end.
+get_emake(Opts) ->
+ case proplists:get_value(emake, Opts, false) of
+ false ->
+ file:consult('Emakefile');
+ OptsEmake ->
+ {ok, OptsEmake}
+ end.
+
transform([{Mod,ModOpts}|Emake],Opts,Files,Already) ->
case expand(Mod,Already) of
[] ->
@@ -143,31 +159,19 @@ expand(Mod,Already) ->
end
end.
-%%% Reads the given Emakefile to see if there are any specific compile
+%%% Reads the given Emake to see if there are any specific compile
%%% options given for the modules.
-get_opts_from_emakefile(Mods,Emakefile,Opts) ->
- case file:consult(Emakefile) of
- {ok,Emake} ->
- Modsandopts = transform(Emake,Opts,[],[]),
- ModStrings = [coerce_2_list(M) || M <- Mods],
- get_opts_from_emakefile2(Modsandopts,ModStrings,Opts,[]);
- {error,enoent} ->
- [{Mods, Opts}];
- {error,Other} ->
- io:format("make: Trouble reading 'Emakefile':~n~tp~n",[Other]),
- error
- end.
-get_opts_from_emakefile2([{MakefileMods,O}|Rest],Mods,Opts,Result) ->
+get_opts_from_emakefile([{MakefileMods,O}|Rest],Mods,Opts,Result) ->
case members(Mods,MakefileMods,[],Mods) of
{[],_} ->
- get_opts_from_emakefile2(Rest,Mods,Opts,Result);
+ get_opts_from_emakefile(Rest,Mods,Opts,Result);
{I,RestOfMods} ->
- get_opts_from_emakefile2(Rest,RestOfMods,Opts,[{I,O}|Result])
+ get_opts_from_emakefile(Rest,RestOfMods,Opts,[{I,O}|Result])
end;
-get_opts_from_emakefile2([],[],_Opts,Result) ->
+get_opts_from_emakefile([],[],_Opts,Result) ->
Result;
-get_opts_from_emakefile2([],RestOfMods,Opts,Result) ->
+get_opts_from_emakefile([],RestOfMods,Opts,Result) ->
[{RestOfMods,Opts}|Result].
members([H|T],MakefileMods,I,Rest) ->
diff --git a/lib/tools/test/Makefile b/lib/tools/test/Makefile
index 84c4e56aff..fe65d1484d 100644
--- a/lib/tools/test/Makefile
+++ b/lib/tools/test/Makefile
@@ -52,8 +52,8 @@ RELSYSDIR = $(RELEASE_PATH)/tools_test
# ----------------------------------------------------
# FLAGS
# ----------------------------------------------------
-ERL_MAKE_FLAGS +=
-ERL_COMPILE_FLAGS += -I$(ERL_TOP)/lib/percept/include
+ERL_MAKE_FLAGS +=
+ERL_COMPILE_FLAGS +=
EBIN = .
diff --git a/lib/tools/test/make_SUITE.erl b/lib/tools/test/make_SUITE.erl
index e6284db8b8..2a94ead329 100644
--- a/lib/tools/test/make_SUITE.erl
+++ b/lib/tools/test/make_SUITE.erl
@@ -20,7 +20,7 @@
-module(make_SUITE).
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
- init_per_group/2,end_per_group/2, make_all/1, make_files/1]).
+ init_per_group/2,end_per_group/2, make_all/1, make_files/1, emake_opts/1]).
-export([otp_6057_init/1,
otp_6057_a/1, otp_6057_b/1, otp_6057_c/1,
otp_6057_end/1]).
@@ -40,7 +40,7 @@
suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
- [make_all, make_files, {group, otp_6057}].
+ [make_all, make_files, emake_opts, {group, otp_6057}].
groups() ->
[{otp_6057,[],[otp_6057_a, otp_6057_b,
@@ -86,6 +86,20 @@ make_files(Config) when is_list(Config) ->
ensure_no_messages(),
ok.
+emake_opts(Config) when is_list(Config) ->
+ Current = prepare_data_dir(Config),
+
+    %% Prove that the emake option is used instead of the local Emakefile.
+ Opts = [{emake, [test8, test9]}],
+ error = make:all(Opts),
+ error = make:files([test9], Opts),
+ "test8.beam" = ensure_exists([test8]),
+ "test9.beam" = ensure_exists([test9]),
+ "test5.S" = ensure_exists(["test5"],".S"),
+
+ file:set_cwd(Current),
+ ensure_no_messages(),
+ ok.
%% Moves to the data directory of this suite, clean it from any object
%% files (*.jam for a JAM emulator). Returns the previous directory.
diff --git a/lib/typer/Makefile b/lib/typer/Makefile
deleted file mode 100644
index bd1b6458a8..0000000000
--- a/lib/typer/Makefile
+++ /dev/null
@@ -1,44 +0,0 @@
-#
-# %CopyrightBegin%
-#
-# Copyright Ericsson AB 2006-2016. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# %CopyrightEnd%
-#
-#=============================================================================
-#
-# File: lib/typer/Makefile
-# Authors: Bingwen He, Tobias Lindahl, and Kostis Sagonas
-#
-#=============================================================================
-include $(ERL_TOP)/make/target.mk
-include $(ERL_TOP)/make/$(TARGET)/otp.mk
-
-#
-# Macros
-#
-
-SUB_DIRECTORIES = src doc/src
-
-include vsn.mk
-VSN = $(TYPER_VSN)
-
-SPECIAL_TARGETS =
-
-#
-# Default Subdir Targets
-#
-include $(ERL_TOP)/make/otp_subdir.mk
-
diff --git a/lib/typer/RELEASE_NOTES b/lib/typer/RELEASE_NOTES
deleted file mode 100644
index d91a815ee9..0000000000
--- a/lib/typer/RELEASE_NOTES
+++ /dev/null
@@ -1,22 +0,0 @@
-==============================================================================
- Major features, additions and changes between Typer versions
- (in reversed chronological order)
-==============================================================================
-
-Version 0.9 (in Erlang/OTP R14B02)
-----------------------------------
- - Major rewrite; all code has been cleaned up and placed in one file.
- The only reason why this is not version 1.0 yet is that there is no proper
- documentation for typer which can be displayed in the www.erlang.org site.
- - Added ability to receive the set of exported types and report unknown ones.
- - Better handling of overloaded contracts; especially erroneous ones on which
- typer does not crash anymore.
- - Fixed problem that caused typer to hang when given a file whose module name
- did not correspond to the file name.
- - Added two undocumented options that may come very handy when trying to
- understand why typer reports some particular set of types for the functions
- in a module. These options are mainly for typer developers at this point,
- but may become documented in some future version.
-
-Older versions
---------------
diff --git a/lib/typer/doc/Makefile b/lib/typer/doc/Makefile
deleted file mode 100644
index 1015ca78eb..0000000000
--- a/lib/typer/doc/Makefile
+++ /dev/null
@@ -1,40 +0,0 @@
-#
-# %CopyrightBegin%
-#
-# Copyright Ericsson AB 2006-2016. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# %CopyrightEnd%
-#
-SHELL=/bin/sh
-
-include $(ERL_TOP)/make/target.mk
-include $(ERL_TOP)/make/$(TARGET)/otp.mk
-
-clean:
- -rm -f *.html edoc-info stylesheet.css erlang.png
-
-distclean: clean
-realclean: clean
-
-# ----------------------------------------------------
-# Special Build Targets
-# ----------------------------------------------------
-
-
-
-# ----------------------------------------------------
-# Release Target
-# ----------------------------------------------------
-include $(ERL_TOP)/make/otp_release_targets.mk
diff --git a/lib/typer/doc/html/.gitignore b/lib/typer/doc/html/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/lib/typer/doc/html/.gitignore
+++ /dev/null
diff --git a/lib/typer/doc/pdf/.gitignore b/lib/typer/doc/pdf/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/lib/typer/doc/pdf/.gitignore
+++ /dev/null
diff --git a/lib/typer/doc/src/Makefile b/lib/typer/doc/src/Makefile
deleted file mode 100644
index 3724a2e4d1..0000000000
--- a/lib/typer/doc/src/Makefile
+++ /dev/null
@@ -1,118 +0,0 @@
-#
-# %CopyrightBegin%
-#
-# Copyright Ericsson AB 2006-2016. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# %CopyrightEnd%
-#
-include $(ERL_TOP)/make/target.mk
-include $(ERL_TOP)/make/$(TARGET)/otp.mk
-
-# ----------------------------------------------------
-# Application version
-# ----------------------------------------------------
-include ../../vsn.mk
-VSN=$(TYPER_VSN)
-APPLICATION=typer
-
-# ----------------------------------------------------
-# Release directory specification
-# ----------------------------------------------------
-RELSYSDIR = $(RELEASE_PATH)/lib/$(APPLICATION)-$(VSN)
-
-# ----------------------------------------------------
-# Target Specs
-# ----------------------------------------------------
-XML_APPLICATION_FILES = ref_man.xml
-XML_REF3_FILES =
-
-XML_PART_FILES = part_notes.xml
-XML_CHAPTER_FILES = notes.xml
-
-BOOK_FILES = book.xml
-
-XML_FILES = \
- $(BOOK_FILES) $(XML_CHAPTER_FILES) \
- $(XML_PART_FILES) $(XML_REF3_FILES) $(XML_APPLICATION_FILES)
-
-GIF_FILES =
-
-# ----------------------------------------------------
-
-HTML_FILES = $(XML_APPLICATION_FILES:%.xml=$(HTMLDIR)/%.html) \
- $(XML_PART_FILES:%.xml=$(HTMLDIR)/%.html)
-
-INFO_FILE = ../../info
-EXTRA_FILES = \
- $(DEFAULT_GIF_FILES) \
- $(DEFAULT_HTML_FILES) \
- $(XML_REF3_FILES:%.xml=$(HTMLDIR)/%.html) \
- $(XML_CHAPTER_FILES:%.xml=$(HTMLDIR)/%.html)
-
-MAN3_FILES = $(XML_REF3_FILES:%.xml=$(MAN3DIR)/%.3)
-
-HTML_REF_MAN_FILE = $(HTMLDIR)/index.html
-
-TOP_PDF_FILE = $(PDFDIR)/$(APPLICATION)-$(VSN).pdf
-
-# ----------------------------------------------------
-# FLAGS
-# ----------------------------------------------------
-XML_FLAGS +=
-
-# ----------------------------------------------------
-# Targets
-# ----------------------------------------------------
-$(HTMLDIR)/%.gif: %.gif
- $(INSTALL_DATA) $< $@
-
-docs: pdf html man
-
-$(TOP_PDF_FILE): $(XML_FILES)
-
-pdf: $(TOP_PDF_FILE)
-
-html: gifs $(HTML_REF_MAN_FILE)
-
-man: $(MAN3_FILES)
-
-gifs: $(GIF_FILES:%=$(HTMLDIR)/%)
-
-debug opt:
-
-clean clean_docs:
- rm -rf $(HTMLDIR)/*
- rm -f $(MAN3DIR)/*
- rm -f $(TOP_PDF_FILE) $(TOP_PDF_FILE:%.pdf=%.fo)
- rm -f errs core *~
-
-distclean: clean
-realclean: clean
-
-# ----------------------------------------------------
-# Release Target
-# ----------------------------------------------------
-include $(ERL_TOP)/make/otp_release_targets.mk
-
-release_docs_spec: docs
- $(INSTALL_DIR) "$(RELSYSDIR)/doc/pdf"
- $(INSTALL_DATA) $(TOP_PDF_FILE) "$(RELSYSDIR)/doc/pdf"
- $(INSTALL_DIR) "$(RELSYSDIR)/doc/html"
- $(INSTALL_DATA) $(HTMLDIR)/* \
- "$(RELSYSDIR)/doc/html"
- $(INSTALL_DATA) $(INFO_FILE) "$(RELSYSDIR)"
-
-
-release_spec:
diff --git a/lib/typer/doc/src/book.xml b/lib/typer/doc/src/book.xml
deleted file mode 100644
index 20da44ae04..0000000000
--- a/lib/typer/doc/src/book.xml
+++ /dev/null
@@ -1,42 +0,0 @@
-<?xml version="1.0" encoding="utf-8" ?>
-<!DOCTYPE book SYSTEM "book.dtd">
-
-<book xmlns:xi="http://www.w3.org/2001/XInclude">
- <header titlestyle="normal">
- <copyright>
- <year>2006</year><year>2016</year>
- <holder>Ericsson AB. All Rights Reserved.</holder>
- </copyright>
- <legalnotice>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- </legalnotice>
-
- <title>TypEr</title>
- <prepared></prepared>
- <docno></docno>
- <date></date>
- <rev></rev>
- </header>
- <pagetext></pagetext>
- <preamble>
- </preamble>
- <pagetext>TypEr</pagetext>
- <applications>
- <xi:include href="ref_man.xml"/>
- </applications>
- <releasenotes>
- <xi:include href="notes.xml"/>
- </releasenotes>
-</book>
-
diff --git a/lib/typer/doc/src/fascicules.xml b/lib/typer/doc/src/fascicules.xml
deleted file mode 100644
index b15610fa8b..0000000000
--- a/lib/typer/doc/src/fascicules.xml
+++ /dev/null
@@ -1,12 +0,0 @@
-<?xml version="1.0" encoding="utf-8" ?>
-<!DOCTYPE fascicules SYSTEM "fascicules.dtd">
-
-<fascicules>
- <fascicule file="part_notes" href="part_notes_frame.html" entry="yes">
- Release Notes
- </fascicule>
- <fascicule file="" href="../../../../doc/print.html" entry="no">
- Off-Print
- </fascicule>
-</fascicules>
-
diff --git a/lib/typer/doc/src/notes.xml b/lib/typer/doc/src/notes.xml
deleted file mode 100644
index 9ef5ca1c70..0000000000
--- a/lib/typer/doc/src/notes.xml
+++ /dev/null
@@ -1,111 +0,0 @@
-<?xml version="1.0" encoding="utf-8" ?>
-<!DOCTYPE chapter SYSTEM "chapter.dtd">
-
-<chapter>
- <header>
- <copyright>
- <year>2014</year><year>2016</year>
- <holder>Ericsson AB. All Rights Reserved.</holder>
- </copyright>
- <legalnotice>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- </legalnotice>
-
- <title>TypEr Release Notes</title>
- <prepared>otp_appnotes</prepared>
- <docno>nil</docno>
- <date>nil</date>
- <rev>nil</rev>
- <file>notes.xml</file>
- </header>
- <p>This document describes the changes made to TypEr.</p>
-
-<section><title>TypEr 0.9.11</title>
-
- <section><title>Improvements and New Features</title>
- <list>
- <item>
- <p>
- Internal changes</p>
- <p>
- Own Id: OTP-13551</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>TypEr 0.9.10</title>
-
- <section><title>Fixed Bugs and Malfunctions</title>
- <list>
- <item>
- <p>Fix a bug that could result in a crash when printing
- warnings onto standard error. </p>
- <p>
- Own Id: OTP-13010</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>TypEr 0.9.9</title>
-
- <section><title>Fixed Bugs and Malfunctions</title>
- <list>
- <item>
- <p> Properly extract annotations from core code. </p>
- <p>
- Own Id: OTP-12727</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>TypEr 0.9.8</title>
-
- <section><title>Fixed Bugs and Malfunctions</title>
- <list>
- <item>
- <p> The name of a compiler option has been fixed in the
- Makefile. </p>
- <p>
- Own Id: OTP-11996</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>TypEr 0.9.7</title>
-
- <section><title>Fixed Bugs and Malfunctions</title>
- <list>
- <item>
- <p>
- Added initial documentation framework for TypEr.</p>
- <p>
- Own Id: OTP-11860</p>
- </item>
- </list>
- </section>
-
-</section>
-
-
-
-</chapter>
-
diff --git a/lib/typer/doc/src/part_notes.xml b/lib/typer/doc/src/part_notes.xml
deleted file mode 100644
index 3234f0903e..0000000000
--- a/lib/typer/doc/src/part_notes.xml
+++ /dev/null
@@ -1,36 +0,0 @@
-<?xml version="1.0" encoding="utf-8" ?>
-<!DOCTYPE part SYSTEM "part.dtd">
-
-<part xmlns:xi="http://www.w3.org/2001/XInclude">
- <header>
- <copyright>
- <year>2006</year><year>2016</year>
- <holder>Ericsson AB. All Rights Reserved.</holder>
- </copyright>
- <legalnotice>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- </legalnotice>
-
- <title>TypEr Release Notes</title>
- <prepared></prepared>
- <docno></docno>
- <date></date>
- <rev></rev>
- </header>
- <description>
- <p><em>TypEr</em></p>
- </description>
- <xi:include href="notes.xml"/>
-</part>
-
diff --git a/lib/typer/doc/src/ref_man.xml b/lib/typer/doc/src/ref_man.xml
deleted file mode 100644
index c793207443..0000000000
--- a/lib/typer/doc/src/ref_man.xml
+++ /dev/null
@@ -1,36 +0,0 @@
-<?xml version="1.0" encoding="utf-8" ?>
-<!DOCTYPE application SYSTEM "application.dtd">
-
-<application xmlns:xi="http://www.w3.org/2001/XInclude">
- <header>
- <copyright>
- <year>2014</year><year>2016</year>
- <holder>Ericsson AB. All Rights Reserved.</holder>
- </copyright>
- <legalnotice>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- </legalnotice>
-
- <title>TypEr</title>
- <prepared></prepared>
- <docno></docno>
- <date></date>
- <rev></rev>
- <file>ref_man.xml</file>
- </header>
- <description>
- </description>
- <xi:include href="typer_app.xml"/>
-</application>
-
diff --git a/lib/typer/doc/src/typer_app.xml b/lib/typer/doc/src/typer_app.xml
deleted file mode 100644
index d52df5d0da..0000000000
--- a/lib/typer/doc/src/typer_app.xml
+++ /dev/null
@@ -1,44 +0,0 @@
-<?xml version="1.0" encoding="utf-8" ?>
-<!DOCTYPE appref SYSTEM "appref.dtd">
-
-<appref>
- <header>
- <copyright>
- <year>2014</year><year>2016</year>
- <holder>Ericsson AB. All Rights Reserved.</holder>
- </copyright>
- <legalnotice>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- </legalnotice>
-
- <title>TypEr</title>
- <prepared></prepared>
- <responsible></responsible>
- <docno></docno>
- <approved></approved>
- <checked></checked>
- <date></date>
- <rev></rev>
- <file>typer.xml</file>
- </header>
- <app>TypEr</app>
- <appsummary>The TypEr Application</appsummary>
- <description>
- <p>An Erlang/OTP application that shows type information
- for Erlang modules to the user. Additionally, it can
- annotate the code of files with such type information.</p>
- </description>
-
-</appref>
-
diff --git a/lib/typer/ebin/.gitignore b/lib/typer/ebin/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/lib/typer/ebin/.gitignore
+++ /dev/null
diff --git a/lib/typer/info b/lib/typer/info
deleted file mode 100644
index 5145fbcfff..0000000000
--- a/lib/typer/info
+++ /dev/null
@@ -1,2 +0,0 @@
-group: tools
-short: TypEr
diff --git a/lib/typer/src/Makefile b/lib/typer/src/Makefile
deleted file mode 100644
index 6c5d8b0726..0000000000
--- a/lib/typer/src/Makefile
+++ /dev/null
@@ -1,111 +0,0 @@
-#
-# %CopyrightBegin%
-#
-# Copyright Ericsson AB 2006-2016. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# %CopyrightEnd%
-#
-#=============================================================================
-#
-# File: lib/typer/src/Makefile
-# Authors: Kostis Sagonas
-#
-#=============================================================================
-
-include $(ERL_TOP)/make/target.mk
-include $(ERL_TOP)/make/$(TARGET)/otp.mk
-
-# ----------------------------------------------------
-# Application version
-# ----------------------------------------------------
-include ../vsn.mk
-VSN=$(TYPER_VSN)
-
-# ----------------------------------------------------
-# Release directory specification
-# ----------------------------------------------------
-RELSYSDIR = $(RELEASE_PATH)/lib/typer-$(VSN)
-
-# ----------------------------------------------------
-# Orientation information -- find dialyzer's dir
-# ----------------------------------------------------
-DIALYZER_DIR = $(ERL_TOP)/lib/dialyzer
-
-# ----------------------------------------------------
-# Target Specs
-# ----------------------------------------------------
-MODULES = typer
-
-HRL_FILES=
-ERL_FILES= $(MODULES:%=%.erl)
-INSTALL_FILES= $(MODULES:%=$(EBIN)/%.$(EMULATOR)) $(APP_TARGET) $(APPUP_TARGET)
-TARGET_FILES= $(INSTALL_FILES)
-
-APP_FILE= typer.app
-APP_SRC= $(APP_FILE).src
-APP_TARGET= $(EBIN)/$(APP_FILE)
-
-APPUP_FILE= typer.appup
-APPUP_SRC= $(APPUP_FILE).src
-APPUP_TARGET= $(EBIN)/$(APPUP_FILE)
-
-# ----------------------------------------------------
-# FLAGS
-# ----------------------------------------------------
-ERL_COMPILE_FLAGS += +warn_export_vars +warn_untyped_record +warn_missing_spec
-
-# ----------------------------------------------------
-# Targets
-# ----------------------------------------------------
-
-debug opt: $(TARGET_FILES)
-
-docs:
-
-clean:
- rm -f $(TARGET_FILES)
- rm -f core
-
-# ----------------------------------------------------
-# Special Build Targets
-# ----------------------------------------------------
-
-$(EBIN)/typer.$(EMULATOR): typer.erl ../vsn.mk Makefile
- $(erlc_verbose)erlc -W $(ERL_COMPILE_FLAGS) -DVSN="\"v$(VSN)\"" -o$(EBIN) typer.erl
-
-$(APP_TARGET): $(APP_SRC) ../vsn.mk
- $(vsn_verbose)sed -e 's;%VSN%;$(VSN);' $< > $@
-
-$(APPUP_TARGET): $(APPUP_SRC) ../vsn.mk
- $(vsn_verbose)sed -e 's;%VSN%;$(VSN);' $< > $@
-
-# ---------------------------------------------------------------------
-# dependencies
-# ---------------------------------------------------------------------
-
-
-# ----------------------------------------------------
-# Release Target
-# ----------------------------------------------------
-include $(ERL_TOP)/make/otp_release_targets.mk
-
-release_spec: opt
- $(INSTALL_DIR) "$(RELSYSDIR)/src"
- $(INSTALL_DATA) $(ERL_FILES) $(HRL_FILES) $(YRL_FILES) \
- "$(RELSYSDIR)/src"
- $(INSTALL_DIR) "$(RELSYSDIR)/ebin"
- $(INSTALL_DATA) $(INSTALL_FILES) "$(RELSYSDIR)/ebin"
-
-release_docs_spec:
diff --git a/lib/typer/src/typer.app.src b/lib/typer/src/typer.app.src
deleted file mode 100644
index 974091b44c..0000000000
--- a/lib/typer/src/typer.app.src
+++ /dev/null
@@ -1,11 +0,0 @@
-% This is an -*- erlang -*- file.
-
-{application, typer,
- [{description, "TYPe annotator for ERlang programs, version %VSN%"},
- {vsn, "%VSN%"},
- {modules, [typer]},
- {registered, []},
- {applications, [compiler, dialyzer, hipe, kernel, stdlib]},
- {env, []},
- {runtime_dependencies, ["stdlib-2.0","kernel-3.0","hipe-3.10.3","erts-6.0",
- "dialyzer-2.7","compiler-5.0"]}]}.
diff --git a/lib/typer/src/typer.erl b/lib/typer/src/typer.erl
deleted file mode 100644
index 6aee749741..0000000000
--- a/lib/typer/src/typer.erl
+++ /dev/null
@@ -1,1120 +0,0 @@
-%% -*- erlang-indent-level: 2 -*-
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-
-%%-----------------------------------------------------------------------
-%% File : typer.erl
-%% Author(s) : The first version of typer was written by Bingwen He
-%% with guidance from Kostis Sagonas and Tobias Lindahl.
-%% Since June 2008 typer is maintained by Kostis Sagonas.
-%% Description : An Erlang/OTP application that shows type information
-%% for Erlang modules to the user. Additionally, it can
-%% annotate the code of files with such type information.
-%%-----------------------------------------------------------------------
-
--module(typer).
-
--export([start/0]).
-
-%%-----------------------------------------------------------------------
-
--define(SHOW, show).
--define(SHOW_EXPORTED, show_exported).
--define(ANNOTATE, annotate).
--define(ANNOTATE_INC_FILES, annotate_inc_files).
-
--type mode() :: ?SHOW | ?SHOW_EXPORTED | ?ANNOTATE | ?ANNOTATE_INC_FILES.
-
-%%-----------------------------------------------------------------------
-
--type files() :: [file:filename()].
--type callgraph() :: dialyzer_callgraph:callgraph().
--type codeserver() :: dialyzer_codeserver:codeserver().
--type plt() :: dialyzer_plt:plt().
-
--record(analysis,
- {mode :: mode() | 'undefined',
- macros = [] :: [{atom(), term()}],
- includes = [] :: files(),
- codeserver = dialyzer_codeserver:new():: codeserver(),
- callgraph = dialyzer_callgraph:new() :: callgraph(),
- files = [] :: files(), % absolute names
- plt = none :: 'none' | file:filename(),
- no_spec = false :: boolean(),
- show_succ = false :: boolean(),
-	 %% For choosing between specs and edoc @spec comments
- edoc = false :: boolean(),
- %% Files in 'fms' are compilable with option 'to_pp'; we keep them
- %% as {FileName, ModuleName} in case the ModuleName is different
- fms = [] :: [{file:filename(), module()}],
- ex_func = map__new() :: map_dict(),
- record = map__new() :: map_dict(),
- func = map__new() :: map_dict(),
- inc_func = map__new() :: map_dict(),
- trust_plt = dialyzer_plt:new() :: plt()}).
--type analysis() :: #analysis{}.
-
--record(args, {files = [] :: files(),
- files_r = [] :: files(),
- trusted = [] :: files()}).
--type args() :: #args{}.
-
-%%--------------------------------------------------------------------
-
--spec start() -> no_return().
-
-start() ->
- {Args, Analysis} = process_cl_args(),
- %% io:format("Args: ~p\n", [Args]),
- %% io:format("Analysis: ~p\n", [Analysis]),
- Timer = dialyzer_timing:init(false),
- TrustedFiles = filter_fd(Args#args.trusted, [], fun is_erl_file/1),
- Analysis2 = extract(Analysis, TrustedFiles),
- All_Files = get_all_files(Args),
- %% io:format("All_Files: ~p\n", [All_Files]),
- Analysis3 = Analysis2#analysis{files = All_Files},
- Analysis4 = collect_info(Analysis3),
- %% io:format("Final: ~p\n", [Analysis4#analysis.fms]),
- TypeInfo = get_type_info(Analysis4),
- dialyzer_timing:stop(Timer),
- show_or_annotate(TypeInfo),
- %% io:format("\nTyper analysis finished\n"),
- erlang:halt(0).
-
-%%--------------------------------------------------------------------
-
--spec extract(analysis(), files()) -> analysis().
-
-extract(#analysis{macros = Macros,
- includes = Includes,
- trust_plt = TrustPLT} = Analysis, TrustedFiles) ->
- %% io:format("--- Extracting trusted typer_info... "),
- Ds = [{d, Name, Value} || {Name, Value} <- Macros],
- CodeServer = dialyzer_codeserver:new(),
- Fun =
- fun(File, CS) ->
- %% We include one more dir; the one above the one we are trusting
-	%% E.g., for /home/tests/typer_ann/test.ann.erl, we should include
- %% /home/tests/ rather than /home/tests/typer_ann/
- AllIncludes = [filename:dirname(filename:dirname(File)) | Includes],
- Is = [{i, Dir} || Dir <- AllIncludes],
- CompOpts = dialyzer_utils:src_compiler_opts() ++ Is ++ Ds,
- case dialyzer_utils:get_abstract_code_from_src(File, CompOpts) of
- {ok, AbstractCode} ->
- case dialyzer_utils:get_record_and_type_info(AbstractCode) of
- {ok, RecDict} ->
- Mod = list_to_atom(filename:basename(File, ".erl")),
- case dialyzer_utils:get_spec_info(Mod, AbstractCode, RecDict) of
- {ok, SpecDict, CbDict} ->
- CS1 = dialyzer_codeserver:store_temp_records(Mod, RecDict, CS),
- dialyzer_codeserver:store_temp_contracts(Mod, SpecDict, CbDict, CS1);
- {error, Reason} -> compile_error([Reason])
- end;
- {error, Reason} -> compile_error([Reason])
- end;
- {error, Reason} -> compile_error(Reason)
- end
- end,
- CodeServer1 = lists:foldl(Fun, CodeServer, TrustedFiles),
- %% Process remote types
- NewCodeServer =
- try
- NewRecords = dialyzer_codeserver:get_temp_records(CodeServer1),
- NewExpTypes = dialyzer_codeserver:get_temp_exported_types(CodeServer1),
- case sets:size(NewExpTypes) of 0 -> ok end,
- OldRecords = dialyzer_plt:get_types(TrustPLT), % XXX change to the PLT?
- MergedRecords = dialyzer_utils:merge_records(NewRecords, OldRecords),
- CodeServer2 = dialyzer_codeserver:set_temp_records(MergedRecords, CodeServer1),
- CodeServer3 = dialyzer_codeserver:finalize_exported_types(NewExpTypes, CodeServer2),
- CodeServer4 = dialyzer_utils:process_record_remote_types(CodeServer3),
- dialyzer_contracts:process_contract_remote_types(CodeServer4)
- catch
- throw:{error, ErrorMsg} ->
- compile_error(ErrorMsg)
- end,
- %% Create TrustPLT
- Contracts = dialyzer_codeserver:get_contracts(NewCodeServer),
- Modules = dict:fetch_keys(Contracts),
- FoldFun =
- fun(Module, TmpPlt) ->
- {ok, ModuleContracts} = dict:find(Module, Contracts),
- SpecList = [{MFA, Contract}
- || {MFA, {_FileLine, Contract}} <- dict:to_list(ModuleContracts)],
- dialyzer_plt:insert_contract_list(TmpPlt, SpecList)
- end,
- NewTrustPLT = lists:foldl(FoldFun, TrustPLT, Modules),
- Analysis#analysis{trust_plt = NewTrustPLT}.
-
-%%--------------------------------------------------------------------
-
--spec get_type_info(analysis()) -> analysis().
-
-get_type_info(#analysis{callgraph = CallGraph,
- trust_plt = TrustPLT,
- codeserver = CodeServer} = Analysis) ->
- StrippedCallGraph = remove_external(CallGraph, TrustPLT),
- %% io:format("--- Analyzing callgraph... "),
- try
- NewPlt = dialyzer_succ_typings:analyze_callgraph(StrippedCallGraph,
- TrustPLT, CodeServer),
- Analysis#analysis{callgraph = StrippedCallGraph, trust_plt = NewPlt}
- catch
- error:What ->
- fatal_error(io_lib:format("Analysis failed with message: ~p",
- [{What, erlang:get_stacktrace()}]));
- throw:{dialyzer_succ_typing_error, Msg} ->
- fatal_error(io_lib:format("Analysis failed with message: ~s", [Msg]))
- end.
-
--spec remove_external(callgraph(), plt()) -> callgraph().
-
-remove_external(CallGraph, PLT) ->
- {StrippedCG0, Ext} = dialyzer_callgraph:remove_external(CallGraph),
- case get_external(Ext, PLT) of
- [] -> ok;
- Externals ->
- msg(io_lib:format(" Unknown functions: ~p\n", [lists:usort(Externals)])),
- ExtTypes = rcv_ext_types(),
- case ExtTypes of
- [] -> ok;
- _ -> msg(io_lib:format(" Unknown types: ~p\n", [ExtTypes]))
- end
- end,
- StrippedCG0.
-
--spec get_external([{mfa(), mfa()}], plt()) -> [mfa()].
-
-get_external(Exts, Plt) ->
- Fun = fun ({_From, To = {M, F, A}}, Acc) ->
- case dialyzer_plt:contains_mfa(Plt, To) of
- false ->
- case erl_bif_types:is_known(M, F, A) of
- true -> Acc;
- false -> [To|Acc]
- end;
- true -> Acc
- end
- end,
- lists:foldl(Fun, [], Exts).
-
-%%--------------------------------------------------------------------
-%% Showing type information or annotating files with such information.
-%%--------------------------------------------------------------------
-
--define(TYPER_ANN_DIR, "typer_ann").
-
--type line() :: non_neg_integer().
--type fa() :: {atom(), arity()}.
--type func_info() :: {line(), atom(), arity()}.
-
--record(info, {records = map__new() :: map_dict(),
- functions = [] :: [func_info()],
- types = map__new() :: map_dict(),
- edoc = false :: boolean()}).
--record(inc, {map = map__new() :: map_dict(), filter = [] :: files()}).
--type inc() :: #inc{}.
-
--spec show_or_annotate(analysis()) -> 'ok'.
-
-show_or_annotate(#analysis{mode = Mode, fms = Files} = Analysis) ->
- case Mode of
- ?SHOW -> show(Analysis);
- ?SHOW_EXPORTED -> show(Analysis);
- ?ANNOTATE ->
- Fun = fun ({File, Module}) ->
- Info = get_final_info(File, Module, Analysis),
- write_typed_file(File, Info)
- end,
- lists:foreach(Fun, Files);
- ?ANNOTATE_INC_FILES ->
- IncInfo = write_and_collect_inc_info(Analysis),
- write_inc_files(IncInfo)
- end.
-
-write_and_collect_inc_info(Analysis) ->
- Fun = fun ({File, Module}, Inc) ->
- Info = get_final_info(File, Module, Analysis),
- write_typed_file(File, Info),
- IncFuns = get_functions(File, Analysis),
- collect_imported_functions(IncFuns, Info#info.types, Inc)
- end,
- NewInc = lists:foldl(Fun, #inc{}, Analysis#analysis.fms),
- clean_inc(NewInc).
-
-write_inc_files(Inc) ->
- Fun =
- fun (File) ->
- Val = map__lookup(File, Inc#inc.map),
-	%% Val is a list of functions with their type info,
-	%% in the form [{{Line,F,A},Type}]
- Functions = [Key || {Key, _} <- Val],
- Val1 = [{{F,A},Type} || {{_Line,F,A},Type} <- Val],
- Info = #info{types = map__from_list(Val1),
- records = map__new(),
- %% Note we need to sort functions here!
- functions = lists:keysort(1, Functions)},
- %% io:format("Types ~p\n", [Info#info.types]),
- %% io:format("Functions ~p\n", [Info#info.functions]),
- %% io:format("Records ~p\n", [Info#info.records]),
- write_typed_file(File, Info)
- end,
- lists:foreach(Fun, dict:fetch_keys(Inc#inc.map)).
-
-show(Analysis) ->
- Fun = fun ({File, Module}) ->
- Info = get_final_info(File, Module, Analysis),
- show_type_info(File, Info)
- end,
- lists:foreach(Fun, Analysis#analysis.fms).
-
-get_final_info(File, Module, Analysis) ->
- Records = get_records(File, Analysis),
- Types = get_types(Module, Analysis, Records),
- Functions = get_functions(File, Analysis),
- Edoc = Analysis#analysis.edoc,
- #info{records = Records, functions = Functions, types = Types, edoc = Edoc}.
-
-collect_imported_functions(Functions, Types, Inc) ->
-  %% Coming from other sources, including:
- %% FIXME: How to deal with yecc-generated file????
- %% --.yrl (yecc-generated file)???
- %% -- yeccpre.hrl (yecc-generated file)???
- %% -- other cases
- Fun = fun ({File, _} = Obj, I) ->
- case is_yecc_gen(File, I) of
- {true, NewI} -> NewI;
- {false, NewI} ->
- check_imported_functions(Obj, NewI, Types)
- end
- end,
- lists:foldl(Fun, Inc, Functions).
-
--spec is_yecc_gen(file:filename(), inc()) -> {boolean(), inc()}.
-
-is_yecc_gen(File, #inc{filter = Fs} = Inc) ->
- case lists:member(File, Fs) of
- true -> {true, Inc};
- false ->
- case filename:extension(File) of
- ".yrl" ->
- Rootname = filename:rootname(File, ".yrl"),
- Obj = Rootname ++ ".erl",
- case lists:member(Obj, Fs) of
- true -> {true, Inc};
- false ->
- NewInc = Inc#inc{filter = [Obj|Fs]},
- {true, NewInc}
- end;
- _ ->
- case filename:basename(File) of
- "yeccpre.hrl" -> {true, Inc};
- _ -> {false, Inc}
- end
- end
- end.
-
-check_imported_functions({File, {Line, F, A}}, Inc, Types) ->
- IncMap = Inc#inc.map,
- FA = {F, A},
- Type = get_type_info(FA, Types),
- case map__lookup(File, IncMap) of
- none -> %% File is not added. Add it
- Obj = {File,[{FA, {Line, Type}}]},
- NewMap = map__insert(Obj, IncMap),
- Inc#inc{map = NewMap};
- Val -> %% File is already in. Check.
- case lists:keyfind(FA, 1, Val) of
- false ->
- %% Function is not in; add it
- Obj = {File, Val ++ [{FA, {Line, Type}}]},
- NewMap = map__insert(Obj, IncMap),
- Inc#inc{map = NewMap};
- Type ->
- %% Function is in and with same type
- Inc;
- _ ->
- %% Function is in but with diff type
- inc_warning(FA, File),
- Elem = lists:keydelete(FA, 1, Val),
- NewMap = case Elem of
- [] -> map__remove(File, IncMap);
- _ -> map__insert({File, Elem}, IncMap)
- end,
- Inc#inc{map = NewMap}
- end
- end.
-
-inc_warning({F, A}, File) ->
- io:format(" ***Warning: Skip function ~p/~p ", [F, A]),
- io:format("in file ~p because of inconsistent type\n", [File]).
-
-clean_inc(Inc) ->
- Inc1 = remove_yecc_generated_file(Inc),
- normalize_obj(Inc1).
-
-remove_yecc_generated_file(#inc{filter = Filter} = Inc) ->
- Fun = fun (Key, #inc{map = Map} = I) ->
- I#inc{map = map__remove(Key, Map)}
- end,
- lists:foldl(Fun, Inc, Filter).
-
-normalize_obj(TmpInc) ->
- Fun = fun (Key, Val, Inc) ->
- NewVal = [{{Line,F,A},Type} || {{F,A},{Line,Type}} <- Val],
- map__insert({Key, NewVal}, Inc)
- end,
- TmpInc#inc{map = map__fold(Fun, map__new(), TmpInc#inc.map)}.
-
-get_records(File, Analysis) ->
- map__lookup(File, Analysis#analysis.record).
-
-get_types(Module, Analysis, Records) ->
- TypeInfoPlt = Analysis#analysis.trust_plt,
- TypeInfo =
- case dialyzer_plt:lookup_module(TypeInfoPlt, Module) of
- none -> [];
- {value, List} -> List
- end,
- CodeServer = Analysis#analysis.codeserver,
- TypeInfoList =
- case Analysis#analysis.show_succ of
- true ->
- [convert_type_info(I) || I <- TypeInfo];
- false ->
- [get_type(I, CodeServer, Records) || I <- TypeInfo]
- end,
- map__from_list(TypeInfoList).
-
-convert_type_info({{_M, F, A}, Range, Arg}) ->
- {{F, A}, {Range, Arg}}.
-
-get_type({{M, F, A} = MFA, Range, Arg}, CodeServer, Records) ->
- case dialyzer_codeserver:lookup_mfa_contract(MFA, CodeServer) of
- error ->
- {{F, A}, {Range, Arg}};
- {ok, {_FileLine, Contract, _Xtra}} ->
- Sig = erl_types:t_fun(Arg, Range),
- case dialyzer_contracts:check_contract(Contract, Sig) of
- ok -> {{F, A}, {contract, Contract}};
- {error, {extra_range, _, _}} ->
- {{F, A}, {contract, Contract}};
- {error, {overlapping_contract, []}} ->
- {{F, A}, {contract, Contract}};
- {error, invalid_contract} ->
- CString = dialyzer_contracts:contract_to_string(Contract),
- SigString = dialyzer_utils:format_sig(Sig, Records),
- Msg = io_lib:format("Error in contract of function ~w:~w/~w\n"
- "\t The contract is: " ++ CString ++ "\n" ++
- "\t but the inferred signature is: ~s",
- [M, F, A, SigString]),
- fatal_error(Msg);
- {error, ErrorStr} when is_list(ErrorStr) -> % ErrorStr is a string()
- Msg = io_lib:format("Error in contract of function ~w:~w/~w: ~s",
- [M, F, A, ErrorStr]),
- fatal_error(Msg)
- end
- end.
-
-get_functions(File, Analysis) ->
- case Analysis#analysis.mode of
- ?SHOW ->
- Funcs = map__lookup(File, Analysis#analysis.func),
- Inc_Funcs = map__lookup(File, Analysis#analysis.inc_func),
- remove_module_info(Funcs) ++ normalize_incFuncs(Inc_Funcs);
- ?SHOW_EXPORTED ->
- Ex_Funcs = map__lookup(File, Analysis#analysis.ex_func),
- remove_module_info(Ex_Funcs);
- ?ANNOTATE ->
- Funcs = map__lookup(File, Analysis#analysis.func),
- remove_module_info(Funcs);
- ?ANNOTATE_INC_FILES ->
- map__lookup(File, Analysis#analysis.inc_func)
- end.
-
-normalize_incFuncs(Functions) ->
- [FunInfo || {_FileName, FunInfo} <- Functions].
-
--spec remove_module_info([func_info()]) -> [func_info()].
-
-remove_module_info(FunInfoList) ->
- F = fun ({_,module_info,0}) -> false;
- ({_,module_info,1}) -> false;
- ({Line,F,A}) when is_integer(Line), is_atom(F), is_integer(A) -> true
- end,
- lists:filter(F, FunInfoList).
-
-write_typed_file(File, Info) ->
- io:format(" Processing file: ~p\n", [File]),
- Dir = filename:dirname(File),
- RootName = filename:basename(filename:rootname(File)),
- Ext = filename:extension(File),
- TyperAnnDir = filename:join(Dir, ?TYPER_ANN_DIR),
- TmpNewFilename = lists:concat([RootName, ".ann", Ext]),
- NewFileName = filename:join(TyperAnnDir, TmpNewFilename),
- case file:make_dir(TyperAnnDir) of
- {error, Reason} ->
- case Reason of
- eexist -> %% TypEr dir exists; remove old typer files if they exist
- case file:delete(NewFileName) of
- ok -> ok;
- {error, enoent} -> ok;
- {error, _} ->
- Msg = io_lib:format("Error in deleting file ~s\n", [NewFileName]),
- fatal_error(Msg)
- end,
- write_typed_file(File, Info, NewFileName);
- enospc ->
- Msg = io_lib:format("Not enough space in ~p\n", [Dir]),
- fatal_error(Msg);
- eacces ->
- Msg = io_lib:format("No write permission in ~p\n", [Dir]),
- fatal_error(Msg);
- _ ->
- Msg = io_lib:format("Unhandled error ~s when writing ~p\n",
- [Reason, Dir]),
- fatal_error(Msg)
- end;
- ok -> %% Typer dir does NOT exist
- write_typed_file(File, Info, NewFileName)
- end.
-
-write_typed_file(File, Info, NewFileName) ->
- {ok, Binary} = file:read_file(File),
- Chars = binary_to_list(Binary),
- write_typed_file(Chars, NewFileName, Info, 1, []),
- io:format(" Saved as: ~p\n", [NewFileName]).
-
-write_typed_file(Chars, File, #info{functions = []}, _LNo, _Acc) ->
- ok = file:write_file(File, list_to_binary(Chars), [append]);
-write_typed_file([Ch|Chs] = Chars, File, Info, LineNo, Acc) ->
- [{Line,F,A}|RestFuncs] = Info#info.functions,
- case Line of
- 1 -> %% This will happen only for inc files
- ok = raw_write(F, A, Info, File, []),
- NewInfo = Info#info{functions = RestFuncs},
- NewAcc = [],
- write_typed_file(Chars, File, NewInfo, Line, NewAcc);
- _ ->
- case Ch of
- 10 ->
- NewLineNo = LineNo + 1,
- {NewInfo, NewAcc} =
- case NewLineNo of
- Line ->
- ok = raw_write(F, A, Info, File, [Ch|Acc]),
- {Info#info{functions = RestFuncs}, []};
- _ ->
- {Info, [Ch|Acc]}
- end,
- write_typed_file(Chs, File, NewInfo, NewLineNo, NewAcc);
- _ ->
- write_typed_file(Chs, File, Info, LineNo, [Ch|Acc])
- end
- end.
-
-raw_write(F, A, Info, File, Content) ->
- TypeInfo = get_type_string(F, A, Info, file),
- ContentList = lists:reverse(Content) ++ TypeInfo ++ "\n",
- ContentBin = list_to_binary(ContentList),
- file:write_file(File, ContentBin, [append]).
-
-get_type_string(F, A, Info, Mode) ->
- Type = get_type_info({F,A}, Info#info.types),
- TypeStr =
- case Type of
- {contract, C} ->
- dialyzer_contracts:contract_to_string(C);
- {RetType, ArgType} ->
- Sig = erl_types:t_fun(ArgType, RetType),
- dialyzer_utils:format_sig(Sig, Info#info.records)
- end,
- case Info#info.edoc of
- false ->
- case {Mode, Type} of
- {file, {contract, _}} -> "";
- _ ->
- Prefix = lists:concat(["-spec ", erl_types:atom_to_string(F)]),
- lists:concat([Prefix, TypeStr, "."])
- end;
- true ->
- Prefix = lists:concat(["%% @spec ", F]),
- lists:concat([Prefix, TypeStr, "."])
- end.
-
-show_type_info(File, Info) ->
- io:format("\n%% File: ~p\n%% ", [File]),
- OutputString = lists:concat(["~.", length(File)+8, "c~n"]),
- io:fwrite(OutputString, [$-]),
- Fun = fun ({_LineNo, F, A}) ->
- TypeInfo = get_type_string(F, A, Info, show),
- io:format("~s\n", [TypeInfo])
- end,
- lists:foreach(Fun, Info#info.functions).
-
-get_type_info(Func, Types) ->
- case map__lookup(Func, Types) of
- none ->
-      %% Note: type info for every function should exist in
-      %% the result offered by Dialyzer; otherwise there
-      %% *must* be something wrong with the analysis
- Msg = io_lib:format("No type info for function: ~p\n", [Func]),
- fatal_error(Msg);
- {contract, _Fun} = C -> C;
- {_RetType, _ArgType} = RA -> RA
- end.
-
-%%--------------------------------------------------------------------
-%% Processing of command-line options and arguments.
-%%--------------------------------------------------------------------
-
--spec process_cl_args() -> {args(), analysis()}.
-
-process_cl_args() ->
- ArgList = init:get_plain_arguments(),
- %% io:format("Args is ~p\n", [ArgList]),
- {Args, Analysis} = analyze_args(ArgList, #args{}, #analysis{}),
- %% if the mode has not been set, set it to the default mode (show)
- {Args, case Analysis#analysis.mode of
- undefined -> Analysis#analysis{mode = ?SHOW};
- Mode when is_atom(Mode) -> Analysis
- end}.
-
-analyze_args([], Args, Analysis) ->
- {Args, Analysis};
-analyze_args(ArgList, Args, Analysis) ->
- {Result, Rest} = cl(ArgList),
- {NewArgs, NewAnalysis} = analyze_result(Result, Args, Analysis),
- analyze_args(Rest, NewArgs, NewAnalysis).
-
-cl(["-h"|_]) -> help_message();
-cl(["--help"|_]) -> help_message();
-cl(["-v"|_]) -> version_message();
-cl(["--version"|_]) -> version_message();
-cl(["--edoc"|Opts]) -> {edoc, Opts};
-cl(["--show"|Opts]) -> {{mode, ?SHOW}, Opts};
-cl(["--show_exported"|Opts]) -> {{mode, ?SHOW_EXPORTED}, Opts};
-cl(["--show-exported"|Opts]) -> {{mode, ?SHOW_EXPORTED}, Opts};
-cl(["--show_success_typings"|Opts]) -> {show_succ, Opts};
-cl(["--show-success-typings"|Opts]) -> {show_succ, Opts};
-cl(["--annotate"|Opts]) -> {{mode, ?ANNOTATE}, Opts};
-cl(["--annotate-inc-files"|Opts]) -> {{mode, ?ANNOTATE_INC_FILES}, Opts};
-cl(["--no_spec"|Opts]) -> {no_spec, Opts};
-cl(["--plt",Plt|Opts]) -> {{plt, Plt}, Opts};
-cl(["-D"++Def|Opts]) ->
- case Def of
- "" -> fatal_error("no variable name specified after -D");
- _ ->
- DefPair = process_def_list(re:split(Def, "=", [{return, list}])),
- {{def, DefPair}, Opts}
- end;
-cl(["-I",Dir|Opts]) -> {{inc, Dir}, Opts};
-cl(["-I"++Dir|Opts]) ->
- case Dir of
- "" -> fatal_error("no include directory specified after -I");
- _ -> {{inc, Dir}, Opts}
- end;
-cl(["-T"|Opts]) ->
- {Files, RestOpts} = dialyzer_cl_parse:collect_args(Opts),
- case Files of
- [] -> fatal_error("no file or directory specified after -T");
- [_|_] -> {{trusted, Files}, RestOpts}
- end;
-cl(["-r"|Opts]) ->
- {Files, RestOpts} = dialyzer_cl_parse:collect_args(Opts),
- {{files_r, Files}, RestOpts};
-cl(["-pa",Dir|Opts]) -> {{pa,Dir}, Opts};
-cl(["-pz",Dir|Opts]) -> {{pz,Dir}, Opts};
-cl(["-"++H|_]) -> fatal_error("unknown option -"++H);
-cl(Opts) ->
- {Files, RestOpts} = dialyzer_cl_parse:collect_args(Opts),
- {{files, Files}, RestOpts}.
-
-process_def_list(L) ->
- case L of
- [Name, Value] ->
- {ok, Tokens, _} = erl_scan:string(Value ++ "."),
- {ok, ErlValue} = erl_parse:parse_term(Tokens),
- {list_to_atom(Name), ErlValue};
- [Name] ->
- {list_to_atom(Name), true}
- end.
-
-%% Get information about files that the user trusts and wants to analyze
-analyze_result({files, Val}, Args, Analysis) ->
- NewVal = Args#args.files ++ Val,
- {Args#args{files = NewVal}, Analysis};
-analyze_result({files_r, Val}, Args, Analysis) ->
- NewVal = Args#args.files_r ++ Val,
- {Args#args{files_r = NewVal}, Analysis};
-analyze_result({trusted, Val}, Args, Analysis) ->
- NewVal = Args#args.trusted ++ Val,
- {Args#args{trusted = NewVal}, Analysis};
-analyze_result(edoc, Args, Analysis) ->
- {Args, Analysis#analysis{edoc = true}};
-%% Get useful information for actual analysis
-analyze_result({mode, Mode}, Args, Analysis) ->
- case Analysis#analysis.mode of
- undefined -> {Args, Analysis#analysis{mode = Mode}};
- OldMode -> mode_error(OldMode, Mode)
- end;
-analyze_result({def, Val}, Args, Analysis) ->
- NewVal = Analysis#analysis.macros ++ [Val],
- {Args, Analysis#analysis{macros = NewVal}};
-analyze_result({inc, Val}, Args, Analysis) ->
- NewVal = Analysis#analysis.includes ++ [Val],
- {Args, Analysis#analysis{includes = NewVal}};
-analyze_result({plt, Plt}, Args, Analysis) ->
- {Args, Analysis#analysis{plt = Plt}};
-analyze_result(show_succ, Args, Analysis) ->
- {Args, Analysis#analysis{show_succ = true}};
-analyze_result(no_spec, Args, Analysis) ->
- {Args, Analysis#analysis{no_spec = true}};
-analyze_result({pa, Dir}, Args, Analysis) ->
- true = code:add_patha(Dir),
- {Args, Analysis};
-analyze_result({pz, Dir}, Args, Analysis) ->
- true = code:add_pathz(Dir),
- {Args, Analysis}.
-
-%%--------------------------------------------------------------------
-%% File processing.
-%%--------------------------------------------------------------------
-
--spec get_all_files(args()) -> [file:filename(),...].
-
-get_all_files(#args{files = Fs, files_r = Ds}) ->
- case filter_fd(Fs, Ds, fun test_erl_file_exclude_ann/1) of
- [] -> fatal_error("no file(s) to analyze");
- AllFiles -> AllFiles
- end.
-
--spec test_erl_file_exclude_ann(file:filename()) -> boolean().
-
-test_erl_file_exclude_ann(File) ->
- case is_erl_file(File) of
- true -> %% Exclude files ending with ".ann.erl"
- case re:run(File, "[\.]ann[\.]erl$") of
- {match, _} -> false;
- nomatch -> true
- end;
- false -> false
- end.
-
--spec is_erl_file(file:filename()) -> boolean().
-
-is_erl_file(File) ->
- filename:extension(File) =:= ".erl".
-
--type test_file_fun() :: fun((file:filename()) -> boolean()).
-
--spec filter_fd(files(), files(), test_file_fun()) -> files().
-
-filter_fd(File_Dir, Dir_R, Fun) ->
- All_File_1 = process_file_and_dir(File_Dir, Fun),
- All_File_2 = process_dir_rec(Dir_R, Fun),
- remove_dup(All_File_1 ++ All_File_2).
-
--spec process_file_and_dir(files(), test_file_fun()) -> files().
-
-process_file_and_dir(File_Dir, TestFun) ->
- Fun =
- fun (Elem, Acc) ->
- case filelib:is_regular(Elem) of
- true -> process_file(Elem, TestFun, Acc);
- false -> check_dir(Elem, false, Acc, TestFun)
- end
- end,
- lists:foldl(Fun, [], File_Dir).
-
--spec process_dir_rec(files(), test_file_fun()) -> files().
-
-process_dir_rec(Dirs, TestFun) ->
- Fun = fun (Dir, Acc) -> check_dir(Dir, true, Acc, TestFun) end,
- lists:foldl(Fun, [], Dirs).
-
--spec check_dir(file:filename(), boolean(), files(), test_file_fun()) -> files().
-
-check_dir(Dir, Recursive, Acc, Fun) ->
- case file:list_dir(Dir) of
- {ok, Files} ->
- {TmpDirs, TmpFiles} = split_dirs_and_files(Files, Dir),
- case Recursive of
- false ->
- FinalFiles = process_file_and_dir(TmpFiles, Fun),
- Acc ++ FinalFiles;
- true ->
- TmpAcc1 = process_file_and_dir(TmpFiles, Fun),
- TmpAcc2 = process_dir_rec(TmpDirs, Fun),
- Acc ++ TmpAcc1 ++ TmpAcc2
- end;
- {error, eacces} ->
- fatal_error("no access permission to dir \""++Dir++"\"");
- {error, enoent} ->
- fatal_error("cannot access "++Dir++": No such file or directory");
- {error, _Reason} ->
- fatal_error("error involving a use of file:list_dir/1")
- end.
-
-%% Same order as the input list
--spec process_file(file:filename(), test_file_fun(), files()) -> files().
-
-process_file(File, TestFun, Acc) ->
- case TestFun(File) of
- true -> Acc ++ [File];
- false -> Acc
- end.
-
-%% Same order as the input list
--spec split_dirs_and_files(files(), file:filename()) -> {files(), files()}.
-
-split_dirs_and_files(Elems, Dir) ->
- Test_Fun =
- fun (Elem, {DirAcc, FileAcc}) ->
- File = filename:join(Dir, Elem),
- case filelib:is_regular(File) of
- false -> {[File|DirAcc], FileAcc};
- true -> {DirAcc, [File|FileAcc]}
- end
- end,
- {Dirs, Files} = lists:foldl(Test_Fun, {[], []}, Elems),
- {lists:reverse(Dirs), lists:reverse(Files)}.
-
-%% Removes duplicate filenames but keeps the order of the input list
--spec remove_dup(files()) -> files().
-
-remove_dup(Files) ->
- Test_Dup = fun (File, Acc) ->
- case lists:member(File, Acc) of
- true -> Acc;
- false -> [File|Acc]
- end
- end,
- Reversed_Elems = lists:foldl(Test_Dup, [], Files),
- lists:reverse(Reversed_Elems).
-
-%%--------------------------------------------------------------------
-%% Collect information.
-%%--------------------------------------------------------------------
-
--type inc_file_info() :: {file:filename(), func_info()}.
-
--record(tmpAcc, {file :: file:filename(),
- module :: atom(),
- funcAcc = [] :: [func_info()],
- incFuncAcc = [] :: [inc_file_info()],
- dialyzerObj = [] :: [{mfa(), {_, _}}]}).
-
--spec collect_info(analysis()) -> analysis().
-
-collect_info(Analysis) ->
- NewPlt =
- try get_dialyzer_plt(Analysis) of
- DialyzerPlt ->
- dialyzer_plt:merge_plts([Analysis#analysis.trust_plt, DialyzerPlt])
- catch
- throw:{dialyzer_error,_Reason} ->
- fatal_error("Dialyzer's PLT is missing or is not up-to-date; please (re)create it")
- end,
- NewAnalysis = lists:foldl(fun collect_one_file_info/2,
- Analysis#analysis{trust_plt = NewPlt},
- Analysis#analysis.files),
- %% Process Remote Types
- TmpCServer = NewAnalysis#analysis.codeserver,
- NewCServer =
- try
- NewRecords = dialyzer_codeserver:get_temp_records(TmpCServer),
- NewExpTypes = dialyzer_codeserver:get_temp_exported_types(TmpCServer),
- OldRecords = dialyzer_plt:get_types(NewPlt),
- OldExpTypes = dialyzer_plt:get_exported_types(NewPlt),
- MergedRecords = dialyzer_utils:merge_records(NewRecords, OldRecords),
- MergedExpTypes = sets:union(NewExpTypes, OldExpTypes),
- %% io:format("Merged Records ~p",[MergedRecords]),
- TmpCServer1 = dialyzer_codeserver:set_temp_records(MergedRecords, TmpCServer),
- TmpCServer2 =
- dialyzer_codeserver:finalize_exported_types(MergedExpTypes, TmpCServer1),
- TmpCServer3 = dialyzer_utils:process_record_remote_types(TmpCServer2),
- dialyzer_contracts:process_contract_remote_types(TmpCServer3)
- catch
- throw:{error, ErrorMsg} ->
- fatal_error(ErrorMsg)
- end,
- NewAnalysis#analysis{codeserver = NewCServer}.
-
-collect_one_file_info(File, Analysis) ->
- Ds = [{d,Name,Val} || {Name,Val} <- Analysis#analysis.macros],
- %% Current directory should also be included in "Includes".
- Includes = [filename:dirname(File)|Analysis#analysis.includes],
- Is = [{i,Dir} || Dir <- Includes],
- Options = dialyzer_utils:src_compiler_opts() ++ Is ++ Ds,
- case dialyzer_utils:get_abstract_code_from_src(File, Options) of
- {error, Reason} ->
- %% io:format("File=~p\n,Options=~p\n,Error=~p\n", [File,Options,Reason]),
- compile_error(Reason);
- {ok, AbstractCode} ->
- case dialyzer_utils:get_core_from_abstract_code(AbstractCode, Options) of
- error -> compile_error(["Could not get core erlang for "++File]);
- {ok, Core} ->
- case dialyzer_utils:get_record_and_type_info(AbstractCode) of
- {error, Reason} -> compile_error([Reason]);
- {ok, Records} ->
- Mod = cerl:concrete(cerl:module_name(Core)),
- case dialyzer_utils:get_spec_info(Mod, AbstractCode, Records) of
- {error, Reason} -> compile_error([Reason]);
- {ok, SpecInfo, CbInfo} ->
- ExpTypes = get_exported_types_from_core(Core),
- analyze_core_tree(Core, Records, SpecInfo, CbInfo,
- ExpTypes, Analysis, File)
- end
- end
- end
- end.
-
-analyze_core_tree(Core, Records, SpecInfo, CbInfo, ExpTypes, Analysis, File) ->
- Module = cerl:concrete(cerl:module_name(Core)),
- TmpTree = cerl:from_records(Core),
- CS1 = Analysis#analysis.codeserver,
- NextLabel = dialyzer_codeserver:get_next_core_label(CS1),
- {Tree, NewLabel} = cerl_trees:label(TmpTree, NextLabel),
- CS2 = dialyzer_codeserver:insert(Module, Tree, CS1),
- CS3 = dialyzer_codeserver:set_next_core_label(NewLabel, CS2),
- CS4 = dialyzer_codeserver:store_temp_records(Module, Records, CS3),
- CS5 =
- case Analysis#analysis.no_spec of
- true -> CS4;
- false ->
- dialyzer_codeserver:store_temp_contracts(Module, SpecInfo, CbInfo, CS4)
- end,
- OldExpTypes = dialyzer_codeserver:get_temp_exported_types(CS5),
- MergedExpTypes = sets:union(ExpTypes, OldExpTypes),
- CS6 = dialyzer_codeserver:insert_temp_exported_types(MergedExpTypes, CS5),
- Ex_Funcs = [{0,F,A} || {_,_,{F,A}} <- cerl:module_exports(Tree)],
- CG = Analysis#analysis.callgraph,
- {V, E} = dialyzer_callgraph:scan_core_tree(Tree, CG),
- dialyzer_callgraph:add_edges(E, V, CG),
- Fun = fun analyze_one_function/2,
- All_Defs = cerl:module_defs(Tree),
- Acc = lists:foldl(Fun, #tmpAcc{file = File, module = Module}, All_Defs),
- Exported_FuncMap = map__insert({File, Ex_Funcs}, Analysis#analysis.ex_func),
-  %% We must sort all functions that originate from this file
-  %% by the *numerical order* of their line numbers
- Sorted_Functions = lists:keysort(1, Acc#tmpAcc.funcAcc),
- FuncMap = map__insert({File, Sorted_Functions}, Analysis#analysis.func),
- %% we do not need to sort functions which are imported from included files
- IncFuncMap = map__insert({File, Acc#tmpAcc.incFuncAcc},
- Analysis#analysis.inc_func),
- FMs = Analysis#analysis.fms ++ [{File, Module}],
- RecordMap = map__insert({File, Records}, Analysis#analysis.record),
- Analysis#analysis{fms = FMs,
- callgraph = CG,
- codeserver = CS6,
- ex_func = Exported_FuncMap,
- inc_func = IncFuncMap,
- record = RecordMap,
- func = FuncMap}.
-
-analyze_one_function({Var, FunBody} = Function, Acc) ->
- F = cerl:fname_id(Var),
- A = cerl:fname_arity(Var),
- TmpDialyzerObj = {{Acc#tmpAcc.module, F, A}, Function},
- NewDialyzerObj = Acc#tmpAcc.dialyzerObj ++ [TmpDialyzerObj],
- Anno = cerl:get_ann(FunBody),
- LineNo = get_line(Anno),
- FileName = get_file(Anno),
- BaseName = filename:basename(FileName),
- FuncInfo = {LineNo, F, A},
- OriginalName = Acc#tmpAcc.file,
- {FuncAcc, IncFuncAcc} =
- case (FileName =:= OriginalName) orelse (BaseName =:= OriginalName) of
- true -> %% Coming from original file
- %% io:format("Added function ~p\n", [{LineNo, F, A}]),
- {Acc#tmpAcc.funcAcc ++ [FuncInfo], Acc#tmpAcc.incFuncAcc};
- false ->
-	%% Coming from other sources, including:
- %% -- .yrl (yecc-generated file)
- %% -- yeccpre.hrl (yecc-generated file)
- %% -- other cases
- {Acc#tmpAcc.funcAcc, Acc#tmpAcc.incFuncAcc ++ [{FileName, FuncInfo}]}
- end,
- Acc#tmpAcc{funcAcc = FuncAcc,
- incFuncAcc = IncFuncAcc,
- dialyzerObj = NewDialyzerObj}.
-
-get_line([Line|_]) when is_integer(Line) -> Line;
-get_line([_|T]) -> get_line(T);
-get_line([]) -> none.
-
-get_file([{file,File}|_]) -> File;
-get_file([_|T]) -> get_file(T);
-get_file([]) -> "no_file". % should not happen
-
--spec get_dialyzer_plt(analysis()) -> plt().
-
-get_dialyzer_plt(#analysis{plt = PltFile0}) ->
- PltFile =
- case PltFile0 =:= none of
- true -> dialyzer_plt:get_default_plt();
- false -> PltFile0
- end,
- dialyzer_plt:from_file(PltFile).
-
-%% Exported Types
-
-get_exported_types_from_core(Core) ->
- Attrs = cerl:module_attrs(Core),
- ExpTypes1 = [cerl:concrete(L2) || {L1, L2} <- Attrs,
- cerl:is_literal(L1),
- cerl:is_literal(L2),
- cerl:concrete(L1) =:= 'export_type'],
- ExpTypes2 = lists:flatten(ExpTypes1),
- M = cerl:atom_val(cerl:module_name(Core)),
- sets:from_list([{M, F, A} || {F, A} <- ExpTypes2]).
-
-%%--------------------------------------------------------------------
-%% Utilities for error reporting.
-%%--------------------------------------------------------------------
-
--spec fatal_error(string()) -> no_return().
-
-fatal_error(Slogan) ->
- msg(io_lib:format("typer: ~s\n", [Slogan])),
- erlang:halt(1).
-
--spec mode_error(mode(), mode()) -> no_return().
-
-mode_error(OldMode, NewMode) ->
- Msg = io_lib:format("Mode was previously set to '~s'; "
- "can not set it to '~s' now",
- [OldMode, NewMode]),
- fatal_error(Msg).
-
--spec compile_error([string()]) -> no_return().
-
-compile_error(Reason) ->
- JoinedString = lists:flatten([X ++ "\n" || X <- Reason]),
- Msg = "Analysis failed with error report:\n" ++ JoinedString,
- fatal_error(Msg).
-
--spec msg(string()) -> 'ok'.
-
-msg(Msg) ->
- io:format(standard_error, "~s", [Msg]).
-
-%%--------------------------------------------------------------------
-%% Version and help messages.
-%%--------------------------------------------------------------------
-
--spec version_message() -> no_return().
-
-version_message() ->
- io:format("TypEr version "++?VSN++"\n"),
- erlang:halt(0).
-
--spec help_message() -> no_return().
-
-help_message() ->
- S = <<" Usage: typer [--help] [--version] [--plt PLT] [--edoc]
- [--show | --show-exported | --annotate | --annotate-inc-files]
- [-Ddefine]* [-I include_dir]* [-pa dir]* [-pz dir]*
- [-T application]* [-r] file*
-
- Options:
- -r dir*
- search directories recursively for .erl files below them
- --show
- Prints type specifications for all functions on stdout.
- (this is the default behaviour; this option is not really needed)
- --show-exported (or --show_exported)
- Same as --show, but prints specifications for exported functions only
- Specs are displayed sorted alphabetically on the function's name
- --annotate
- Annotates the specified files with type specifications
- --annotate-inc-files
- Same as --annotate but annotates all -include() files as well as
- all .erl files (use this option with caution - has not been tested much)
- --edoc
- Prints type information as Edoc @spec comments, not as type specs
- --plt PLT
- Use the specified dialyzer PLT file rather than the default one
- -T file*
- The specified file(s) already contain type specifications and these
- are to be trusted in order to print specs for the rest of the files
- (Multiple files or dirs, separated by spaces, can be specified.)
- -Dname (or -Dname=value)
- pass the defined name(s) to TypEr
- (The syntax of defines is the same as that used by \"erlc\".)
- -I include_dir
- pass the include_dir to TypEr
- (The syntax of includes is the same as that used by \"erlc\".)
- -pa dir
- -pz dir
- Set code path options to TypEr
-     (This is useful for files that use parse transforms.)
- --version (or -v)
-     prints the TypEr version and exits
- --help (or -h)
- prints this message and exits
-
- Note:
- * denotes that multiple occurrences of these options are possible.
-">>,
- io:put_chars(S),
- erlang:halt(0).
-
-%%--------------------------------------------------------------------
-%% Handle messages.
-%%--------------------------------------------------------------------
-
-rcv_ext_types() ->
- Self = self(),
- Self ! {Self, done},
- rcv_ext_types(Self, []).
-
-rcv_ext_types(Self, ExtTypes) ->
- receive
- {Self, ext_types, ExtType} ->
- rcv_ext_types(Self, [ExtType|ExtTypes]);
- {Self, done} ->
- lists:usort(ExtTypes)
- end.
-
-%%--------------------------------------------------------------------
-%% A convenient abstraction of a Key-Value mapping data structure
-%% specialized for the uses in this module
-%%--------------------------------------------------------------------
-
--type map_dict() :: dict:dict().
-
--spec map__new() -> map_dict().
-map__new() ->
- dict:new().
-
--spec map__insert({term(), term()}, map_dict()) -> map_dict().
-map__insert(Object, Map) ->
- {Key, Value} = Object,
- dict:store(Key, Value, Map).
-
--spec map__lookup(term(), map_dict()) -> term().
-map__lookup(Key, Map) ->
- try dict:fetch(Key, Map) catch error:_ -> none end.
-
--spec map__from_list([{fa(), term()}]) -> map_dict().
-map__from_list(List) ->
- dict:from_list(List).
-
--spec map__remove(term(), map_dict()) -> map_dict().
-map__remove(Key, Dict) ->
- dict:erase(Key, Dict).
-
--spec map__fold(fun((term(), term(), term()) -> map_dict()), map_dict(), map_dict()) -> map_dict().
-map__fold(Fun, Acc0, Dict) ->
- dict:fold(Fun, Acc0, Dict).
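As an aside on the module removed above: its -Dname=value handling (process_def_list/1) worked by scanning and parsing the value as an Erlang term. A minimal standalone sketch of that idea, using a hypothetical define, not part of the diff itself:

%% Sketch only: how "-DDEBUG=42" ends up as the macro pair {'DEBUG', 42}.
Value = "42",                                  %% the text after '=' on the command line
{ok, Tokens, _End} = erl_scan:string(Value ++ "."),
{ok, Term} = erl_parse:parse_term(Tokens),     %% Term =:= 42
Def = {list_to_atom("DEBUG"), Term}.           %% later handed to the compiler as {d, Name, Value}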
diff --git a/lib/typer/test/Makefile b/lib/typer/test/Makefile
deleted file mode 100644
index fb5570d9f0..0000000000
--- a/lib/typer/test/Makefile
+++ /dev/null
@@ -1,65 +0,0 @@
-include $(ERL_TOP)/make/target.mk
-include $(ERL_TOP)/make/$(TARGET)/otp.mk
-
-# ----------------------------------------------------
-# Target Specs
-# ----------------------------------------------------
-
-MODULES= \
- typer_SUITE
-
-ERL_FILES= $(MODULES:%=%.erl)
-
-TARGET_FILES= $(MODULES:%=$(EBIN)/%.$(EMULATOR))
-INSTALL_PROGS= $(TARGET_FILES)
-
-EMAKEFILE=Emakefile
-
-# ----------------------------------------------------
-# Release directory specification
-# ----------------------------------------------------
-RELSYSDIR = $(RELEASE_PATH)/typer_test
-
-# ----------------------------------------------------
-# FLAGS
-# ----------------------------------------------------
-
-ERL_MAKE_FLAGS +=
-ERL_COMPILE_FLAGS +=
-
-EBIN = .
-
-# ----------------------------------------------------
-# Targets
-# ----------------------------------------------------
-
-make_emakefile:
- $(ERL_TOP)/make/make_emakefile $(ERL_COMPILE_FLAGS) -o$(EBIN) $(MODULES) \
- > $(EMAKEFILE)
- $(ERL_TOP)/make/make_emakefile $(ERL_COMPILE_FLAGS) -o$(EBIN) '*_SUITE_make' \
- >> $(EMAKEFILE)
-
-tests debug opt: make_emakefile
- erl $(ERL_MAKE_FLAGS) -make
-
-clean:
- rm -f $(EMAKEFILE)
- rm -f $(TARGET_FILES) $(GEN_FILES)
- rm -f core
-
-docs:
-
-# ----------------------------------------------------
-# Release Target
-# ----------------------------------------------------
-include $(ERL_TOP)/make/otp_release_targets.mk
-
-release_spec: opt
-
-release_tests_spec: make_emakefile
- $(INSTALL_DIR) "$(RELSYSDIR)"
- $(INSTALL_DATA) $(EMAKEFILE) $(ERL_FILES) "$(RELSYSDIR)"
- $(INSTALL_DATA) typer.spec "$(RELSYSDIR)"
- chmod -R u+w "$(RELSYSDIR)"
-
-release_docs_spec:
diff --git a/lib/typer/test/typer.spec b/lib/typer/test/typer.spec
deleted file mode 100644
index 79f51b6781..0000000000
--- a/lib/typer/test/typer.spec
+++ /dev/null
@@ -1 +0,0 @@
-{suites,"../typer_test",all}.
diff --git a/lib/typer/test/typer_SUITE.erl b/lib/typer/test/typer_SUITE.erl
deleted file mode 100644
index 25f0229640..0000000000
--- a/lib/typer/test/typer_SUITE.erl
+++ /dev/null
@@ -1,57 +0,0 @@
-%% ``Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
-%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
-%% AB. All Rights Reserved.''
-%%
--module(typer_SUITE).
-
--compile([export_all]).
--include_lib("common_test/include/ct.hrl").
-
-suite() ->
- [{ct_hooks, [ts_install_cth]}].
-
-all() ->
- case application:ensure_all_started(typer) of
- {ok, Apps} ->
- [application:stop(App) || App <- lists:reverse(Apps)],
- [app, appup];
- _ ->
- [appup]
- end.
-
-groups() ->
- [].
-
-init_per_suite(Config) ->
- Config.
-
-end_per_suite(_Config) ->
- ok.
-
-init_per_group(_GroupName, Config) ->
- Config.
-
-end_per_group(_GroupName, Config) ->
- Config.
-
-app() ->
- [{doc, "Test that the typer app file is ok"}].
-app(Config) when is_list(Config) ->
- ok = ?t:app_test(typer).
-
-appup() ->
- [{doc, "Test that the typer appup file is ok"}].
-appup(Config) when is_list(Config) ->
- ok = ?t:appup_test(typer).
diff --git a/lib/typer/vsn.mk b/lib/typer/vsn.mk
deleted file mode 100644
index ed12e067c1..0000000000
--- a/lib/typer/vsn.mk
+++ /dev/null
@@ -1 +0,0 @@
-TYPER_VSN = 0.9.11
diff --git a/lib/wx/api_gen/gen_util.erl b/lib/wx/api_gen/gen_util.erl
index cd42ad2d96..49a3cb521e 100644
--- a/lib/wx/api_gen/gen_util.erl
+++ b/lib/wx/api_gen/gen_util.erl
@@ -203,7 +203,7 @@ replace_and_remove([$; | R], Acc) ->
replace_and_remove([$@ | R], Acc) ->
replace_and_remove(R, [directive|Acc]);
-replace_and_remove([_E|R], Acc) -> %% Ignore everthing else
+replace_and_remove([_E|R], Acc) -> %% Ignore everything else
replace_and_remove(R, Acc);
replace_and_remove([], Acc) ->
Acc.
diff --git a/lib/wx/api_gen/wx_gen_cpp.erl b/lib/wx/api_gen/wx_gen_cpp.erl
index d4b6db8153..4b208001a0 100644
--- a/lib/wx/api_gen/wx_gen_cpp.erl
+++ b/lib/wx/api_gen/wx_gen_cpp.erl
@@ -627,7 +627,7 @@ decode_arg(N,#type{name="wxArrayString"},Place,A0) ->
w(" int * ~sLen = (int *) bp; bp += 4;~n", [N]),
case Place of
arg -> w(" wxArrayString ~s;~n", [N]);
- opt -> ignore %% Allready declared
+ opt -> ignore %% Already declared
end,
w(" int ~sASz = 0, * ~sTemp;~n", [N,N]),
w(" for(int i=0; i < *~sLen; i++) {~n", [N]),
diff --git a/lib/xmerl/src/xmerl_regexp.erl b/lib/xmerl/src/xmerl_regexp.erl
index fc89b80ff1..566b77725f 100644
--- a/lib/xmerl/src/xmerl_regexp.erl
+++ b/lib/xmerl/src/xmerl_regexp.erl
@@ -1154,7 +1154,7 @@ comp_crs([], Last) -> [{Last,maxchar}].
%% build_dfa(NFA, NfaStartState) -> {DFA,DfaStartState}.
%% Build a DFA from an NFA using "subset construction". The major
%% difference from the book is that we keep the marked and unmarked
-%% DFA states in seperate lists. New DFA states are added to the
+%% DFA states in separate lists. New DFA states are added to the
%% unmarked list and states are marked by moving them to the marked
%% list. We assume that the NFA accepting state numbers are in
%% ascending order for the rules and use ordsets to keep this order.
diff --git a/lib/xmerl/src/xmerl_sax_parser.erl b/lib/xmerl/src/xmerl_sax_parser.erl
index 318a0cf7f4..1aef6c58c4 100644
--- a/lib/xmerl/src/xmerl_sax_parser.erl
+++ b/lib/xmerl/src/xmerl_sax_parser.erl
@@ -1,7 +1,7 @@
%%--------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -33,6 +33,7 @@
%% External exports
%%----------------------------------------------------------------------
-export([file/2,
+ stream/3,
stream/2]).
%%----------------------------------------------------------------------
@@ -72,11 +73,12 @@ file(Name,Options) ->
File = filename:basename(Name),
ContinuationFun = fun default_continuation_cb/1,
Res = stream(<<>>,
- [{continuation_fun, ContinuationFun},
- {continuation_state, FD},
- {current_location, CL},
- {entity, File}
- |Options]),
+ [{continuation_fun, ContinuationFun},
+ {continuation_state, FD},
+ {current_location, CL},
+ {entity, File}
+ |Options],
+ file),
ok = file:close(FD),
Res
end.
@@ -92,19 +94,22 @@ file(Name,Options) ->
%% EventState = term()
%% Description: Parse a stream containing an XML document.
%%----------------------------------------------------------------------
-stream(Xml, Options) when is_list(Xml), is_list(Options) ->
+stream(Xml, Options) ->
+ stream(Xml, Options, stream).
+
+stream(Xml, Options, InputType) when is_list(Xml), is_list(Options) ->
State = parse_options(Options, initial_state()),
- case State#xmerl_sax_parser_state.file_type of
+ case State#xmerl_sax_parser_state.file_type of
dtd ->
xmerl_sax_parser_list:parse_dtd(Xml,
State#xmerl_sax_parser_state{encoding = list,
- input_type = stream});
+ input_type = InputType});
normal ->
xmerl_sax_parser_list:parse(Xml,
State#xmerl_sax_parser_state{encoding = list,
- input_type = stream})
+ input_type = InputType})
end;
-stream(Xml, Options) when is_binary(Xml), is_list(Options) ->
+stream(Xml, Options, InputType) when is_binary(Xml), is_list(Options) ->
case parse_options(Options, initial_state()) of
{error, Reason} -> {error, Reason};
State ->
@@ -127,7 +132,7 @@ stream(Xml, Options) when is_binary(Xml), is_list(Options) ->
State#xmerl_sax_parser_state.event_state};
{Xml1, State1} ->
parse_binary(Xml1,
- State1#xmerl_sax_parser_state{input_type = stream},
+ State1#xmerl_sax_parser_state{input_type = InputType},
ParseFunction)
end
end.
@@ -226,12 +231,12 @@ check_encoding_option(E) ->
%% Description: Detects which character set is used in a binary stream.
%%----------------------------------------------------------------------
detect_charset(<<>>, #xmerl_sax_parser_state{continuation_fun = undefined} = _) ->
- throw({error, "Can't detect character encoding due to no indata"});
+ {error, "Can't detect character encoding due to no indata"};
detect_charset(<<>>, #xmerl_sax_parser_state{continuation_fun = CFun,
continuation_state = CState} = State) ->
case CFun(CState) of
{<<>>, _} ->
- throw({error, "Can't detect character encoding due to lack of indata"});
+ {error, "Can't detect character encoding due to lack of indata"};
{NewBytes, NewContState} ->
detect_charset(NewBytes, State#xmerl_sax_parser_state{continuation_state = NewContState})
end;
diff --git a/lib/xmerl/src/xmerl_sax_parser.hrl b/lib/xmerl/src/xmerl_sax_parser.hrl
index 932ab0cec5..7f9bf6c4d3 100644
--- a/lib/xmerl/src/xmerl_sax_parser.hrl
+++ b/lib/xmerl/src/xmerl_sax_parser.hrl
@@ -88,14 +88,7 @@
current_location, % Location of the currently parsed XML entity
entity, % Parsed XML entity
skip_external_dtd = false,% If true the external DTD is skipped during parsing
- input_type % Source type: file | stream.
- % This field is a preparation for an fix in R17 of a bug in
- % the conformance against the standard.
- % Today a file which contains two XML documents will be considered
- % well-formed and the second is placed in the rest part of the
- % return tuple, according to the conformance tests this should fail.
- % In the future this will fail if xmerl_sax_aprser:file/2 is used but
- % left to the user in the xmerl_sax_aprser:stream/2 case.
+ input_type % Source type: file | stream
}).
diff --git a/lib/xmerl/src/xmerl_sax_parser_base.erlsrc b/lib/xmerl/src/xmerl_sax_parser_base.erlsrc
index 4d75805b9b..f3470b2809 100644
--- a/lib/xmerl/src/xmerl_sax_parser_base.erlsrc
+++ b/lib/xmerl/src/xmerl_sax_parser_base.erlsrc
@@ -1,7 +1,7 @@
%%-*-erlang-*-
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -72,7 +72,12 @@ parse(Xml, State) ->
{ok, Rest, State2} ->
State3 = event_callback(endDocument, State2),
ets:delete(RefTable),
- {ok, State3#xmerl_sax_parser_state.event_state, Rest};
+ case check_if_rest_ok(State3#xmerl_sax_parser_state.input_type, Rest) of
+ true ->
+ {ok, State3#xmerl_sax_parser_state.event_state, Rest};
+ false ->
+ format_error(fatal_error, State3, "Input found after legal document")
+ end;
{fatal_error, {State2, Reason}} ->
State3 = event_callback(endDocument, State2),
ets:delete(RefTable),
@@ -81,10 +86,14 @@ parse(Xml, State) ->
State3 = event_callback(endDocument, State2),
ets:delete(RefTable),
format_error(Tag, State3, Reason);
+ {endDocument, Rest, State2} ->
+ State3 = event_callback(endDocument, State2),
+ ets:delete(RefTable),
+ {ok, State3#xmerl_sax_parser_state.event_state, Rest};
Other ->
_State2 = event_callback(endDocument, State1),
ets:delete(RefTable),
- throw(Other)
+ {fatal_error, Other}
end.
%%----------------------------------------------------------------------
@@ -111,7 +120,7 @@ parse_dtd(Xml, State) ->
{Rest, State2} when is_record(State2, xmerl_sax_parser_state) ->
State3 = event_callback(endDocument, State2),
ets:delete(RefTable),
- {ok, State3#xmerl_sax_parser_state.event_state, Rest};
+ {ok, State3#xmerl_sax_parser_state.event_state, Rest};
{endDocument, Rest, State2} when is_record(State2, xmerl_sax_parser_state) ->
State3 = event_callback(endDocument, State2),
ets:delete(RefTable),
@@ -119,7 +128,7 @@ parse_dtd(Xml, State) ->
Other ->
_State2 = event_callback(endDocument, State1),
ets:delete(RefTable),
- throw(Other)
+ {fatal_error, Other}
end.
@@ -136,10 +145,11 @@ parse_dtd(Xml, State) ->
%% [1] document ::= prolog element Misc*
%%----------------------------------------------------------------------
parse_document(Rest, State) when is_record(State, xmerl_sax_parser_state) ->
- {Rest1, State1} = parse_xml_decl(Rest, State),
+ {Rest1, State1} = parse_byte_order_mark(Rest, State),
{Rest2, State2} = parse_misc(Rest1, State1, true),
{ok, Rest2, State2}.
+?PARSE_BYTE_ORDER_MARK(Bytes, State).
%%----------------------------------------------------------------------
%% Function: parse_xml_decl(Rest, State) -> Result
@@ -150,15 +160,8 @@ parse_document(Rest, State) when is_record(State, xmerl_sax_parser_state) ->
%% [22] prolog ::= XMLDecl? Misc* (doctypedecl Misc*)?
%% [23] XMLDecl ::= '<?xml' VersionInfo EncodingDecl? SDDecl? S? '?>'
%%----------------------------------------------------------------------
--dialyzer({[no_fail_call, no_match], parse_xml_decl/2}).
parse_xml_decl(?STRING_EMPTY, State) ->
cf(?STRING_EMPTY, State, fun parse_xml_decl/2);
-parse_xml_decl(?BYTE_ORDER_MARK_1, State) ->
- cf(?BYTE_ORDER_MARK_1, State, fun parse_xml_decl/2);
-parse_xml_decl(?BYTE_ORDER_MARK_2, State) ->
- cf(?BYTE_ORDER_MARK_2, State, fun parse_xml_decl/2);
-parse_xml_decl(?BYTE_ORDER_MARK_REST(Rest), State) ->
- cf(Rest, State, fun parse_xml_decl/2);
parse_xml_decl(?STRING("<") = Bytes, State) ->
cf(Bytes, State, fun parse_xml_decl/2);
parse_xml_decl(?STRING("<?") = Bytes, State) ->
@@ -170,31 +173,19 @@ parse_xml_decl(?STRING("<?xm") = Bytes, State) ->
parse_xml_decl(?STRING("<?xml") = Bytes, State) ->
cf(Bytes, State, fun parse_xml_decl/2);
parse_xml_decl(?STRING_REST("<?xml", Rest1), State) ->
- parse_xml_decl_1(Rest1, State);
-parse_xml_decl(Bytes, #xmerl_sax_parser_state{encoding=Enc} = State) when is_binary(Bytes) ->
- case unicode:characters_to_list(Bytes, Enc) of
- {incomplete, _, _} ->
- cf(Bytes, State, fun parse_xml_decl/2);
- {error, _Encoded, _Rest} ->
- ?fatal_error(State, lists:flatten(io_lib:format("Bad character, not in ~p\n", [Enc])));
- _ ->
- parse_prolog(Bytes, State)
- end;
-parse_xml_decl(Bytes, State) ->
- parse_prolog(Bytes, State).
-
+ parse_xml_decl_rest(Rest1, State);
+?PARSE_XML_DECL(Bytes, State).
-parse_xml_decl_1(?STRING_UNBOUND_REST(C, Rest) = Bytes, State) ->
+parse_xml_decl_rest(?STRING_UNBOUND_REST(C, Rest) = Bytes, State) ->
if
?is_whitespace(C) ->
{_XmlAttributes, Rest1, State1} = parse_version_info(Rest, State, []),
- %State2 = event_callback({processingInstruction, "xml", XmlAttributes}, State1),% The XML decl. should not be reported as a PI
parse_prolog(Rest1, State1);
true ->
parse_prolog(?STRING_REST("<?xml", Bytes), State)
end;
-parse_xml_decl_1(Bytes, State) ->
- unicode_incomplete_check([Bytes, State, fun parse_xml_decl_1/2], undefined).
+parse_xml_decl_rest(Bytes, State) ->
+ unicode_incomplete_check([Bytes, State, fun parse_xml_decl_rest/2], undefined).
@@ -216,8 +207,6 @@ parse_prolog(?STRING_REST("<?", Rest), State) ->
parse_prolog(Rest1, State1);
{endDocument, Rest1, State1} ->
parse_prolog(Rest1, State1)
- % IValue = ?TO_INPUT_FORMAT("<?"),
- % {?APPEND_STRING(IValue, Rest1), State1}
end;
parse_prolog(?STRING_REST("<!", Rest), State) ->
parse_prolog_1(Rest, State);
@@ -230,7 +219,6 @@ parse_prolog(Bytes, State) ->
unicode_incomplete_check([Bytes, State, fun parse_prolog/2],
"expecting < or whitespace").
-
parse_prolog_1(?STRING_EMPTY, State) ->
cf(?STRING_EMPTY, State, fun parse_prolog_1/2);
parse_prolog_1(?STRING("D") = Bytes, State) ->
@@ -442,6 +430,15 @@ check_if_new_doc_allowed(stream, []) ->
check_if_new_doc_allowed(_, _) ->
false.
+check_if_rest_ok(file, []) ->
+ true;
+check_if_rest_ok(file, <<>>) ->
+ true;
+check_if_rest_ok(stream, _) ->
+ true;
+check_if_rest_ok(_, _) ->
+ false.
+
%%----------------------------------------------------------------------
%% Function: parse_pi_1(Rest, State) -> Result
%% Input: Rest = string() | binary()
@@ -1024,16 +1021,21 @@ parse_etag(Bytes, State) ->
unicode_incomplete_check([Bytes, State, fun parse_etag/2],
undefined).
-
parse_etag_1(?STRING_REST(">", Rest),
#xmerl_sax_parser_state{end_tags=[{_ETag, Uri, LocalName, QName, OldNsList, NewNsList}
- |RestOfETags]} = State, _Tag) ->
+ |RestOfETags],
+ input_type=InputType} = State, _Tag) ->
State1 = event_callback({endElement, Uri, LocalName, QName}, State),
State2 = send_end_prefix_mapping_event(NewNsList, State1),
- parse_content(Rest,
- State2#xmerl_sax_parser_state{end_tags=RestOfETags,
- ns = OldNsList},
- [], true);
+ case check_if_new_doc_allowed(InputType, RestOfETags) of
+ true ->
+ throw({endDocument, Rest, State2#xmerl_sax_parser_state{ns = OldNsList}});
+ false ->
+ parse_content(Rest,
+ State2#xmerl_sax_parser_state{end_tags=RestOfETags,
+ ns = OldNsList},
+ [], true)
+ end;
parse_etag_1(?STRING_UNBOUND_REST(_C, _), State, Tag) ->
{P,TN} = Tag,
?fatal_error(State, "Bad EndTag: " ++ P ++ ":" ++ TN);
@@ -1051,21 +1053,26 @@ parse_etag_1(Bytes, State, Tag) ->
%% Description: Parsing the content part of tags
%% [43] content ::= (element | CharData | Reference | CDSect | PI | Comment)*
%%----------------------------------------------------------------------
-
parse_content(?STRING_EMPTY, State, Acc, IgnorableWS) ->
- case catch cf(?STRING_EMPTY, State, Acc, IgnorableWS, fun parse_content/4) of
- {Rest, State1} when is_record(State1, xmerl_sax_parser_state) ->
- {Rest, State1};
- {fatal_error, {State1, Msg}} ->
- case check_if_document_complete(State1, Msg) of
- true ->
- State2 = send_character_event(length(Acc), IgnorableWS, lists:reverse(Acc), State1),
- {?STRING_EMPTY, State2};
- false ->
- ?fatal_error(State1, Msg)
- end;
- Other ->
- throw(Other)
+ case check_if_document_complete(State, "No more bytes") of
+ true ->
+ State1 = send_character_event(length(Acc), IgnorableWS, lists:reverse(Acc), State),
+ {?STRING_EMPTY, State1};
+ false ->
+ case catch cf(?STRING_EMPTY, State, Acc, IgnorableWS, fun parse_content/4) of
+ {Rest, State1} when is_record(State1, xmerl_sax_parser_state) ->
+ {Rest, State1};
+ {fatal_error, {State1, Msg}} ->
+ case check_if_document_complete(State1, Msg) of
+ true ->
+ State2 = send_character_event(length(Acc), IgnorableWS, lists:reverse(Acc), State1),
+ {?STRING_EMPTY, State2};
+ false ->
+ ?fatal_error(State1, Msg)
+ end;
+ Other ->
+ throw(Other)
+ end
end;
parse_content(?STRING("\r") = Bytes, State, Acc, IgnorableWS) ->
cf(Bytes, State, Acc, IgnorableWS, fun parse_content/4);
@@ -1094,7 +1101,7 @@ parse_content(?STRING_REST("<?", Rest), State, Acc, IgnorableWS) ->
parse_content(?STRING_REST("<!", Rest1) = Rest, #xmerl_sax_parser_state{end_tags = ET} = State, Acc, IgnorableWS) ->
case ET of
[] ->
- {Rest, State}; %%LATH : Skicka ignorable WS ???
+            {Rest, State}; %% Send ignorable WS ???
_ ->
State1 = send_character_event(length(Acc), IgnorableWS, lists:reverse(Acc), State),
parse_cdata(Rest1, State1)
@@ -1102,7 +1109,7 @@ parse_content(?STRING_REST("<!", Rest1) = Rest, #xmerl_sax_parser_state{end_tags
parse_content(?STRING_REST("<", Rest1) = Rest, #xmerl_sax_parser_state{end_tags = ET} = State, Acc, IgnorableWS) ->
case ET of
[] ->
- {Rest, State}; %%LATH : Skicka ignorable WS ???
+            {Rest, State}; %% Send ignorable WS ???
_ ->
State1 = send_character_event(length(Acc), IgnorableWS, lists:reverse(Acc), State),
parse_stag(Rest1, State1)
@@ -1204,7 +1211,6 @@ send_character_event(_, true, String, State) ->
%% Description: Parse whitespaces.
%% [3] S ::= (#x20 | #x9 | #xD | #xA)+
%%----------------------------------------------------------------------
--dialyzer({no_fail_call, whitespace/3}).
whitespace(?STRING_EMPTY, State, Acc) ->
case cf(?STRING_EMPTY, State, Acc, fun whitespace/3) of
{?STRING_EMPTY, State} ->
@@ -1230,16 +1236,7 @@ whitespace(?STRING_REST("\r", Rest), State, Acc) ->
whitespace(Rest, State#xmerl_sax_parser_state{line_no=N+1}, [?lf |Acc]);
whitespace(?STRING_UNBOUND_REST(C, Rest), State, Acc) when ?is_whitespace(C) ->
whitespace(Rest, State, [C|Acc]);
-whitespace(?STRING_UNBOUND_REST(_C, _) = Bytes, State, Acc) ->
- {lists:reverse(Acc), Bytes, State};
-whitespace(Bytes, #xmerl_sax_parser_state{encoding=Enc} = State, Acc) when is_binary(Bytes) ->
- case unicode:characters_to_list(Bytes, Enc) of
- {incomplete, _, _} ->
- cf(Bytes, State, Acc, fun whitespace/3);
- {error, _Encoded, _Rest} ->
- ?fatal_error(State, lists:flatten(io_lib:format("Bad character, not in ~p\n", [Enc])))
- end.
-
+?WHITESPACE(Bytes, State, Acc).
%%----------------------------------------------------------------------
%% Function: parse_reference(Rest, State, HaveToExist) -> Result
@@ -1362,7 +1359,6 @@ parse_pe_reference_1(Bytes, State, Name) ->
"missing ; after reference " ++ Name).
-
%%----------------------------------------------------------------------
%% Function: insert_reference(Reference, State) -> Result
%% Parameters: Reference = string()
@@ -1378,7 +1374,6 @@ insert_reference({Name, Type, Value}, Table) ->
end.
-
%%----------------------------------------------------------------------
%% Function: look_up_reference(Reference, State) -> Result
%% Parameters: Reference = string()
@@ -1693,7 +1688,7 @@ handle_external_entity({http, Url}, State) ->
++ file:format_error(Reason));
{ok, FD} ->
{?STRING_EMPTY, EntityState} =
- parse_external_entity_1(<<>>,
+ parse_external_entity_byte_order_mark(<<>>,
State#xmerl_sax_parser_state{continuation_state=FD,
current_location=filename:dirname(Url),
entity=filename:basename(Url),
@@ -1709,6 +1704,8 @@ handle_external_entity({http, Url}, State) ->
handle_external_entity({Tag, _Url}, State) ->
?fatal_error(State, "Unsupported URI type: " ++ atom_to_list(Tag)).
+?PARSE_EXTERNAL_ENTITY_BYTE_ORDER_MARK(Bytes, State).
+
%%----------------------------------------------------------------------
%% Function : parse_external_entity_1(Rest, State) -> Result
%% Parameters: Rest = string() | binary()
@@ -1716,7 +1713,6 @@ handle_external_entity({Tag, _Url}, State) ->
%% Result : {Rest, State}
%% Description: Parse the external entity.
%%----------------------------------------------------------------------
--dialyzer({[no_fail_call, no_match], parse_external_entity_1/2}).
parse_external_entity_1(?STRING_EMPTY, #xmerl_sax_parser_state{file_type=Type} = State) ->
case catch cf(?STRING_EMPTY, State, fun parse_external_entity_1/2) of
{Rest, State1} when is_record(State1, xmerl_sax_parser_state) ->
@@ -1726,12 +1722,6 @@ parse_external_entity_1(?STRING_EMPTY, #xmerl_sax_parser_state{file_type=Type} =
Other ->
throw(Other)
end;
-parse_external_entity_1(?BYTE_ORDER_MARK_1, State) ->
- cf(?BYTE_ORDER_MARK_1, State, fun parse_external_entity_1/2);
-parse_external_entity_1(?BYTE_ORDER_MARK_2, State) ->
- cf(?BYTE_ORDER_MARK_2, State, fun parse_external_entity_1/2);
-parse_external_entity_1(?BYTE_ORDER_MARK_REST(Rest), State) ->
- parse_external_entity_1(Rest, State);
parse_external_entity_1(?STRING("<") = Bytes, State) ->
cf(Bytes, State, fun parse_external_entity_1/2);
parse_external_entity_1(?STRING("<?") = Bytes, State) ->
@@ -3290,7 +3280,7 @@ cf(Rest, #xmerl_sax_parser_state{continuation_fun = CFun, continuation_state = C
catch
throw:ErrorTerm ->
?fatal_error(State, ErrorTerm);
- exit:Reason ->
+ exit:Reason ->
?fatal_error(State, {'EXIT', Reason})
end,
case Result of
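A minimal sketch, not part of the patch: with the changes above, parse/2 and parse_dtd/2 return {ok, EventState, Rest} when a complete document has been parsed and input remains, and the previous throws become {fatal_error, ...} terms. A caller can therefore drain several documents from one binary by looping on the returned Rest. parse_all/1 below is a hypothetical helper for illustration only; real code would inspect Rest (for example skip trailing whitespace or junk) before calling the parser again.

%% Sketch of a hypothetical helper, assuming the default event callback.
parse_all(<<>>) ->
    ok;
parse_all(Bin) ->
    case xmerl_sax_parser:stream(Bin, []) of
        {ok, _EventState, Rest} ->
            parse_all(Rest);      %% continue with the next document or trailing data
        Error ->
            {error, Error}        %% fatal_error or other error term from the parser
    end.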
diff --git a/lib/xmerl/src/xmerl_sax_parser_latin1.erlsrc b/lib/xmerl/src/xmerl_sax_parser_latin1.erlsrc
index 961806bf4c..6e59347fb8 100644
--- a/lib/xmerl/src/xmerl_sax_parser_latin1.erlsrc
+++ b/lib/xmerl/src/xmerl_sax_parser_latin1.erlsrc
@@ -2,7 +2,7 @@
%%--------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -34,8 +34,36 @@
-define(APPEND_STRING(Rest, New), <<Rest/binary, New/binary>>).
-define(TO_INPUT_FORMAT(Val), unicode:characters_to_binary(Val, unicode, latin1)).
-%% STRING_REST and STRING_UNBOUND_REST is only different in the list case
-define(STRING_UNBOUND_REST(MatchChar, Rest), <<MatchChar, Rest/binary>>).
--define(BYTE_ORDER_MARK_1, undefined_bom1).
--define(BYTE_ORDER_MARK_2, undefined_bom2).
--define(BYTE_ORDER_MARK_REST(Rest), <<undefined, Rest/binary>>).
+
+-define(PARSE_BYTE_ORDER_MARK(Bytes, State),
+ parse_byte_order_mark(Bytes, State) ->
+ parse_xml_decl(Bytes, State)).
+
+-define(PARSE_XML_DECL(Bytes, State),
+ parse_xml_decl(Bytes, #xmerl_sax_parser_state{encoding=Enc} = State) when is_binary(Bytes) ->
+ case unicode:characters_to_list(Bytes, Enc) of
+ {incomplete, _, _} ->
+ cf(Bytes, State, fun parse_xml_decl/2);
+ {error, _Encoded, _Rest} ->
+ ?fatal_error(State, lists:flatten(io_lib:format("Bad character, not in ~p\n", [Enc])));
+ _ ->
+ parse_prolog(Bytes, State)
+ end;
+ parse_xml_decl(Bytes, State) ->
+ parse_prolog(Bytes, State)).
+
+-define(WHITESPACE(Bytes, State, Acc),
+ whitespace(?STRING_UNBOUND_REST(_C, _) = Bytes, State, Acc) ->
+ {lists:reverse(Acc), Bytes, State};
+ whitespace(Bytes, #xmerl_sax_parser_state{encoding=Enc} = State, Acc) when is_binary(Bytes) ->
+ case unicode:characters_to_list(Bytes, Enc) of
+ {incomplete, _, _} ->
+ cf(Bytes, State, Acc, fun whitespace/3);
+ {error, _Encoded, _Rest} ->
+ ?fatal_error(State, lists:flatten(io_lib:format("Bad character, not in ~p\n", [Enc])))
+ end).
+
+-define(PARSE_EXTERNAL_ENTITY_BYTE_ORDER_MARK(Bytes, State),
+ parse_external_entity_byte_order_mark(Bytes, State) ->
+ parse_external_entity_1(Bytes, State)).
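For context, each xmerl_sax_parser_<encoding>.erlsrc file only supplies encoding-specific macros; the build combines it with the shared base source, whose top-level calls such as ?PARSE_BYTE_ORDER_MARK(Bytes, State). expand into complete function clauses (the call site supplies the final full stop). For latin1, which carries no byte order mark, the macro defined above expands to a plain forwarding clause, roughly:

parse_byte_order_mark(Bytes, State) ->
    parse_xml_decl(Bytes, State).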
diff --git a/lib/xmerl/src/xmerl_sax_parser_list.erlsrc b/lib/xmerl/src/xmerl_sax_parser_list.erlsrc
index 624a621d92..6a4435b1d9 100644
--- a/lib/xmerl/src/xmerl_sax_parser_list.erlsrc
+++ b/lib/xmerl/src/xmerl_sax_parser_list.erlsrc
@@ -36,6 +36,19 @@
 %% In the list case we can't use a '++' when matching against an unbound variable
-define(STRING_UNBOUND_REST(MatchChar, Rest), [MatchChar | Rest]).
--define(BYTE_ORDER_MARK_1, undefined_bom1).
--define(BYTE_ORDER_MARK_2, undefined_bom2).
--define(BYTE_ORDER_MARK_REST(Rest), [undefined|Rest]).
+
+-define(PARSE_BYTE_ORDER_MARK(Bytes, State),
+ parse_byte_order_mark(Bytes, State) ->
+ parse_xml_decl(Bytes, State)).
+
+-define(PARSE_XML_DECL(Bytes, State),
+ parse_xml_decl(Bytes, State) ->
+ parse_prolog(Bytes, State)).
+
+-define(WHITESPACE(Bytes, State, Acc),
+ whitespace(?STRING_UNBOUND_REST(_C, _) = Bytes, State, Acc) ->
+ {lists:reverse(Acc), Bytes, State}).
+
+-define(PARSE_EXTERNAL_ENTITY_BYTE_ORDER_MARK(Bytes, State),
+ parse_external_entity_byte_order_mark(Bytes, State) ->
+ parse_external_entity_1(Bytes, State)).
diff --git a/lib/xmerl/src/xmerl_sax_parser_utf16be.erlsrc b/lib/xmerl/src/xmerl_sax_parser_utf16be.erlsrc
index ff84ece97a..ec89024729 100644
--- a/lib/xmerl/src/xmerl_sax_parser_utf16be.erlsrc
+++ b/lib/xmerl/src/xmerl_sax_parser_utf16be.erlsrc
@@ -2,7 +2,7 @@
%%--------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -34,8 +34,50 @@
-define(APPEND_STRING(Rest, New), <<Rest/binary, New/binary>>).
-define(TO_INPUT_FORMAT(Val), unicode:characters_to_binary(Val, unicode, {utf16, big})).
-%% STRING_REST and STRING_UNBOUND_REST is only different in the list case
-define(STRING_UNBOUND_REST(MatchChar, Rest), <<MatchChar/big-utf16, Rest/binary>>).
--define(BYTE_ORDER_MARK_1, undefined_bom1).
--define(BYTE_ORDER_MARK_2, <<16#FE>>).
+-define(BYTE_ORDER_MARK_1, <<16#FE>>).
-define(BYTE_ORDER_MARK_REST(Rest), <<16#FE, 16#FF, Rest/binary>>).
+
+-define(PARSE_BYTE_ORDER_MARK(Bytes, State),
+ parse_byte_order_mark(?STRING_EMPTY, State) ->
+ cf(?STRING_EMPTY, State, fun parse_byte_order_mark/2);
+ parse_byte_order_mark(?BYTE_ORDER_MARK_1, State) ->
+ cf(?BYTE_ORDER_MARK_1, State, fun parse_byte_order_mark/2);
+ parse_byte_order_mark(?BYTE_ORDER_MARK_REST(Rest), State) ->
+ parse_xml_decl(Rest, State);
+ parse_byte_order_mark(Bytes, State) ->
+ parse_xml_decl(Bytes, State)).
+
+-define(PARSE_XML_DECL(Bytes, State),
+ parse_xml_decl(Bytes, #xmerl_sax_parser_state{encoding=Enc} = State) when is_binary(Bytes) ->
+ case unicode:characters_to_list(Bytes, Enc) of
+ {incomplete, _, _} ->
+ cf(Bytes, State, fun parse_xml_decl/2);
+ {error, _Encoded, _Rest} ->
+ ?fatal_error(State, lists:flatten(io_lib:format("Bad character, not in ~p\n", [Enc])));
+ _ ->
+ parse_prolog(Bytes, State)
+ end;
+ parse_xml_decl(Bytes, State) ->
+ parse_prolog(Bytes, State)).
+
+-define(WHITESPACE(Bytes, State, Acc),
+ whitespace(?STRING_UNBOUND_REST(_C, _) = Bytes, State, Acc) ->
+ {lists:reverse(Acc), Bytes, State};
+ whitespace(Bytes, #xmerl_sax_parser_state{encoding=Enc} = State, Acc) when is_binary(Bytes) ->
+ case unicode:characters_to_list(Bytes, Enc) of
+ {incomplete, _, _} ->
+ cf(Bytes, State, Acc, fun whitespace/3);
+ {error, _Encoded, _Rest} ->
+ ?fatal_error(State, lists:flatten(io_lib:format("Bad character, not in ~p\n", [Enc])))
+ end).
+
+-define(PARSE_EXTERNAL_ENTITY_BYTE_ORDER_MARK(Bytes, State),
+ parse_external_entity_byte_order_mark(?STRING_EMPTY, State) ->
+ cf(?STRING_EMPTY, State, fun parse_external_entity_byte_order_mark/2);
+ parse_external_entity_byte_order_mark(?BYTE_ORDER_MARK_1, State) ->
+ cf(?BYTE_ORDER_MARK_1, State, fun parse_external_entity_byte_order_mark/2);
+ parse_external_entity_byte_order_mark(?BYTE_ORDER_MARK_REST(Rest), State) ->
+ parse_external_entity_1(Rest, State);
+ parse_external_entity_byte_order_mark(Bytes, State) ->
+ parse_external_entity_1(Bytes, State)).
diff --git a/lib/xmerl/src/xmerl_sax_parser_utf16le.erlsrc b/lib/xmerl/src/xmerl_sax_parser_utf16le.erlsrc
index a330fce8d0..566333a045 100644
--- a/lib/xmerl/src/xmerl_sax_parser_utf16le.erlsrc
+++ b/lib/xmerl/src/xmerl_sax_parser_utf16le.erlsrc
@@ -2,7 +2,7 @@
%%--------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -34,8 +34,50 @@
-define(APPEND_STRING(Rest, New), <<Rest/binary, New/binary>>).
-define(TO_INPUT_FORMAT(Val), unicode:characters_to_binary(Val, unicode, {utf16, little})).
-%% STRING_REST and STRING_UNBOUND_REST is only different in the list case
-define(STRING_UNBOUND_REST(MatchChar, Rest), <<MatchChar/little-utf16, Rest/binary>>).
--define(BYTE_ORDER_MARK_1, undefined_bom1).
--define(BYTE_ORDER_MARK_2, <<16#FF>>).
+-define(BYTE_ORDER_MARK_1, <<16#FF>>).
-define(BYTE_ORDER_MARK_REST(Rest), <<16#FF, 16#FE, Rest/binary>>).
+
+-define(PARSE_BYTE_ORDER_MARK(Bytes, State),
+ parse_byte_order_mark(?STRING_EMPTY, State) ->
+ cf(?STRING_EMPTY, State, fun parse_byte_order_mark/2);
+ parse_byte_order_mark(?BYTE_ORDER_MARK_1, State) ->
+ cf(?BYTE_ORDER_MARK_1, State, fun parse_byte_order_mark/2);
+ parse_byte_order_mark(?BYTE_ORDER_MARK_REST(Rest), State) ->
+ parse_xml_decl(Rest, State);
+ parse_byte_order_mark(Bytes, State) ->
+ parse_xml_decl(Bytes, State)).
+
+-define(PARSE_XML_DECL(Bytes, State),
+ parse_xml_decl(Bytes, #xmerl_sax_parser_state{encoding=Enc} = State) when is_binary(Bytes) ->
+ case unicode:characters_to_list(Bytes, Enc) of
+ {incomplete, _, _} ->
+ cf(Bytes, State, fun parse_xml_decl/2);
+ {error, _Encoded, _Rest} ->
+ ?fatal_error(State, lists:flatten(io_lib:format("Bad character, not in ~p\n", [Enc])));
+ _ ->
+ parse_prolog(Bytes, State)
+ end;
+ parse_xml_decl(Bytes, State) ->
+ parse_prolog(Bytes, State)).
+
+-define(WHITESPACE(Bytes, State, Acc),
+ whitespace(?STRING_UNBOUND_REST(_C, _) = Bytes, State, Acc) ->
+ {lists:reverse(Acc), Bytes, State};
+ whitespace(Bytes, #xmerl_sax_parser_state{encoding=Enc} = State, Acc) when is_binary(Bytes) ->
+ case unicode:characters_to_list(Bytes, Enc) of
+ {incomplete, _, _} ->
+ cf(Bytes, State, Acc, fun whitespace/3);
+ {error, _Encoded, _Rest} ->
+ ?fatal_error(State, lists:flatten(io_lib:format("Bad character, not in ~p\n", [Enc])))
+ end).
+
+-define(PARSE_EXTERNAL_ENTITY_BYTE_ORDER_MARK(Bytes, State),
+ parse_external_entity_byte_order_mark(?STRING_EMPTY, State) ->
+ cf(?STRING_EMPTY, State, fun parse_external_entity_byte_order_mark/2);
+ parse_external_entity_byte_order_mark(?BYTE_ORDER_MARK_1, State) ->
+ cf(?BYTE_ORDER_MARK_1, State, fun parse_external_entity_byte_order_mark/2);
+ parse_external_entity_byte_order_mark(?BYTE_ORDER_MARK_REST(Rest), State) ->
+ parse_external_entity_1(Rest, State);
+ parse_external_entity_byte_order_mark(Bytes, State) ->
+ parse_external_entity_1(Bytes, State)).
diff --git a/lib/xmerl/src/xmerl_sax_parser_utf8.erlsrc b/lib/xmerl/src/xmerl_sax_parser_utf8.erlsrc
index d46d60d237..f41d06d013 100644
--- a/lib/xmerl/src/xmerl_sax_parser_utf8.erlsrc
+++ b/lib/xmerl/src/xmerl_sax_parser_utf8.erlsrc
@@ -2,7 +2,7 @@
%%--------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -34,11 +34,55 @@
-define(APPEND_STRING(Rest, New), <<Rest/binary, New/binary>>).
-define(TO_INPUT_FORMAT(Val), unicode:characters_to_binary(Val, unicode, utf8)).
-
-%% STRING_REST and STRING_UNBOUND_REST is only different in the list case
-define(STRING_UNBOUND_REST(MatchChar, Rest), <<MatchChar/utf8, Rest/binary>>).
-define(BYTE_ORDER_MARK_1, <<16#EF>>).
-define(BYTE_ORDER_MARK_2, <<16#EF, 16#BB>>).
-define(BYTE_ORDER_MARK_REST(Rest), <<16#EF, 16#BB, 16#BF, Rest/binary>>).
+-define(PARSE_BYTE_ORDER_MARK(Bytes, State),
+ parse_byte_order_mark(?STRING_EMPTY, State) ->
+ cf(?STRING_EMPTY, State, fun parse_byte_order_mark/2);
+ parse_byte_order_mark(?BYTE_ORDER_MARK_1, State) ->
+ cf(?BYTE_ORDER_MARK_1, State, fun parse_byte_order_mark/2);
+ parse_byte_order_mark(?BYTE_ORDER_MARK_2, State) ->
+ cf(?BYTE_ORDER_MARK_2, State, fun parse_byte_order_mark/2);
+ parse_byte_order_mark(?BYTE_ORDER_MARK_REST(Rest), State) ->
+ parse_xml_decl(Rest, State);
+ parse_byte_order_mark(Bytes, State) ->
+ parse_xml_decl(Bytes, State)).
+
+-define(PARSE_XML_DECL(Bytes, State),
+ parse_xml_decl(Bytes, #xmerl_sax_parser_state{encoding=Enc} = State) when is_binary(Bytes) ->
+ case unicode:characters_to_list(Bytes, Enc) of
+ {incomplete, _, _} ->
+ cf(Bytes, State, fun parse_xml_decl/2);
+ {error, _Encoded, _Rest} ->
+ ?fatal_error(State, lists:flatten(io_lib:format("Bad character, not in ~p\n", [Enc])));
+ _ ->
+ parse_prolog(Bytes, State)
+ end;
+ parse_xml_decl(Bytes, State) ->
+ parse_prolog(Bytes, State)).
+
+-define(WHITESPACE(Bytes, State, Acc),
+ whitespace(?STRING_UNBOUND_REST(_C, _) = Bytes, State, Acc) ->
+ {lists:reverse(Acc), Bytes, State};
+ whitespace(Bytes, #xmerl_sax_parser_state{encoding=Enc} = State, Acc) when is_binary(Bytes) ->
+ case unicode:characters_to_list(Bytes, Enc) of
+ {incomplete, _, _} ->
+ cf(Bytes, State, Acc, fun whitespace/3);
+ {error, _Encoded, _Rest} ->
+ ?fatal_error(State, lists:flatten(io_lib:format("Bad character, not in ~p\n", [Enc])))
+    end).
+
+-define(PARSE_EXTERNAL_ENTITY_BYTE_ORDER_MARK(Bytes, State),
+ parse_external_entity_byte_order_mark(?STRING_EMPTY, State) ->
+ cf(?STRING_EMPTY, State, fun parse_external_entity_byte_order_mark/2);
+ parse_external_entity_byte_order_mark(?BYTE_ORDER_MARK_1, State) ->
+ cf(?BYTE_ORDER_MARK_1, State, fun parse_external_entity_byte_order_mark/2);
+ parse_external_entity_byte_order_mark(?BYTE_ORDER_MARK_2, State) ->
+ cf(?BYTE_ORDER_MARK_2, State, fun parse_external_entity_byte_order_mark/2);
+ parse_external_entity_byte_order_mark(?BYTE_ORDER_MARK_REST(Rest), State) ->
+ parse_external_entity_1(Rest, State);
+ parse_external_entity_byte_order_mark(Bytes, State) ->
+ parse_external_entity_1(Bytes, State)).
diff --git a/lib/xmerl/src/xmerl_scan.erl b/lib/xmerl/src/xmerl_scan.erl
index 5e0459ec21..95dc82e5c9 100644
--- a/lib/xmerl/src/xmerl_scan.erl
+++ b/lib/xmerl/src/xmerl_scan.erl
@@ -2225,16 +2225,18 @@ processed_whole_element(S=#xmerl_scanner{hook_fun = _Hook,
AllAttrs =
case S#xmerl_scanner.default_attrs of
true ->
- [ #xmlAttribute{name = AttName,
- parents = [{Name, Pos} | Parents],
- language = Lang,
- nsinfo = NSI,
- namespace = Namespace,
- value = AttValue,
- normalized = true} ||
- {AttName, AttValue} <- get_default_attrs(S, Name),
- AttValue =/= no_value,
- not lists:keymember(AttName, #xmlAttribute.name, Attrs) ];
+ DefaultAttrs =
+ [ #xmlAttribute{name = AttName,
+ parents = [{Name, Pos} | Parents],
+ language = Lang,
+ nsinfo = NSI,
+ namespace = Namespace,
+ value = AttValue,
+ normalized = true} ||
+ {AttName, AttValue} <- get_default_attrs(S, Name),
+ AttValue =/= no_value,
+ not lists:keymember(AttName, #xmlAttribute.name, Attrs) ],
+ lists:append(Attrs, DefaultAttrs);
false ->
Attrs
end,
@@ -2307,7 +2309,9 @@ expanded_name(Name, [], #xmlNamespace{default = URI}, S) ->
expanded_name(Name, N = {"xmlns", Local}, #xmlNamespace{nodes = Ns}, S) ->
{_, Value} = lists:keyfind(Local, 1, Ns),
case Name of
- 'xmlns:xml' when Value =/= 'http://www.w3.org/XML/1998/namespace' ->
+ 'xmlns:xml' when Value =:= 'http://www.w3.org/XML/1998/namespace' ->
+ N;
+ 'xmlns:xml' when Value =/= 'http://www.w3.org/XML/1998/namespace' ->
?fatal({xml_prefix_cannot_be_redeclared, Value}, S);
'xmlns:xmlns' ->
?fatal({xmlns_prefix_cannot_be_declared, Value}, S);
@@ -2321,6 +2325,8 @@ expanded_name(Name, N = {"xmlns", Local}, #xmlNamespace{nodes = Ns}, S) ->
N
end
end;
+expanded_name(_Name, {"xml", Local}, _NS, _S) ->
+ {'http://www.w3.org/XML/1998/namespace', list_to_atom(Local)};
expanded_name(_Name, {Prefix, Local}, #xmlNamespace{nodes = Ns}, S) ->
case lists:keysearch(Prefix, 1, Ns) of
{value, {_, URI}} ->
@@ -2331,9 +2337,6 @@ expanded_name(_Name, {Prefix, Local}, #xmlNamespace{nodes = Ns}, S) ->
?fatal({namespace_prefix_not_declared, Prefix}, S)
end.
-
-
-
keyreplaceadd(K, Pos, [H|T], Obj) when K == element(Pos, H) ->
[Obj|T];
keyreplaceadd(K, Pos, [H|T], Obj) ->
diff --git a/lib/xmerl/test/Makefile b/lib/xmerl/test/Makefile
index 7a326e334f..b13fee05b3 100644
--- a/lib/xmerl/test/Makefile
+++ b/lib/xmerl/test/Makefile
@@ -55,7 +55,8 @@ SUITE_FILES= \
xmerl_xsd_SUITE.erl \
xmerl_xsd_MS2002-01-16_SUITE.erl \
xmerl_xsd_NIST2002-01-16_SUITE.erl \
- xmerl_xsd_Sun2002-01-16_SUITE.erl
+ xmerl_xsd_Sun2002-01-16_SUITE.erl \
+ xmerl_sax_stream_SUITE.erl
XML_FILES= \
testcases.dtd \
@@ -125,4 +126,5 @@ release_tests_spec: opt
@tar cfh - xmerl_xsd_MS2002-01-16_SUITE_data | (cd "$(RELSYSDIR)"; tar xf -)
@tar cfh - xmerl_xsd_NIST2002-01-16_SUITE_data | (cd "$(RELSYSDIR)"; tar xf -)
@tar cfh - xmerl_xsd_Sun2002-01-16_SUITE_data | (cd "$(RELSYSDIR)"; tar xf -)
+ @tar cfh - xmerl_sax_stream_SUITE_data | (cd "$(RELSYSDIR)"; tar xf -)
chmod -R u+w "$(RELSYSDIR)"
diff --git a/lib/xmerl/test/xmerl_SUITE.erl b/lib/xmerl/test/xmerl_SUITE.erl
index e97b8c6a4b..58c462483c 100644
--- a/lib/xmerl/test/xmerl_SUITE.erl
+++ b/lib/xmerl/test/xmerl_SUITE.erl
@@ -54,7 +54,8 @@ groups() ->
cpd_expl_provided_DTD]},
{misc, [],
[latin1_alias, syntax_bug1, syntax_bug2, syntax_bug3,
- pe_ref1, copyright, testXSEIF, export_simple1, export]},
+ pe_ref1, copyright, testXSEIF, export_simple1, export,
+ default_attrs_bug, xml_ns]},
{eventp_tests, [], [sax_parse_and_export]},
{ticket_tests, [],
[ticket_5998, ticket_7211, ticket_7214, ticket_7430,
@@ -223,6 +224,50 @@ syntax_bug3(Config) ->
Err -> Err
end.
+default_attrs_bug(Config) ->
+ file:set_cwd(datadir(Config)),
+ Doc = "<!DOCTYPE doc [<!ATTLIST doc b CDATA \"default\">]>\n"
+ "<doc a=\"explicit\"/>",
+ {#xmlElement{attributes = [#xmlAttribute{name = a, value = "explicit"},
+ #xmlAttribute{name = b, value = "default"}]},
+ []
+ } = xmerl_scan:string(Doc, [{default_attrs, true}]),
+ Doc2 = "<!DOCTYPE doc [<!ATTLIST doc b CDATA \"default\">]>\n"
+ "<doc b=\"also explicit\" a=\"explicit\"/>",
+ {#xmlElement{attributes = [#xmlAttribute{name = b, value = "also explicit"},
+ #xmlAttribute{name = a, value = "explicit"}]},
+ []
+ } = xmerl_scan:string(Doc2, [{default_attrs, true}]),
+ ok.
+
+
+xml_ns(Config) ->
+ Doc = "<?xml version='1.0'?>\n"
+ "<doc xml:attr1=\"implicit xml ns\"/>",
+ {#xmlElement{namespace=#xmlNamespace{default = [], nodes = []},
+ attributes = [#xmlAttribute{name = 'xml:attr1',
+ expanded_name = {'http://www.w3.org/XML/1998/namespace',attr1},
+ nsinfo = {"xml","attr1"},
+ namespace = #xmlNamespace{default = [], nodes = []}}]},
+ []
+ } = xmerl_scan:string(Doc, [{namespace_conformant, true}]),
+ Doc2 = "<?xml version='1.0'?>\n"
+ "<doc xmlns:xml=\"http://www.w3.org/XML/1998/namespace\" xml:attr1=\"explicit xml ns\"/>",
+ {#xmlElement{namespace=#xmlNamespace{default = [], nodes = [{"xml",'http://www.w3.org/XML/1998/namespace'}]},
+ attributes = [#xmlAttribute{name = 'xmlns:xml',
+ expanded_name = {"xmlns","xml"},
+ nsinfo = {"xmlns","xml"},
+ namespace = #xmlNamespace{default = [],
+ nodes = [{"xml",'http://www.w3.org/XML/1998/namespace'}]}},
+ #xmlAttribute{name = 'xml:attr1',
+ expanded_name = {'http://www.w3.org/XML/1998/namespace',attr1},
+ nsinfo = {"xml","attr1"},
+ namespace = #xmlNamespace{default = [],
+ nodes = [{"xml",'http://www.w3.org/XML/1998/namespace'}]}}]},
+ []
+ } = xmerl_scan:string(Doc2, [{namespace_conformant, true}]),
+ ok.
+
pe_ref1(Config) ->
file:set_cwd(datadir(Config)),
{#xmlElement{},[]} = xmerl_scan:file(datadir_join(Config,[misc,"PE_ref1.xml"]),[{validation,true}]).
diff --git a/lib/xmerl/test/xmerl_sax_SUITE.erl b/lib/xmerl/test/xmerl_sax_SUITE.erl
index f5c0a783c4..7d1a70905c 100644
--- a/lib/xmerl/test/xmerl_sax_SUITE.erl
+++ b/lib/xmerl/test/xmerl_sax_SUITE.erl
@@ -85,17 +85,17 @@ ticket_11551(_Config) ->
<a>hej</a>
<?xml version=\"1.0\" encoding=\"utf-8\" ?>
<a>hej</a>">>,
- {ok, undefined, <<"<?xml", _/binary>>} = xmerl_sax_parser:stream(Stream1, []),
+ {ok, undefined, <<"\n<?xml", _/binary>>} = xmerl_sax_parser:stream(Stream1, []),
Stream2= <<"<?xml version=\"1.0\" encoding=\"utf-8\" ?>
<a>hej</a>
<?xml version=\"1.0\" encoding=\"utf-8\" ?>
<a>hej</a>">>,
- {ok, undefined, <<"<?xml", _/binary>>} = xmerl_sax_parser:stream(Stream2, []),
+ {ok, undefined, <<"\n\n\n<?xml", _/binary>>} = xmerl_sax_parser:stream(Stream2, []),
Stream3= <<"<a>hej</a>
<?xml version=\"1.0\" encoding=\"utf-8\" ?>
<a>hej</a>">>,
- {ok, undefined, <<"<?xml", _/binary>>} = xmerl_sax_parser:stream(Stream3, []),
+ {ok, undefined, <<"\n\n<?xml", _/binary>>} = xmerl_sax_parser:stream(Stream3, []),
ok.
diff --git a/lib/xmerl/test/xmerl_sax_std_SUITE.erl b/lib/xmerl/test/xmerl_sax_std_SUITE.erl
index 525a3b175a..b8412206cc 100644
--- a/lib/xmerl/test/xmerl_sax_std_SUITE.erl
+++ b/lib/xmerl/test/xmerl_sax_std_SUITE.erl
@@ -2,7 +2,7 @@
%%----------------------------------------------------------------------
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -507,11 +507,8 @@ end_per_testcase(_Func,_Config) ->
'not-wf-sa-036'(Config) ->
file:set_cwd(xmerl_test_lib:get_data_dir(Config)),
Path = filename:join([xmerl_test_lib:get_data_dir(Config),"xmltest","not-wf/sa/036.xml"]),
- %% Special case becase we returns everything after a legal document
- %% as an rest instead of giving and error to let the user handle
- %% multipple docs on a stream.
- {ok,_,<<"Illegal data\r\n">>} = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]).
- %%check_result(R, "not-wf").
+ R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]),
+ check_result(R, "not-wf").
%%----------------------------------------------------------------------
%% Test Case
@@ -522,11 +519,8 @@ end_per_testcase(_Func,_Config) ->
'not-wf-sa-037'(Config) ->
file:set_cwd(xmerl_test_lib:get_data_dir(Config)),
Path = filename:join([xmerl_test_lib:get_data_dir(Config),"xmltest","not-wf/sa/037.xml"]),
- %% Special case becase we returns everything after a legal document
- %% as an rest instead of giving and error to let the user handle
- %% multipple docs on a stream.
- {ok,_,<<"&#32;\r\n">>} = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]).
- %%check_result(R, "not-wf").
+ R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]),
+ check_result(R, "not-wf").
%%----------------------------------------------------------------------
%% Test Case
@@ -561,11 +555,8 @@ end_per_testcase(_Func,_Config) ->
'not-wf-sa-040'(Config) ->
file:set_cwd(xmerl_test_lib:get_data_dir(Config)),
Path = filename:join([xmerl_test_lib:get_data_dir(Config),"xmltest","not-wf/sa/040.xml"]),
- %% Special case becase we returns everything after a legal document
- %% as an rest instead of giving and error to let the user handle
- %% multipple docs on a stream.
- {ok,_,<<"<doc></doc>\r\n">>} = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]).
- %%check_result(R, "not-wf").
+ R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]),
+ check_result(R, "not-wf").
%%----------------------------------------------------------------------
%% Test Case
@@ -576,11 +567,8 @@ end_per_testcase(_Func,_Config) ->
'not-wf-sa-041'(Config) ->
file:set_cwd(xmerl_test_lib:get_data_dir(Config)),
Path = filename:join([xmerl_test_lib:get_data_dir(Config),"xmltest","not-wf/sa/041.xml"]),
- %% Special case becase we returns everything after a legal document
- %% as an rest instead of giving and error to let the user handle
- %% multipple docs on a stream.
- {ok,_,<<"<doc></doc>\r\n">>} = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]).
- %%check_result(R, "not-wf").
+ R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]),
+ check_result(R, "not-wf").
%%----------------------------------------------------------------------
%% Test Case
@@ -603,11 +591,8 @@ end_per_testcase(_Func,_Config) ->
'not-wf-sa-043'(Config) ->
file:set_cwd(xmerl_test_lib:get_data_dir(Config)),
Path = filename:join([xmerl_test_lib:get_data_dir(Config),"xmltest","not-wf/sa/043.xml"]),
- %% Special case becase we returns everything after a legal document
- %% as an rest instead of giving and error to let the user handle
- %% multipple docs on a stream.
- {ok,_,<<"Illegal data\r\n">>} = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]).
- %%check_result(R, "not-wf").
+ R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]),
+ check_result(R, "not-wf").
%%----------------------------------------------------------------------
%% Test Case
@@ -618,11 +603,8 @@ end_per_testcase(_Func,_Config) ->
'not-wf-sa-044'(Config) ->
file:set_cwd(xmerl_test_lib:get_data_dir(Config)),
Path = filename:join([xmerl_test_lib:get_data_dir(Config),"xmltest","not-wf/sa/044.xml"]),
- %% Special case becase we returns everything after a legal document
- %% as an rest instead of giving and error to let the user handle
- %% multipple docs on a stream.
- {ok,_,<<"<doc/>\r\n">>} = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]).
- %%check_result(R, "not-wf").
+ R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]),
+ check_result(R, "not-wf").
%%----------------------------------------------------------------------
%% Test Case
@@ -669,11 +651,8 @@ end_per_testcase(_Func,_Config) ->
'not-wf-sa-048'(Config) ->
file:set_cwd(xmerl_test_lib:get_data_dir(Config)),
Path = filename:join([xmerl_test_lib:get_data_dir(Config),"xmltest","not-wf/sa/048.xml"]),
- %% Special case becase we returns everything after a legal document
- %% as an rest instead of giving and error to let the user handle
- %% multipple docs on a stream.
- {ok,_,<<"<![CDATA[]]>\r\n">>} = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]).
- %%check_result(R, "not-wf").
+ R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]),
+ check_result(R, "not-wf").
%%----------------------------------------------------------------------
%% Test Case
@@ -1416,11 +1395,8 @@ end_per_testcase(_Func,_Config) ->
'not-wf-sa-110'(Config) ->
file:set_cwd(xmerl_test_lib:get_data_dir(Config)),
Path = filename:join([xmerl_test_lib:get_data_dir(Config),"xmltest","not-wf/sa/110.xml"]),
- %% Special case becase we returns everything after a legal document
- %% as an rest instead of giving and error to let the user handle
- %% multipple docs on a stream.
- {ok,_,<<"&e;\r\n">>} = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]).
- %%check_result(R, "not-wf").
+ R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]),
+ check_result(R, "not-wf").
%%----------------------------------------------------------------------
%% Test Case
@@ -1914,9 +1890,9 @@ end_per_testcase(_Func,_Config) ->
    %% Special case because we return everything after a legal document
    %% as a rest instead of giving an error, to let the user handle
    %% multiple docs on a stream.
- {ok,_,<<"<?xml version=\"1.0\"?>\r\n">>} = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]).
- % R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]),
- % check_result(R, "not-wf").
+ %{ok,_,<<"<?xml version=\"1.0\"?>\r\n">>} = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]).
+ R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]),
+ check_result(R, "not-wf").
%%----------------------------------------------------------------------
%% Test Case
@@ -7784,11 +7760,8 @@ end_per_testcase(_Func,_Config) ->
'o-p01fail3'(Config) ->
file:set_cwd(xmerl_test_lib:get_data_dir(Config)),
Path = filename:join([xmerl_test_lib:get_data_dir(Config),"oasis","p01fail3.xml"]),
- %% Special case becase we returns everything after a legal document
- %% as an rest instead of giving and error to let the user handle
- %% multipple docs on a stream.
- {ok,_, <<"<bad/>", _/binary>>} = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]).
- %%check_result(R, "not-wf").
+ R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]),
+ check_result(R, "not-wf").
%%----------------------------------------------------------------------
%% Test Case
@@ -11417,12 +11390,8 @@ end_per_testcase(_Func,_Config) ->
'ibm-not-wf-P01-ibm01n02'(Config) ->
file:set_cwd(xmerl_test_lib:get_data_dir(Config)),
Path = filename:join([xmerl_test_lib:get_data_dir(Config),"ibm","not-wf/P01/ibm01n02.xml"]),
- %% Special case becase we returns everything after a legal document
- %% as an rest instead of giving and error to let the user handle
- %% multipple docs on a stream.
- {ok,_, <<"<?xml version=\"1.0\"?>", _/binary>>} = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]).
- % R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]),
- % check_result(R, "not-wf").
+ R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]),
+ check_result(R, "not-wf").
%%----------------------------------------------------------------------
%% Test Case
@@ -11433,11 +11402,8 @@ end_per_testcase(_Func,_Config) ->
'ibm-not-wf-P01-ibm01n03'(Config) ->
file:set_cwd(xmerl_test_lib:get_data_dir(Config)),
Path = filename:join([xmerl_test_lib:get_data_dir(Config),"ibm","not-wf/P01/ibm01n03.xml"]),
- %% Special case becase we returns everything after a legal document
- %% as an rest instead of giving and error to let the user handle
- %% multipple docs on a stream.
- {ok,_, <<"<title>Wrong combination!</title>", _/binary>>} = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]).
- %%check_result(R, "not-wf").
+ R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]),
+ check_result(R, "not-wf").
%%----------------------------------------------------------------------
%% Test Cases
@@ -13027,11 +12993,8 @@ end_per_testcase(_Func,_Config) ->
'ibm-not-wf-P27-ibm27n01'(Config) ->
file:set_cwd(xmerl_test_lib:get_data_dir(Config)),
Path = filename:join([xmerl_test_lib:get_data_dir(Config),"ibm","not-wf/P27/ibm27n01.xml"]),
- %% Special case becase we returns everything after a legal document
- %% as an rest instead of giving and error to let the user handle
- %% multipple docs on a stream.
- {ok,_, <<"<!ELEMENT cat EMPTY>">>} = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]).
- %%check_result(R, "not-wf").
+ R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]),
+ check_result(R, "not-wf").
%%----------------------------------------------------------------------
%% Test Cases
@@ -13461,11 +13424,8 @@ end_per_testcase(_Func,_Config) ->
'ibm-not-wf-P39-ibm39n06'(Config) ->
file:set_cwd(xmerl_test_lib:get_data_dir(Config)),
Path = filename:join([xmerl_test_lib:get_data_dir(Config),"ibm","not-wf/P39/ibm39n06.xml"]),
- %% Special case becase we returns everything after a legal document
- %% as an rest instead of giving and error to let the user handle
- %% multipple docs on a stream.
- {ok,_,<<"content after end tag\r\n">>} = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]).
- %%check_result(R, "not-wf").
+ R = xmerl_sax_parser:file(Path, [{event_fun, fun(_,_,S) -> S end}]),
+ check_result(R, "not-wf").
%%----------------------------------------------------------------------
%% Test Cases
diff --git a/lib/xmerl/test/xmerl_sax_stream_SUITE.erl b/lib/xmerl/test/xmerl_sax_stream_SUITE.erl
new file mode 100644
index 0000000000..a306eb66a2
--- /dev/null
+++ b/lib/xmerl/test/xmerl_sax_stream_SUITE.erl
@@ -0,0 +1,245 @@
+%%-*-erlang-*-
+%%----------------------------------------------------------------------
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2017. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%----------------------------------------------------------------------
+%% File : xmerl_sax_stream_SUITE.erl
+%%----------------------------------------------------------------------
+-module(xmerl_sax_stream_SUITE).
+-compile(export_all).
+
+%%----------------------------------------------------------------------
+%% Include files
+%%----------------------------------------------------------------------
+-include_lib("common_test/include/ct.hrl").
+-include_lib("kernel/include/file.hrl").
+
+%%======================================================================
+%% External functions
+%%======================================================================
+
+%%----------------------------------------------------------------------
+%% Initializations
+%%----------------------------------------------------------------------
+all() ->
+ [
+ one_document,
+ two_documents,
+ one_document_and_junk
+ ].
+
+%%----------------------------------------------------------------------
+%% Initializations
+%%----------------------------------------------------------------------
+
+init_per_suite(Config) ->
+ Config.
+
+end_per_suite(_Config) ->
+ ok.
+
+init_per_testcase(_TestCase, Config) ->
+ Config.
+
+end_per_testcase(_Func, _Config) ->
+ ok.
+
+%%----------------------------------------------------------------------
+%% Tests
+%%----------------------------------------------------------------------
+one_document(Config) ->
+ Port = 11111,
+
+ {ok, ListenSocket} = listen(Port),
+ Self = self(),
+
+ spawn(
+ fun() ->
+ case catch gen_tcp:accept(ListenSocket) of
+ {ok, S} ->
+ Result = xmerl_sax_parser:stream(<<>>,
+ [{continuation_state, S},
+ {continuation_fun,
+ fun(Sd) ->
+ io:format("Continuation called!!", []),
+ case gen_tcp:recv(Sd, 0) of
+ {ok, Packet} ->
+ io:format("Packet: ~p\n", [Packet]),
+ {Packet, Sd};
+ {error, Reason} ->
+ throw({error, Reason})
+ end
+ end}]),
+ Self ! {xmerl_sax, Result},
+ close(S);
+ Error ->
+ Self ! {xmerl_sax, {error, {accept, Error}}}
+ end
+ end),
+
+ {ok, SendSocket} = connect(localhost, Port),
+
+ {ok, Binary} = file:read_file(filename:join([datadir(Config), "xmerl_sax_stream_one.xml"])),
+
+ send_chunks(SendSocket, Binary),
+
+ receive
+ {xmerl_sax, {ok, undefined, Rest}} ->
+ <<"\n">> = Rest,
+ io:format("Ok Rest: ~p\n", [Rest])
+ after 5000 ->
+ ct:fail("Timeout")
+ end,
+ ok.
+
+two_documents(Config) ->
+ Port = 11111,
+
+ {ok, ListenSocket} = listen(Port),
+ Self = self(),
+
+ spawn(
+ fun() ->
+ case catch gen_tcp:accept(ListenSocket) of
+ {ok, S} ->
+ Result = xmerl_sax_parser:stream(<<>>,
+ [{continuation_state, S},
+ {continuation_fun,
+ fun(Sd) ->
+ io:format("Continuation called!!", []),
+ case gen_tcp:recv(Sd, 0) of
+ {ok, Packet} ->
+ io:format("Packet: ~p\n", [Packet]),
+ {Packet, Sd};
+ {error, Reason} ->
+ throw({error, Reason})
+ end
+ end}]),
+ Self ! {xmerl_sax, Result},
+ close(S);
+ Error ->
+ Self ! {xmerl_sax, {error, {accept, Error}}}
+ end
+ end),
+
+ {ok, SendSocket} = connect(localhost, Port),
+
+ {ok, Binary} = file:read_file(filename:join([datadir(Config), "xmerl_sax_stream_two.xml"])),
+
+ send_chunks(SendSocket, Binary),
+
+ receive
+ {xmerl_sax, {ok, undefined, Rest}} ->
+ <<"\n<?x", _R/binary>> = Rest,
+ io:format("Ok Rest: ~p\n", [Rest])
+ after 5000 ->
+ ct:fail("Timeout")
+ end,
+ ok.
+
+one_document_and_junk(Config) ->
+ Port = 11111,
+
+ {ok, ListenSocket} = listen(Port),
+ Self = self(),
+
+ spawn(
+ fun() ->
+ case catch gen_tcp:accept(ListenSocket) of
+ {ok, S} ->
+ Result = xmerl_sax_parser:stream(<<>>,
+ [{continuation_state, S},
+ {continuation_fun,
+ fun(Sd) ->
+ io:format("Continuation called!!", []),
+ case gen_tcp:recv(Sd, 0) of
+ {ok, Packet} ->
+ io:format("Packet: ~p\n", [Packet]),
+ {Packet, Sd};
+ {error, Reason} ->
+ throw({error, Reason})
+ end
+ end}]),
+ Self ! {xmerl_sax, Result},
+ close(S);
+ Error ->
+ Self ! {xmerl_sax, {error, {accept, Error}}}
+ end
+ end),
+
+ {ok, SendSocket} = connect(localhost, Port),
+
+ {ok, Binary} = file:read_file(filename:join([datadir(Config), "xmerl_sax_stream_one_junk.xml"])),
+
+ send_chunks(SendSocket, Binary),
+
+ receive
+ {xmerl_sax, {ok, undefined, Rest}} ->
+ <<"\nth", _R/binary>> = Rest,
+ io:format("Ok Rest: ~p\n", [Rest])
+ after 10000 ->
+ ct:fail("Timeout")
+ end,
+ ok.
+
+%%----------------------------------------------------------------------
+%% Utility functions
+%%----------------------------------------------------------------------
+listen(Port) ->
+ case catch gen_tcp:listen(Port, [{active, false},
+ binary,
+ {keepalive, true},
+ {reuseaddr,true}]) of
+ {ok, ListenSocket} ->
+ {ok, ListenSocket};
+ {error, Reason} ->
+ {error, {listen, Reason}}
+ end.
+
+close(Socket) ->
+ (catch gen_tcp:close(Socket)).
+
+connect(Host, Port) ->
+ Timeout = 5000,
+ % Options1 = check_options(Options),
+ Options = [binary],
+ case catch gen_tcp:connect(Host, Port, Options, Timeout) of
+ {ok, Socket} ->
+ {ok, Socket};
+ {error, Reason} ->
+ {error, Reason}
+ end.
+
+send_chunks(Socket, Binary) ->
+ BSize = erlang:size(Binary),
+ if
+ BSize > 25 ->
+ <<Head:25/binary, Tail/binary>> = Binary,
+ case gen_tcp:send(Socket, Head) of
+ ok ->
+ timer:sleep(1000),
+ send_chunks(Socket, Tail);
+ {error,closed} ->
+ ok
+ end;
+ true ->
+ gen_tcp:send(Socket, Binary)
+ end.
+
+datadir(Config) ->
+ proplists:get_value(data_dir, Config).
diff --git a/lib/xmerl/test/xmerl_sax_stream_SUITE_data/xmerl_sax_stream_one.xml b/lib/xmerl/test/xmerl_sax_stream_SUITE_data/xmerl_sax_stream_one.xml
new file mode 100644
index 0000000000..30328bb188
--- /dev/null
+++ b/lib/xmerl/test/xmerl_sax_stream_SUITE_data/xmerl_sax_stream_one.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0"?>
+<person>
+<name>
+Arne Andersson
+</name>
+<address>
+<street>
+ Old Road 456
+</street>
+<zip>
+12323
+</zip>
+<city>
+Small City
+</city>
+</address>
+</person>
diff --git a/lib/xmerl/test/xmerl_sax_stream_SUITE_data/xmerl_sax_stream_one_junk.xml b/lib/xmerl/test/xmerl_sax_stream_SUITE_data/xmerl_sax_stream_one_junk.xml
new file mode 100644
index 0000000000..f730a95865
--- /dev/null
+++ b/lib/xmerl/test/xmerl_sax_stream_SUITE_data/xmerl_sax_stream_one_junk.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0"?>
+<person>
+<name>
+Arne Andersson
+</name>
+<address>
+<street>
+ Old Road 456
+</street>
+<zip>
+12323
+</zip>
+<city>
+Small City
+</city>
+</address>
+</person>
+this is junk ......
diff --git a/lib/xmerl/test/xmerl_sax_stream_SUITE_data/xmerl_sax_stream_two.xml b/lib/xmerl/test/xmerl_sax_stream_SUITE_data/xmerl_sax_stream_two.xml
new file mode 100644
index 0000000000..e241a02190
--- /dev/null
+++ b/lib/xmerl/test/xmerl_sax_stream_SUITE_data/xmerl_sax_stream_two.xml
@@ -0,0 +1,34 @@
+<?xml version="1.0"?>
+<person>
+<name>
+Arne Andersson
+</name>
+<address>
+<street>
+ Old Road 456
+</street>
+<zip>
+12323
+</zip>
+<city>
+Small City
+</city>
+</address>
+</person>
+<?xml version="1.0"?>
+<person>
+<name>
+Bertil Bengtson
+</name>
+<address>
+<street>
+ New Road 4
+</street>
+<zip>
+12328
+</zip>
+<city>
+Small City
+</city>
+</address>
+</person>