Diffstat (limited to 'lib')
-rw-r--r--lib/.gitignore8
-rw-r--r--lib/Makefile2
-rw-r--r--lib/asn1/doc/src/asn1_getting_started.xml77
-rw-r--r--lib/asn1/doc/src/asn1ct.xml17
-rw-r--r--lib/asn1/examples/recordnames.txt2
-rw-r--r--lib/asn1/src/asn1_db.erl26
-rw-r--r--lib/asn1/src/asn1_records.hrl23
-rw-r--r--lib/asn1/src/asn1ct.erl226
-rw-r--r--lib/asn1/src/asn1ct_check.erl29
-rw-r--r--lib/asn1/src/asn1ct_constructed_ber_bin_v2.erl300
-rw-r--r--lib/asn1/src/asn1ct_constructed_per.erl900
-rw-r--r--lib/asn1/src/asn1ct_eval_ext.funcs1
-rw-r--r--lib/asn1/src/asn1ct_gen.erl668
-rw-r--r--lib/asn1/src/asn1ct_gen_ber_bin_v2.erl12
-rw-r--r--lib/asn1/src/asn1ct_gen_check.erl191
-rw-r--r--lib/asn1/src/asn1ct_gen_per.erl12
-rw-r--r--lib/asn1/src/asn1ct_imm.erl64
-rw-r--r--lib/asn1/src/asn1ct_value.erl24
-rw-r--r--lib/asn1/src/asn1rtt_ext.erl62
-rw-r--r--lib/asn1/test/Makefile4
-rw-r--r--lib/asn1/test/asn1_SUITE.erl166
-rw-r--r--lib/asn1/test/asn1_SUITE_data/Maps.asn117
-rw-r--r--lib/asn1/test/asn1_SUITE_data/SeqExtension.asn111
-rw-r--r--lib/asn1/test/asn1_SUITE_data/nbapsystem/NBAP-PDU-Descriptions.asn (renamed from lib/asn1/test/asn1_SUITE_data/nbapsystem/NBAP-PDU-Discriptions.asn)2
-rw-r--r--lib/asn1/test/asn1_SUITE_data/test_records.erl2
-rw-r--r--lib/asn1/test/asn1_app_SUITE.erl (renamed from lib/asn1/test/asn1_app_test.erl)58
-rw-r--r--lib/asn1/test/asn1_appup_test.erl58
-rw-r--r--lib/asn1/test/asn1_test_lib.erl105
-rw-r--r--lib/asn1/test/h323test.erl29
-rw-r--r--lib/asn1/test/testContextSwitchingTypes.erl1
-rw-r--r--lib/asn1/test/testInfObj.erl1
-rw-r--r--lib/asn1/test/testMaps.erl50
-rw-r--r--lib/asn1/test/testMultipleLevels.erl6
-rw-r--r--lib/asn1/test/testNBAPsystem.erl14
-rw-r--r--lib/asn1/test/testRfcs.erl50
-rw-r--r--lib/asn1/test/testSeqExtension.erl38
-rw-r--r--lib/asn1/test/testTCAP.erl1
-rw-r--r--lib/asn1/test/testTimer.erl131
-rw-r--r--lib/asn1/test/testUniqueObjectSets.erl1
-rw-r--r--lib/asn1/test/test_compile_options.erl28
-rw-r--r--lib/compiler/doc/src/compile.xml11
-rw-r--r--lib/compiler/src/beam_asm.erl39
-rw-r--r--lib/compiler/src/beam_dict.erl12
-rw-r--r--lib/compiler/src/compile.erl34
-rw-r--r--lib/compiler/test/compile_SUITE.erl40
-rw-r--r--lib/compiler/test/compile_SUITE_data/simple.erl5
-rw-r--r--lib/compiler/test/lc_SUITE.erl2
-rw-r--r--lib/compiler/test/map_SUITE.erl1
-rw-r--r--lib/crypto/c_src/crypto.c28
-rw-r--r--lib/crypto/doc/src/crypto.xml2
-rw-r--r--lib/crypto/src/crypto.erl12
-rw-r--r--lib/debugger/test/map_SUITE.erl22
-rw-r--r--lib/dialyzer/RELEASE_NOTES2
-rw-r--r--lib/dialyzer/src/dialyzer_analysis_callgraph.erl43
-rw-r--r--lib/dialyzer/src/dialyzer_callgraph.erl111
-rw-r--r--lib/dialyzer/src/dialyzer_cl.erl38
-rw-r--r--lib/dialyzer/src/dialyzer_codeserver.erl117
-rw-r--r--lib/dialyzer/src/dialyzer_contracts.erl82
-rw-r--r--lib/dialyzer/src/dialyzer_coordinator.erl65
-rw-r--r--lib/dialyzer/src/dialyzer_dataflow.erl2
-rw-r--r--lib/dialyzer/src/dialyzer_plt.erl36
-rw-r--r--lib/dialyzer/src/dialyzer_succ_typings.erl8
-rw-r--r--lib/dialyzer/src/dialyzer_typesig.erl103
-rw-r--r--lib/dialyzer/src/dialyzer_utils.erl115
-rw-r--r--lib/dialyzer/src/dialyzer_worker.erl53
-rw-r--r--lib/dialyzer/test/behaviour_SUITE_data/dialyzer_options2
-rw-r--r--lib/dialyzer/test/map_SUITE_data/dialyzer_options1
-rw-r--r--lib/dialyzer/test/map_SUITE_data/results/map_galore8
-rw-r--r--lib/dialyzer/test/map_SUITE_data/src/map_galore.erl24
-rw-r--r--lib/dialyzer/test/opaque_SUITE_data/dialyzer_options2
-rw-r--r--lib/dialyzer/test/opaque_SUITE_data/src/recrec/dialyzer_dataflow.erl2
-rw-r--r--lib/dialyzer/test/options1_SUITE_data/src/compiler/beam_disasm.erl2
-rw-r--r--lib/dialyzer/test/options1_SUITE_data/src/compiler/cerl_inline.erl6
-rw-r--r--lib/dialyzer/test/options1_SUITE_data/src/compiler/rec_env.erl2
-rw-r--r--lib/dialyzer/test/options1_SUITE_data/src/compiler/sys_pre_expand.erl2
-rw-r--r--lib/dialyzer/test/options1_SUITE_data/src/compiler/v3_codegen.erl2
-rw-r--r--lib/dialyzer/test/plt_SUITE.erl23
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct.erl4
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_check.erl6
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser2.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/inets/ftp.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/inets/http.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/inets/http_lib.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_manager.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_manager.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_parse.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/inets/jnets_httpd.hrl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_mnesia.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_htaccess.erl10
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_range.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_responsecontrol.erl4
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_bup.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint.erl4
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_loader.erl4
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_locker.erl8
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_monitor.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_schema.erl2
-rw-r--r--lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_tm.erl2
-rw-r--r--lib/dialyzer/test/small_SUITE_data/src/tuple1.erl2
-rw-r--r--lib/diameter/include/diameter_gen.hrl2
-rw-r--r--lib/diameter/src/base/diameter_callback.erl2
-rw-r--r--lib/diameter/src/base/diameter_config.erl2
-rw-r--r--lib/diameter/src/base/diameter_peer_fsm.erl2
-rw-r--r--lib/diameter/src/info/diameter_info.erl2
-rw-r--r--lib/diameter/src/transport/diameter_sctp.erl2
-rw-r--r--lib/diameter/test/diameter_pool_SUITE.erl2
-rw-r--r--lib/edoc/src/edoc_tags.erl2
-rw-r--r--lib/eldap/test/README2
-rw-r--r--lib/erl_interface/src/README2
-rw-r--r--lib/erl_interface/src/legacy/erl_marshal.c4
-rw-r--r--lib/erl_interface/src/misc/ei_locking.c4
-rw-r--r--lib/erl_interface/test/ei_decode_SUITE.erl2
-rw-r--r--lib/erl_interface/test/erl_eterm_SUITE.erl2
-rw-r--r--lib/eunit/doc/src/notes.xml2
-rw-r--r--lib/hipe/cerl/cerl_to_icode.erl2
-rw-r--r--lib/hipe/cerl/erl_types.erl119
-rw-r--r--lib/hipe/doc/src/notes.xml2
-rw-r--r--lib/hipe/flow/cfg.inc2
-rw-r--r--lib/hipe/flow/hipe_dominators.erl2
-rw-r--r--lib/hipe/llvm/hipe_rtl_to_llvm.erl4
-rw-r--r--lib/hipe/main/hipe.erl2
-rw-r--r--lib/hipe/opt/hipe_schedule.erl4
-rw-r--r--lib/hipe/opt/hipe_spillmin_color.erl2
-rw-r--r--lib/hipe/test/maps_SUITE_data/maps_warn_pair_key_overloaded.erl1
-rw-r--r--lib/inets/doc/src/notes.xml17
-rw-r--r--lib/inets/src/ftp/ftp.erl53
-rw-r--r--lib/inets/src/http_server/httpd_request_handler.erl4
-rw-r--r--lib/inets/src/inets_app/inets.appup.src4
-rw-r--r--lib/inets/test/httpd_test_data/server_root/conf/httpd.conf2
-rw-r--r--lib/inets/test/old_httpd_SUITE_data/server_root/conf/httpd.conf2
-rw-r--r--lib/inets/vsn.mk2
-rw-r--r--lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractConnection.java2
-rw-r--r--lib/jinterface/java_src/com/ericsson/otp/erlang/OtpMbox.java2
-rw-r--r--lib/kernel/doc/src/kernel_app.xml81
-rw-r--r--lib/kernel/doc/src/notes.xml4
-rw-r--r--lib/kernel/doc/src/os.xml31
-rw-r--r--lib/kernel/include/inet.hrl2
-rw-r--r--lib/kernel/src/Makefile1
-rw-r--r--lib/kernel/src/code.erl12
-rw-r--r--lib/kernel/src/dist_ac.erl10
-rw-r--r--lib/kernel/src/erl_signal_handler.erl57
-rw-r--r--lib/kernel/src/error_logger.erl8
-rw-r--r--lib/kernel/src/file.erl2
-rw-r--r--lib/kernel/src/inet_parse.erl4
-rw-r--r--lib/kernel/src/inet_udp.erl6
-rw-r--r--lib/kernel/src/kernel.app.src1
-rw-r--r--lib/kernel/src/kernel.erl13
-rw-r--r--lib/kernel/src/os.erl11
-rw-r--r--lib/kernel/src/rpc.erl14
-rw-r--r--lib/kernel/test/application_SUITE.erl2
-rw-r--r--lib/kernel/test/code_SUITE.erl2
-rw-r--r--lib/kernel/test/erl_distribution_SUITE.erl4
-rw-r--r--lib/kernel/test/erl_distribution_wb_SUITE.erl2
-rw-r--r--lib/kernel/test/error_logger_SUITE.erl13
-rw-r--r--lib/kernel/test/file_SUITE.erl2
-rw-r--r--lib/kernel/test/file_SUITE_data/realmen.html4
-rw-r--r--lib/kernel/test/multi_load_SUITE.erl8
-rw-r--r--lib/kernel/test/rpc_SUITE.erl12
-rw-r--r--lib/megaco/src/text/megaco_text_gen_prev3a.hrl2
-rw-r--r--lib/megaco/src/text/megaco_text_gen_prev3b.hrl2
-rw-r--r--lib/megaco/src/text/megaco_text_gen_prev3c.hrl2
-rw-r--r--lib/mnesia/doc/src/Mnesia_chap5.xmlsrc5
-rw-r--r--lib/mnesia/doc/src/notes.xml18
-rw-r--r--lib/mnesia/src/Makefile1
-rw-r--r--lib/mnesia/src/mnesia.app.src1
-rw-r--r--lib/mnesia/src/mnesia.erl1
-rw-r--r--lib/mnesia/src/mnesia.hrl7
-rw-r--r--lib/mnesia/src/mnesia_checkpoint.erl7
-rw-r--r--lib/mnesia/src/mnesia_event.erl3
-rw-r--r--lib/mnesia/src/mnesia_frag.erl76
-rw-r--r--lib/mnesia/src/mnesia_frag_old_hash.erl133
-rw-r--r--lib/mnesia/src/mnesia_monitor.erl2
-rw-r--r--lib/mnesia/src/mnesia_schema.erl2
-rw-r--r--lib/mnesia/test/mnesia_evil_backup.erl43
-rw-r--r--lib/mnesia/vsn.mk2
-rw-r--r--lib/observer/src/cdv_bin_cb.erl2
-rw-r--r--lib/observer/src/cdv_detail_wx.erl2
-rw-r--r--lib/observer/src/observer_app_wx.erl8
-rw-r--r--lib/observer/src/observer_lib.erl12
-rw-r--r--lib/observer/src/observer_port_wx.erl27
-rw-r--r--lib/observer/src/observer_procinfo.erl2
-rw-r--r--lib/observer/src/observer_tv_wx.erl9
-rw-r--r--lib/observer/src/observer_wx.erl7
-rw-r--r--lib/observer/test/observer_SUITE.erl17
-rw-r--r--lib/orber/src/orber_iiop.hrl4
-rw-r--r--lib/orber/src/orber_initial_references.erl2
-rw-r--r--lib/orber/src/orber_objectkeys.erl2
-rw-r--r--lib/os_mon/src/memsup.erl1
-rw-r--r--lib/parsetools/src/leex.erl4
-rw-r--r--lib/percept/AUTHORS4
-rw-r--r--lib/percept/Makefile35
-rw-r--r--lib/percept/c_src/.gitignore0
-rw-r--r--lib/percept/doc/html/.gitignore0
-rw-r--r--lib/percept/doc/man3/.gitignore0
-rw-r--r--lib/percept/doc/pdf/.gitignore0
-rw-r--r--lib/percept/doc/src/Makefile190
-rw-r--r--lib/percept/doc/src/book.xml52
-rw-r--r--lib/percept/doc/src/egd_ug.xmlsrc90
-rw-r--r--lib/percept/doc/src/fascicules.xml18
-rw-r--r--lib/percept/doc/src/img.erl50
-rw-r--r--lib/percept/doc/src/img_esi.erl25
-rw-r--r--lib/percept/doc/src/img_esi_result.gifbin374 -> 0 bytes
-rw-r--r--lib/percept/doc/src/ipc_tree.erl30
-rw-r--r--lib/percept/doc/src/notes.xml495
-rw-r--r--lib/percept/doc/src/part.xml47
-rw-r--r--lib/percept/doc/src/part_notes.xml41
-rw-r--r--lib/percept/doc/src/percept_compare.gifbin241343 -> 0 bytes
-rw-r--r--lib/percept/doc/src/percept_examples.html11
-rw-r--r--lib/percept/doc/src/percept_overview.gifbin158719 -> 0 bytes
-rw-r--r--lib/percept/doc/src/percept_processes.gifbin182273 -> 0 bytes
-rw-r--r--lib/percept/doc/src/percept_processinfo.gifbin135512 -> 0 bytes
-rw-r--r--lib/percept/doc/src/percept_ug.xmlsrc223
-rw-r--r--lib/percept/doc/src/ref_man.xml48
-rw-r--r--lib/percept/doc/src/sorter.erl41
-rw-r--r--lib/percept/doc/src/test1.gifbin951 -> 0 bytes
-rw-r--r--lib/percept/doc/src/test2.gifbin1035 -> 0 bytes
-rw-r--r--lib/percept/doc/src/test3.gifbin2382 -> 0 bytes
-rw-r--r--lib/percept/doc/src/test4.gifbin2294 -> 0 bytes
-rw-r--r--lib/percept/doc/stylesheet.css39
-rw-r--r--lib/percept/ebin/.gitignore0
-rw-r--r--lib/percept/include/.gitignore0
-rw-r--r--lib/percept/info2
-rw-r--r--lib/percept/priv/Makefile97
-rw-r--r--lib/percept/priv/fonts/6x11_latin1.wingsfontbin2016 -> 0 bytes
-rw-r--r--lib/percept/priv/logs/.gitignore0
-rw-r--r--lib/percept/priv/obj/.gitignore0
-rw-r--r--lib/percept/priv/server_root/cgi-bin/.gitignore0
-rw-r--r--lib/percept/priv/server_root/conf/mime.types462
-rw-r--r--lib/percept/priv/server_root/css/percept.css162
-rw-r--r--lib/percept/priv/server_root/htdocs/index.html41
-rw-r--r--lib/percept/priv/server_root/images/nav.pngbin560 -> 0 bytes
-rw-r--r--lib/percept/priv/server_root/images/white.pngbin69 -> 0 bytes
-rw-r--r--lib/percept/priv/server_root/scripts/percept_area_select.js182
-rw-r--r--lib/percept/priv/server_root/scripts/percept_error_handler.js26
-rw-r--r--lib/percept/priv/server_root/scripts/percept_select_all.js28
-rw-r--r--lib/percept/src/Makefile108
-rw-r--r--lib/percept/src/egd.erl275
-rw-r--r--lib/percept/src/egd.hrl45
-rw-r--r--lib/percept/src/egd_font.erl173
-rw-r--r--lib/percept/src/egd_png.erl105
-rw-r--r--lib/percept/src/egd_primitives.erl412
-rw-r--r--lib/percept/src/egd_render.erl664
-rw-r--r--lib/percept/src/percept.app.src45
-rw-r--r--lib/percept/src/percept.appup.src22
-rw-r--r--lib/percept/src/percept.erl337
-rw-r--r--lib/percept/src/percept.hrl53
-rw-r--r--lib/percept/src/percept_analyzer.erl368
-rw-r--r--lib/percept/src/percept_db.erl780
-rw-r--r--lib/percept/src/percept_graph.erl134
-rw-r--r--lib/percept/src/percept_html.erl707
-rw-r--r--lib/percept/src/percept_image.erl316
-rw-r--r--lib/percept/test/Makefile91
-rw-r--r--lib/percept/test/egd_SUITE.erl389
-rw-r--r--lib/percept/test/ipc_tree.erl49
-rw-r--r--lib/percept/test/percept.cover2
-rw-r--r--lib/percept/test/percept.spec1
-rw-r--r--lib/percept/test/percept_SUITE.erl126
-rw-r--r--lib/percept/test/percept_SUITE_data/ipc-dist.datbin2098105 -> 0 bytes
-rw-r--r--lib/percept/test/percept_db_SUITE.erl55
-rw-r--r--lib/percept/vsn.mk1
-rw-r--r--lib/public_key/asn1/PKCS-8.asn12
-rw-r--r--lib/public_key/doc/src/public_key.xml38
-rw-r--r--lib/public_key/doc/src/using_public_key.xml253
-rw-r--r--lib/public_key/src/public_key.erl190
-rw-r--r--lib/public_key/test/public_key_SUITE.erl145
-rw-r--r--lib/public_key/test/public_key_SUITE_data/pkix_verify_hostname_cn.pem17
-rw-r--r--lib/public_key/test/public_key_SUITE_data/pkix_verify_hostname_subjAltName.pem14
-rw-r--r--lib/public_key/test/public_key_SUITE_data/verify_hostname.conf16
-rw-r--r--lib/runtime_tools/doc/src/LTTng.xml2
-rw-r--r--lib/runtime_tools/src/Makefile1
-rw-r--r--lib/runtime_tools/src/percept_profile.erl195
-rw-r--r--lib/runtime_tools/src/runtime_tools.app.src6
-rw-r--r--lib/sasl/doc/src/systools.xml6
-rw-r--r--lib/sasl/src/release_handler.erl6
-rw-r--r--lib/sasl/src/systools_make.erl172
-rw-r--r--lib/sasl/src/systools_relup.erl150
-rw-r--r--lib/sasl/test/systools_SUITE.erl116
-rw-r--r--lib/snmp/test/snmp_manager_test.erl12
-rw-r--r--lib/ssh/doc/src/ssh.xml58
-rw-r--r--lib/ssh/doc/src/ssh_app.xml9
-rw-r--r--lib/ssh/src/ssh.app.src1
-rw-r--r--lib/ssh/src/ssh.erl47
-rw-r--r--lib/ssh/src/ssh_bits.erl46
-rw-r--r--lib/ssh/src/ssh_cli.erl18
-rw-r--r--lib/ssh/src/ssh_connection_handler.erl48
-rw-r--r--lib/ssh/src/ssh_dbg.erl66
-rw-r--r--lib/ssh/src/ssh_sftp.erl6
-rw-r--r--lib/ssh/src/ssh_sftpd.erl55
-rw-r--r--lib/ssh/src/ssh_transport.erl281
-rw-r--r--lib/ssh/src/ssh_transport.hrl13
-rw-r--r--lib/ssh/test/property_test/ssh_eqc_encode_decode.erl5
-rw-r--r--lib/ssh/test/ssh_algorithms_SUITE.erl21
-rw-r--r--lib/ssh/test/ssh_basic_SUITE.erl76
-rw-r--r--lib/ssh/test/ssh_benchmark_SUITE.erl20
-rw-r--r--lib/ssh/test/ssh_key_cb.erl4
-rw-r--r--lib/ssh/test/ssh_key_cb_options.erl2
-rw-r--r--lib/ssh/test/ssh_options_SUITE.erl27
-rw-r--r--lib/ssh/test/ssh_protocol_SUITE.erl86
-rw-r--r--lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test4
-rw-r--r--lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test.moduli5
-rw-r--r--lib/ssh/test/ssh_sftp_SUITE_data/ssh_host_rsa_key16
-rw-r--r--lib/ssh/test/ssh_sftp_SUITE_data/ssh_host_rsa_key.pub5
-rw-r--r--lib/ssh/test/ssh_sftpd_SUITE.erl161
-rw-r--r--lib/ssh/test/ssh_sftpd_erlclient_SUITE.erl2
-rw-r--r--lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/id_rsa15
-rw-r--r--lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/ssh_host_rsa_key16
-rw-r--r--lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/ssh_host_rsa_key.pub5
-rw-r--r--lib/ssh/test/ssh_test_lib.erl7
-rw-r--r--lib/ssh/test/ssh_to_openssh_SUITE.erl4
-rw-r--r--lib/ssh/test/ssh_trpt_test_lib.erl5
-rw-r--r--lib/ssl/src/Makefile15
-rw-r--r--lib/ssl/src/dtls_record.erl2
-rw-r--r--lib/ssl/src/ssl.app.src13
-rw-r--r--lib/ssl/src/ssl.erl2
-rw-r--r--lib/ssl/src/ssl_admin_sup.erl95
-rw-r--r--lib/ssl/src/ssl_certificate.erl16
-rw-r--r--lib/ssl/src/ssl_config.erl52
-rw-r--r--lib/ssl/src/ssl_connection.erl41
-rw-r--r--lib/ssl/src/ssl_connection_sup.erl101
-rw-r--r--lib/ssl/src/ssl_crl.erl76
-rw-r--r--lib/ssl/src/ssl_dist_admin_sup.erl74
-rw-r--r--lib/ssl/src/ssl_dist_connection_sup.erl79
-rw-r--r--lib/ssl/src/ssl_dist_sup.erl42
-rw-r--r--lib/ssl/src/ssl_handshake.erl58
-rw-r--r--lib/ssl/src/ssl_manager.erl209
-rw-r--r--lib/ssl/src/ssl_pem_cache.erl266
-rw-r--r--lib/ssl/src/ssl_pkix_db.erl72
-rw-r--r--lib/ssl/src/ssl_record.erl4
-rw-r--r--lib/ssl/src/ssl_sup.erl86
-rw-r--r--lib/ssl/src/tls_handshake.erl69
-rw-r--r--lib/ssl/src/tls_v1.erl2
-rw-r--r--lib/ssl/test/make_certs.erl4
-rw-r--r--lib/ssl/test/ssl_basic_SUITE.erl4
-rw-r--r--lib/ssl/test/ssl_bench_SUITE.erl10
-rw-r--r--lib/ssl/test/ssl_handshake_SUITE.erl9
-rw-r--r--lib/ssl/test/ssl_pem_cache_SUITE.erl4
-rw-r--r--lib/ssl/test/ssl_test_lib.erl13
-rw-r--r--lib/stdlib/doc/src/c.xml26
-rw-r--r--lib/stdlib/doc/src/erl_tar.xml72
-rw-r--r--lib/stdlib/doc/src/filelib.xml54
-rw-r--r--lib/stdlib/doc/src/filename.xml10
-rw-r--r--lib/stdlib/doc/src/shell.xml10
-rw-r--r--lib/stdlib/src/Makefile4
-rw-r--r--lib/stdlib/src/beam_lib.erl54
-rw-r--r--lib/stdlib/src/binary.erl2
-rw-r--r--lib/stdlib/src/c.erl254
-rw-r--r--lib/stdlib/src/dets.erl7
-rw-r--r--lib/stdlib/src/edlin_expand.erl95
-rw-r--r--lib/stdlib/src/erl_expand_records.erl18
-rw-r--r--lib/stdlib/src/erl_tar.erl2562
-rw-r--r--lib/stdlib/src/erl_tar.hrl394
-rw-r--r--lib/stdlib/src/ets.erl18
-rw-r--r--lib/stdlib/src/filelib.erl122
-rw-r--r--lib/stdlib/src/filename.erl109
-rw-r--r--lib/stdlib/src/gen_fsm.erl2
-rw-r--r--lib/stdlib/src/io_lib.erl2
-rw-r--r--lib/stdlib/src/io_lib_format.erl5
-rw-r--r--lib/stdlib/src/otp_internal.erl17
-rw-r--r--lib/stdlib/src/proplists.erl2
-rw-r--r--lib/stdlib/src/shell_default.erl3
-rw-r--r--lib/stdlib/test/base64_SUITE.erl2
-rw-r--r--lib/stdlib/test/beam_lib_SUITE.erl45
-rw-r--r--lib/stdlib/test/dets_SUITE.erl9
-rw-r--r--lib/stdlib/test/edlin_expand_SUITE.erl79
-rw-r--r--lib/stdlib/test/erl_lint_SUITE.erl51
-rw-r--r--lib/stdlib/test/erl_scan_SUITE.erl15
-rw-r--r--lib/stdlib/test/ets_SUITE.erl217
-rw-r--r--lib/stdlib/test/ets_tough_SUITE.erl58
-rw-r--r--lib/stdlib/test/filelib_SUITE.erl55
-rw-r--r--lib/stdlib/test/filename_SUITE.erl6
-rw-r--r--lib/stdlib/test/io_SUITE.erl25
-rw-r--r--lib/stdlib/test/lists_SUITE.erl2
-rw-r--r--lib/stdlib/test/random_iolist.erl38
-rw-r--r--lib/stdlib/test/random_unicode_list.erl38
-rw-r--r--lib/stdlib/test/re_testoutput1_replacement_test.erl2
-rw-r--r--lib/stdlib/test/re_testoutput1_split_test.erl2
-rw-r--r--lib/stdlib/test/run_pcre_tests.erl73
-rw-r--r--lib/stdlib/test/shell_SUITE.erl2
-rw-r--r--lib/stdlib/test/tar_SUITE.erl178
-rw-r--r--lib/stdlib/test/tar_SUITE_data/bsd.tarbin0 -> 9216 bytes
-rw-r--r--lib/stdlib/test/tar_SUITE_data/gnu.tarbin0 -> 30720 bytes
-rw-r--r--lib/stdlib/test/tar_SUITE_data/pax_mtime.tarbin0 -> 10240 bytes
-rw-r--r--lib/stdlib/test/tar_SUITE_data/sparse00.tarbin0 -> 61440 bytes
-rw-r--r--lib/stdlib/test/tar_SUITE_data/sparse01.tarbin0 -> 61440 bytes
-rw-r--r--lib/stdlib/test/tar_SUITE_data/sparse01_empty.tarbin0 -> 10240 bytes
-rw-r--r--lib/stdlib/test/tar_SUITE_data/sparse10.tarbin0 -> 61440 bytes
-rw-r--r--lib/stdlib/test/tar_SUITE_data/sparse10_empty.tarbin0 -> 10240 bytes
-rw-r--r--lib/stdlib/test/tar_SUITE_data/star.tarbin0 -> 10240 bytes
-rw-r--r--lib/stdlib/test/tar_SUITE_data/v7.tarbin0 -> 10240 bytes
-rw-r--r--lib/syntax_tools/src/igor.erl12
-rw-r--r--lib/tools/emacs/erlang-edoc.el12
-rw-r--r--lib/tools/emacs/erlang-eunit.el48
-rw-r--r--lib/tools/emacs/erlang-pkg.el4
-rw-r--r--lib/tools/emacs/erlang-start.el24
-rw-r--r--lib/tools/emacs/erlang-test.el91
-rw-r--r--lib/tools/emacs/erlang.el935
-rw-r--r--lib/tools/emacs/erldoc.el10
-rw-r--r--lib/typer/src/typer.erl36
-rw-r--r--lib/wx/api_gen/gen_util.erl2
-rw-r--r--lib/wx/api_gen/wx_gen_cpp.erl2
-rw-r--r--lib/xmerl/src/xmerl_regexp.erl2
-rw-r--r--lib/xmerl/src/xmerl_scan.erl9
-rw-r--r--lib/xmerl/test/xmerl_SUITE.erl33
412 files changed, 10042 insertions, 13045 deletions
diff --git a/lib/.gitignore b/lib/.gitignore
index b1da61706d..283393faa9 100644
--- a/lib/.gitignore
+++ b/lib/.gitignore
@@ -526,14 +526,6 @@
/orber/src/oe_erlang.erl
/orber/src/oe_erlang.hrl
-# percept
-
-/percept/doc/src/egd.xml
-/percept/doc/src/egd_ug.xml
-/percept/doc/src/percept.xml
-/percept/doc/src/percept_profile.xml
-/percept/doc/src/percept_ug.xml
-
# snmp
snmp/doc/intex.html
diff --git a/lib/Makefile b/lib/Makefile
index a7f3c9192f..4740e6eb59 100644
--- a/lib/Makefile
+++ b/lib/Makefile
@@ -35,7 +35,7 @@ ALL_ERLANG_APPLICATIONS = xmerl edoc erl_docgen snmp otp_mibs erl_interface \
public_key ssl observer odbc diameter \
cosTransactions cosEvent cosTime cosNotification \
cosProperty cosFileTransfer cosEventDomain et megaco \
- eunit ssh typer percept eldap dialyzer hipe
+ eunit ssh typer eldap dialyzer hipe
ifdef BUILD_ALL
ERLANG_APPLICATIONS += $(ALL_ERLANG_APPLICATIONS)
diff --git a/lib/asn1/doc/src/asn1_getting_started.xml b/lib/asn1/doc/src/asn1_getting_started.xml
index d40b294c39..d2b73d63c3 100644
--- a/lib/asn1/doc/src/asn1_getting_started.xml
+++ b/lib/asn1/doc/src/asn1_getting_started.xml
@@ -187,6 +187,14 @@ erlc -o ../asnfiles -I ../asnfiles -I /usr/local/standards/asn1 Person.asn</pre>
<item>
<p>DER encoding rule. Only when using option <c>-ber</c>.</p>
</item>
+ <tag><c>+maps</c></tag>
+ <item>
+ <p>Use maps instead of records to represent the <c>SEQUENCE</c> and
+ <c>SET</c> types. No <c>.hrl</c> files will be generated.
+ See the Section <seealso marker="asn1_getting_started#MAP_SEQ_SET">
+ Map representation for SEQUENCE and SET</seealso>
+ for more information.</p>
+ </item>
<tag><c>+asn1config</c></tag>
<item>
<p>This functionality works together with option
@@ -766,8 +774,11 @@ Pdu ::= SEQUENCE {
b REAL,
c OBJECT IDENTIFIER,
d NULL } </pre>
- <p>This is a 4-component structure called <c>Pdu</c>. The record format
- is the major format for representation of <c>SEQUENCE</c> in Erlang.
+ <p>This is a 4-component structure called <c>Pdu</c>. By default,
+ a <c>SEQUENCE</c> is represented by a record in Erlang.
+ It can also be represented as a map; see
+ <seealso marker="asn1_getting_started#MAP_SEQ_SET">
+ Map representation for SEQUENCE and SET</seealso>.
For each <c>SEQUENCE</c> and <c>SET</c> in an ASN.1 module an Erlang
record declaration is generated. For <c>Pdu</c>, a record
like the following is defined:</p>
@@ -878,6 +889,48 @@ SExt ::= SEQUENCE {
</section>
<section>
+ <marker id="MAP_SEQ_SET"></marker>
+ <title>Map representation for SEQUENCE and SET</title>
+ <p>If the ASN.1 module has been compiled with option <c>maps</c>,
+ the types <c>SEQUENCE</c> and <c>SET</c> are represented as maps.</p>
+ <p>In the following example, this ASN.1 specification is used:</p>
+ <pre>
+File DEFINITIONS AUTOMATIC TAGS ::=
+BEGIN
+Seq1 ::= SEQUENCE {
+ a INTEGER DEFAULT 42,
+ b BOOLEAN OPTIONAL,
+ c IA5String
+}
+END </pre>
+
+ <p>Optional fields are to be omitted from the map if they have
+ no value:</p>
+
+ <pre>
+1> <input>asn1ct:compile('File', [per,maps]).</input>
+ok
+2> <input>{ok,E} = 'File':encode('Seq1', #{a=>0,c=>"string"}).</input>
+{ok,&lt;&lt;128,1,0,6,115,116,114,105,110,103&gt;&gt;} </pre>
+
+ <p>When decoding, optional fields will be omitted from the map:</p>
+
+ <pre>
+3> <input>'File':decode('Seq1', E).</input>
+{ok,#{a => 0,c => "string"}} </pre>
+
+ <p>Default values can be omitted from the map:</p>
+ <pre>
+4> <input>{ok,E2} = 'File':encode('Seq1', #{c=>"string"}).</input>
+{ok,&lt;&lt;0,6,115,116,114,105,110,103&gt;&gt;}
+5> <input>'File':decode('Seq1', E2).</input>
+{ok,#{a => 42,c => "string"}} </pre>
+
+ <note><p>It is not allowed to use the atoms <c>asn1_NOVALUE</c> and
+ <c>asn1_DEFAULT</c> with maps.</p></note>
+ </section>
+
+ <section>
<marker id="CHOICE"></marker>
<title>CHOICE</title>
<p>The type <c>CHOICE</c> is a space saver and is similar to the
@@ -1004,11 +1057,16 @@ T ::= CHOICE {
<section>
<title>Naming of Records in .hrl Files</title>
+ <p>When the option <c>maps</c> is given, no <c>.hrl</c> files
+ will be generated. The rest of this section describes the behavior
+ of the compiler when <c>maps</c> is not used.</p>
+
<p>When an ASN.1 specification is compiled, all defined types of type
- <c>SET</c> or <c>SEQUENCE</c> result in a corresponding record in the
- generated <c>.hrl</c> file. This is because the values for
- <c>SET</c> and <c>SEQUENCE</c> are represented as records as
- mentioned earlier.</p>
+ <c>SET</c> or <c>SEQUENCE</c> result in a corresponding record in the
+ generated <c>.hrl</c> file. This is because the values for
+ <c>SET</c> and <c>SEQUENCE</c> are represented as records
+ by default.</p>
+
<p>Some special cases of this functionality are presented in the
next section.</p>
@@ -1144,9 +1202,10 @@ SS ::= SET {
<p>This example shows that a function is generated by the compiler
that returns a valid Erlang representation of the value, although
the value is of a complex type.</p>
- <p>Furthermore, a macro is generated for each value in the <c>.hrl</c>
- file. So, the defined value <c>tt</c> can also be extracted by
- <c>?tt</c> in application code.</p>
+ <p>Furthermore, if the option <c>maps</c> is not used,
+ a macro is generated for each value in the <c>.hrl</c>
+ file. So, the defined value <c>tt</c> can also be extracted by
+ <c>?tt</c> in application code.</p>
</section>
<section>
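
Because OPTIONAL fields are simply absent from a decoded map (see the Seq1 example above), application code typically probes for them with maps:get/3 or a map pattern instead of comparing against asn1_NOVALUE as in the record representation. A minimal sketch, assuming 'File' was compiled with [per,maps] as in the example; describe_seq1/1 is a made-up helper, not generated code:

    %% Hypothetical caller-side helper for the decoded 'Seq1' map from the
    %% example above; not part of the generated module.
    describe_seq1(Encoded) ->
        {ok, Map} = 'File':decode('Seq1', Encoded),
        A = maps:get(a, Map),            %% DEFAULT component: filled in on decode
        B = maps:get(b, Map, undefined), %% OPTIONAL component: may be absent
        C = maps:get(c, Map),            %% mandatory component: always present
        {A, B, C}.
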
diff --git a/lib/asn1/doc/src/asn1ct.xml b/lib/asn1/doc/src/asn1ct.xml
index ebe1ce44dc..859d6a50bb 100644
--- a/lib/asn1/doc/src/asn1ct.xml
+++ b/lib/asn1/doc/src/asn1ct.xml
@@ -170,11 +170,24 @@ File3.asn</pre>
as for <c>ber</c>.
</p>
</item>
+ <tag><c>maps</c></tag>
+ <item>
+ <p>This option changes the representation of the types
+ <c>SEQUENCE</c> and <c>SET</c> to use maps (instead of
+ records). This option also suppresses the generation of
+ <c>.hrl</c> files.</p>
+ <p>For details, see Section
+ <seealso marker="asn1_getting_started#MAP_SEQ_SET">
+ Map representation for SEQUENCE and SET</seealso>
+ in the User's Guide.
+ </p>
+ </item>
<tag><c>compact_bit_string</c></tag>
<item>
<p>
The <c>BIT STRING</c> type is decoded to "compact notation".
<em>This option is not recommended for new code.</em>
+ This option cannot be combined with the option <c>maps</c>.
</p>
<p>For details, see Section
<seealso marker="asn1_getting_started#BIT STRING">
@@ -188,6 +201,7 @@ File3.asn</pre>
The <c>BIT STRING</c> type is decoded to the legacy
format, that is, a list of zeroes and ones.
<em>This option is not recommended for new code.</em>
+ This option cannot be combined with the option <c>maps</c>.
</p>
<p>For details, see Section
<seealso marker="asn1_getting_started#BIT STRING">BIT STRING</seealso>
@@ -202,7 +216,8 @@ File3.asn</pre>
marker="asn1_getting_started#BIT STRING">BIT STRING</seealso> and Section
<seealso marker="asn1_getting_started#OCTET STRING">OCTET
STRING</seealso> in the User's Guide.</p>
- <p><em>This option is not recommended for new code.</em></p>
+ <p><em>This option is not recommended for new code.</em>
+ This option cannot be combined with the option <c>maps</c>.</p>
</item>
<tag><c>{n2n, EnumTypeName}</c></tag>
<item>
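
The restrictions above (maps cannot be combined with compact_bit_string, legacy_bit_string, or legacy_erlang_types) are enforced by the compiler itself; the check_maps_option/1 clauses added further down in this diff exit with {error,{asn1,Message}} when they are violated. A small, hypothetical pre-check that a build script could run before calling asn1ct:compile/2, assuming plain atom options:

    %% Sketch only: mirrors the documented restriction, not the compiler's
    %% internal check.
    check_asn1_opts(Opts) ->
        Legacy = [compact_bit_string, legacy_bit_string, legacy_erlang_types],
        case lists:member(maps, Opts) andalso
             lists:any(fun(O) -> lists:member(O, Opts) end, Legacy) of
            true  -> {error, maps_combined_with_legacy_option};
            false -> ok
        end.
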
diff --git a/lib/asn1/examples/recordnames.txt b/lib/asn1/examples/recordnames.txt
index 78e30ab510..9b890b4aa7 100644
--- a/lib/asn1/examples/recordnames.txt
+++ b/lib/asn1/examples/recordnames.txt
@@ -1,6 +1,6 @@
For each ASN1 types SET and SEQUENCE a record is generated in the .hrl
file with the same name as the corresponding type.
-A decoded value is also returned as a record with the apropriate name.
+A decoded value is also returned as a record with the appropriate name.
An internally defined type as the type in component 'a' in the
following example will result in a record with name 'Seq_a':
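
A sketch in that spirit (the actual example in recordnames.txt lies outside this hunk): if Seq is a SEQUENCE whose component a is an inline SEQUENCE { n INTEGER } and whose component b is a BOOLEAN, the generated .hrl would roughly contain:

    %% Sketch of the records generated for such a 'Seq':
    -record('Seq',   {a, b}).   %% one record per named SET/SEQUENCE type
    -record('Seq_a', {n}).      %% inline type in component 'a' -> 'Seq_a'
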
diff --git a/lib/asn1/src/asn1_db.erl b/lib/asn1/src/asn1_db.erl
index 869ea310aa..a3e45ca915 100644
--- a/lib/asn1/src/asn1_db.erl
+++ b/lib/asn1/src/asn1_db.erl
@@ -20,7 +20,7 @@
%%
-module(asn1_db).
--export([dbstart/1,dbnew/2,dbload/1,dbload/3,dbsave/2,dbput/2,
+-export([dbstart/1,dbnew/3,dbload/1,dbload/4,dbsave/2,dbput/2,
dbput/3,dbget/2]).
-export([dbstop/0]).
@@ -37,13 +37,13 @@ dbstart(Includes0) ->
put(?MODULE, spawn_link(fun() -> init(Parent, Includes) end)),
ok.
-dbload(Module, Erule, Mtime) ->
- req({load, Module, Erule, Mtime}).
+dbload(Module, Erule, Maps, Mtime) ->
+ req({load, Module, {Erule,Maps}, Mtime}).
dbload(Module) ->
req({load, Module, any, {{0,0,0},{0,0,0}}}).
-dbnew(Module, Erule) -> req({new, Module, Erule}).
+dbnew(Module, Erule, Maps) -> req({new, Module, {Erule,Maps}}).
dbsave(OutFile, Module) -> cast({save, OutFile, Module}).
dbput(Module, K, V) -> cast({set, Module, K, V}).
dbput(Module, Kvs) -> cast({set, Module, Kvs}).
@@ -110,19 +110,19 @@ loop(#state{parent = Parent, monitor = MRef, table = Table,
ok = ets:tab2file(Mtab, TempFile),
ok = file:rename(TempFile, OutFile),
loop(State);
- {From, {new, Mod, Erule}} ->
+ {From, {new, Mod, EruleMaps}} ->
[] = ets:lookup(Table, Mod), %Assertion.
ModTableId = ets:new(list_to_atom(lists:concat(["asn1_",Mod])), []),
ets:insert(Table, {Mod, ModTableId}),
- ets:insert(ModTableId, {?MAGIC_KEY, info(Erule)}),
+ ets:insert(ModTableId, {?MAGIC_KEY, info(EruleMaps)}),
reply(From, ok),
loop(State);
- {From, {load, Mod, Erule, Mtime}} ->
+ {From, {load, Mod, EruleMaps, Mtime}} ->
case ets:member(Table, Mod) of
true ->
reply(From, ok);
false ->
- case load_table(Mod, Erule, Mtime, Includes) of
+ case load_table(Mod, EruleMaps, Mtime, Includes) of
{ok, ModTableId} ->
ets:insert(Table, {Mod, ModTableId}),
reply(From, ok);
@@ -151,20 +151,20 @@ lookup(Tab, K) ->
[{K,V}] -> V
end.
-info(Erule) ->
- {asn1ct:vsn(),Erule}.
+info(EruleMaps) ->
+ {asn1ct:vsn(),EruleMaps}.
-load_table(Mod, Erule, Mtime, Includes) ->
+load_table(Mod, EruleMaps, Mtime, Includes) ->
Base = lists:concat([Mod, ".asn1db"]),
case path_find(Includes, Mtime, Base) of
error ->
error;
- {ok,ModTab} when Erule =:= any ->
+ {ok,ModTab} when EruleMaps =:= any ->
{ok,ModTab};
{ok,ModTab} ->
Vsn = asn1ct:vsn(),
case ets:lookup(ModTab, ?MAGIC_KEY) of
- [{_,{Vsn,Erule}}] ->
+ [{_,{Vsn,EruleMaps}}] ->
%% Correct version and encoding rule.
{ok,ModTab};
_ ->
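
The effect of the changes above is that the ?MAGIC_KEY entry of a cached .asn1db table now records {Vsn,{Erule,Maps}} rather than {Vsn,Erule}, so a table produced without the maps option no longer matches when the same module is recompiled with maps. A minimal sketch of that freshness test, mirroring the [{_,{Vsn,EruleMaps}}] match in load_table/4 (Vsn stands in for asn1ct:vsn()):

    %% Sketch only; not part of asn1_db.
    asn1db_is_fresh({Vsn, EruleMaps}, Vsn, EruleMaps) -> true;
    asn1db_is_fresh(_StoredMagic, _Vsn, _EruleMaps) -> false.
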
diff --git a/lib/asn1/src/asn1_records.hrl b/lib/asn1/src/asn1_records.hrl
index af10c1771c..06a9e3ab03 100644
--- a/lib/asn1/src/asn1_records.hrl
+++ b/lib/asn1/src/asn1_records.hrl
@@ -28,6 +28,7 @@
-define('COMPLETE_ENCODE',1).
-define('TLV_DECODE',2).
+-define(MISSING_IN_MAP, asn1__MISSING_IN_MAP).
-record(module,{pos,name,defid,tagdefault='EXPLICIT',exports={exports,[]},imports={imports,[]}, extensiondefault=empty,typeorval}).
@@ -96,6 +97,28 @@
error_context %Top-level thingie (contains line numbers)
}).
+%% Code generation parameters and options.
+-record(gen,
+ {erule=ber :: 'ber' | 'per',
+ der=false :: boolean(),
+ aligned=false :: boolean(),
+ rec_prefix="" :: string(),
+ macro_prefix="" :: string(),
+ pack=record :: 'record' | 'map',
+ options=[] :: [any()]
+ }).
+
+%% Abstract intermediate representation.
+-record(abst,
+ {name :: module(), %Name of module.
+ types, %Types.
+ values, %Values.
+ ptypes, %Parameterized types.
+ classes, %Classes.
+ objects, %Objects.
+ objsets %Object sets.
+ }).
+
%% state record used by back-end at partial decode
%% active is set to 'yes' when a partial decode function is generated.
%% prefix is set to 'dec-inc-' or 'dec-partial-' is for
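
The new #gen{} record replaces passing a bare encoding-rule atom plus the encoding_options process-dictionary entry; back-end functions now dispatch directly on its fields (see the #gen{pack=record} / #gen{pack=map} clauses later in this diff). A minimal sketch of that style, assuming the asn1_records.hrl above is included; pack_value/4 is a made-up name, not a real compiler function:

    -include("asn1_records.hrl").

    %% Sketch: choose the value representation from the #gen{} parameters.
    pack_value(#gen{pack=record}, RecName, _Names, Values) ->
        list_to_tuple([RecName | Values]);
    pack_value(#gen{pack=map}, _RecName, Names, Values) ->
        maps:from_list(lists:zip(Names, Values)).
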
diff --git a/lib/asn1/src/asn1ct.erl b/lib/asn1/src/asn1ct.erl
index 4e030861f5..9f77a557e5 100644
--- a/lib/asn1/src/asn1ct.erl
+++ b/lib/asn1/src/asn1ct.erl
@@ -193,7 +193,7 @@ check_pass(#st{code=M,file=File,includes=Includes,
erule=Erule,dbfile=DbFile,opts=Opts,
inputmodules=InputModules}=St) ->
start(Includes),
- case asn1ct_check:storeindb(#state{erule=Erule}, M) of
+ case asn1ct_check:storeindb(#state{erule=Erule,options=Opts}, M) of
ok ->
Module = asn1_db:dbget(M#module.name, 'MODULE'),
State = #state{mname=Module#module.name,
@@ -216,8 +216,8 @@ check_pass(#st{code=M,file=File,includes=Includes,
{error,St#st{error=Reason}}
end.
-save_pass(#st{code=M,erule=Erule}=St) ->
- ok = asn1ct_check:storeindb(#state{erule=Erule}, M),
+save_pass(#st{code=M,erule=Erule,opts=Opts}=St) ->
+ ok = asn1ct_check:storeindb(#state{erule=Erule,options=Opts}, M),
{ok,St}.
parse_listing(#st{code=Code,outfile=OutFile0}=St) ->
@@ -236,12 +236,8 @@ abs_listing(#st{code={M,_},outfile=OutFile}) ->
generate_pass(#st{code=Code,outfile=OutFile,erule=Erule,opts=Opts}=St0) ->
St = St0#st{code=undefined}, %Reclaim heap space
- case generate(Code, OutFile, Erule, Opts) of
- {error,Reason} ->
- {error,St#st{error=Reason}};
- ok ->
- {ok,St}
- end.
+ generate(Code, OutFile, Erule, Opts),
+ {ok,St}.
compile_pass(#st{outfile=OutFile,opts=Opts0}=St) ->
asn1_db:dbstop(), %Reclaim memory.
@@ -834,37 +830,55 @@ delete_double_of_symbol1([],Acc) ->
%%***********************************
-generate({M,GenTOrV}, OutFile, EncodingRule, Options) ->
+generate({M,CodeTuple}, OutFile, EncodingRule, Options) ->
+ {Types,Values,Ptypes,Classes,Objects,ObjectSets} = CodeTuple,
+ Code = #abst{name=M#module.name,
+ types=Types,values=Values,ptypes=Ptypes,
+ classes=Classes,objects=Objects,objsets=ObjectSets},
debug_on(Options),
setup_bit_string_format(Options),
setup_legacy_erlang_types(Options),
- put(encoding_options,Options),
asn1ct_table:new(check_functions),
+ Gen = init_gen_record(EncodingRule, Options),
+
+ check_maps_option(Gen),
+
%% create decoding function names and taglists for partial decode
- case (catch specialized_decode_prepare(EncodingRule,M,GenTOrV,Options)) of
- {error, Reason} -> warning("Error in configuration file: ~n~p~n",
- [Reason], Options,
- "Error in configuration file");
- _ -> ok
+ try
+ specialized_decode_prepare(Gen, M)
+ catch
+ throw:{error, Reason} ->
+ warning("Error in configuration file: ~n~p~n",
+ [Reason], Options,
+ "Error in configuration file")
end,
- Result =
- case (catch asn1ct_gen:pgen(OutFile,EncodingRule,
- M#module.name,GenTOrV,Options)) of
- {'EXIT',Reason2} ->
- error("~p~n",[Reason2],Options),
- {error,Reason2};
- _ ->
- ok
- end,
+ asn1ct_gen:pgen(OutFile, Gen, Code),
debug_off(Options),
- erase(encoding_options),
cleanup_bit_string_format(),
erase(tlv_format), % used in ber
erase(class_default_type),% used in ber
asn1ct_table:delete(check_functions),
- Result.
+ ok.
+
+init_gen_record(EncodingRule, Options) ->
+ Erule = case EncodingRule of
+ uper -> per;
+ _ -> EncodingRule
+ end,
+ Der = proplists:get_bool(der, Options),
+ Aligned = EncodingRule =:= per,
+ RecPrefix = proplists:get_value(record_name_prefix, Options, ""),
+ MacroPrefix = proplists:get_value(macro_name_prefix, Options, ""),
+ Pack = case proplists:get_value(maps, Options, false) of
+ true -> map;
+ false -> record
+ end,
+ #gen{erule=Erule,der=Der,aligned=Aligned,
+ rec_prefix=RecPrefix,macro_prefix=MacroPrefix,
+ pack=Pack,options=Options}.
+
setup_legacy_erlang_types(Opts) ->
F = case lists:member(legacy_erlang_types, Opts) of
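
As a concrete reading of init_gen_record/2 above: an option list such as [uper, maps, {record_name_prefix,"r_"}] yields erule=per (uper is folded into per), aligned=false, der=false, rec_prefix="r_" and pack=map. A sketch of the expected record, assuming asn1_records.hrl is included:

    %% Sketch only: the #gen{} that init_gen_record(uper, Opts) should build
    %% for Opts = [uper, maps, {record_name_prefix,"r_"}].
    expected_gen(Opts) ->
        #gen{erule=per, der=false, aligned=false,
             rec_prefix="r_", macro_prefix="",
             pack=map, options=Opts}.
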
@@ -910,6 +924,26 @@ cleanup_bit_string_format() ->
get_bit_string_format() ->
get(bit_string_format).
+check_maps_option(#gen{pack=map}) ->
+ case get_bit_string_format() of
+ bitstring ->
+ ok;
+ _ ->
+ Message1 = "The 'maps' option must not be combined with "
+ "'compact_bit_string' or 'legacy_bit_string'",
+ exit({error,{asn1,Message1}})
+ end,
+ case use_legacy_types() of
+ false ->
+ ok;
+ true ->
+ Message2 = "The 'maps' option must not be combined with "
+ "'legacy_erlang_types'",
+ exit({error,{asn1,Message2}})
+ end;
+check_maps_option(#gen{}) ->
+ ok.
+
%% parse_and_save parses an asn1 spec and saves the unchecked parse
%% tree in a data base file.
@@ -919,22 +953,27 @@ parse_and_save(Module,S) ->
SourceDir = S#state.sourcedir,
Includes = [I || {i,I} <- Options],
Erule = S#state.erule,
+ Maps = lists:member(maps, Options),
case get_input_file(Module, [SourceDir|Includes]) of
%% search for asn1 source
{file,SuffixedASN1source} ->
Mtime = filelib:last_modified(SuffixedASN1source),
- case asn1_db:dbload(Module, Erule, Mtime) of
+ case asn1_db:dbload(Module, Erule, Maps, Mtime) of
ok -> ok;
error -> parse_and_save1(S, SuffixedASN1source, Options)
end;
- Err ->
+ Err when not Maps ->
case asn1_db:dbload(Module) of
ok ->
+ %% FIXME: This should be an error.
warning("could not do a consistency check of the ~p file: no asn1 source file was found.~n",
[lists:concat([Module,".asn1db"])],Options);
error ->
ok
end,
+ {error,{asn1,input_file_error,Err}};
+ Err ->
+ %% Always fail directly when the 'maps' option is used.
{error,{asn1,input_file_error,Err}}
end.
@@ -997,9 +1036,8 @@ input_file_type(File) ->
end
end;
".asn1config" ->
- case read_config_file(File,asn1_module) of
+ case read_config_file_info(File, asn1_module) of
{ok,Asn1Module} ->
-% put(asn1_config_file,File),
input_file_type(Asn1Module);
Error ->
Error
@@ -1092,16 +1130,27 @@ translate_options([H|T]) ->
translate_options([]) -> [].
remove_asn_flags(Options) ->
- [X || X <- Options,
- X /= get_rule(Options),
- X /= optimize,
- X /= compact_bit_string,
- X /= legacy_bit_string,
- X /= legacy_erlang_types,
- X /= debug,
- X /= asn1config,
- X /= record_name_prefix].
-
+ [X || X <- Options, not is_asn1_flag(X)].
+
+is_asn1_flag(asn1config) -> true;
+is_asn1_flag(ber) -> true;
+is_asn1_flag(compact_bit_string) -> true;
+is_asn1_flag(debug) -> true;
+is_asn1_flag(der) -> true;
+is_asn1_flag(legacy_bit_string) -> true;
+is_asn1_flag({macro_name_prefix,_}) -> true;
+is_asn1_flag({n2n,_}) -> true;
+is_asn1_flag(noobj) -> true;
+is_asn1_flag(no_ok_wrapper) -> true;
+is_asn1_flag(optimize) -> true;
+is_asn1_flag(per) -> true;
+is_asn1_flag({record_name_prefix,_}) -> true;
+is_asn1_flag(undec_rec) -> true;
+is_asn1_flag(uper) -> true;
+is_asn1_flag(verbose) -> true;
+%% 'warnings_as_errors' is intentionally passed through to the compiler.
+is_asn1_flag(_) -> false.
+
debug_on(Options) ->
case lists:member(debug,Options) of
true ->
@@ -1370,25 +1419,26 @@ prepare_bytes(Bytes) -> list_to_binary(Bytes).
vsn() ->
?vsn.
-specialized_decode_prepare(Erule,M,TsAndVs,Options) ->
- case lists:member(asn1config,Options) of
+specialized_decode_prepare(#gen{erule=ber,options=Options}=Gen, M) ->
+ case lists:member(asn1config, Options) of
true ->
- partial_decode_prepare(Erule,M,TsAndVs,Options);
- _ ->
+ special_decode_prepare_1(Gen, M);
+ false ->
ok
- end.
+ end;
+specialized_decode_prepare(_, _) ->
+ ok.
+
%% Reads the configuration file if it exists and stores information
%% about partial decode and incomplete decode
-partial_decode_prepare(ber,M,TsAndVs,Options) when is_tuple(TsAndVs) ->
+special_decode_prepare_1(#gen{options=Options}=Gen, M) ->
%% read configure file
-
- ModName =
- case lists:keysearch(asn1config,1,Options) of
- {value,{_,MName}} -> MName;
- _ -> M#module.name
- end,
+ ModName = case lists:keyfind(asn1config, 1, Options) of
+ {_,MName} -> MName;
+ false -> M#module.name
+ end,
%% io:format("ModName: ~p~nM#module.name: ~p~n~n",[ModName,M#module.name]),
- case read_config_file(ModName) of
+ case read_config_file(Gen, ModName) of
no_config_file ->
ok;
CfgList ->
@@ -1407,11 +1457,7 @@ partial_decode_prepare(ber,M,TsAndVs,Options) when is_tuple(TsAndVs) ->
Part_inc_tlv_tags = tlv_tags(CommandList2),
save_config(partial_incomplete_decode,Part_inc_tlv_tags),
save_gen_state(exclusive_decode,ExclusiveDecode,Part_inc_tlv_tags)
- end;
-partial_decode_prepare(_,_,_,_) ->
- ok.
-
-
+ end.
%% create_partial_inc_decode_gen_info/2
%%
@@ -1863,46 +1909,38 @@ tlv_tag1(<<0:1,PartialTag:7>>,Acc) ->
tlv_tag1(<<1:1,PartialTag:7,Buffer/binary>>,Acc) ->
tlv_tag1(Buffer,(Acc bsl 7) bor PartialTag).
-%% reads the content from the configuration file and returns the
-%% selected part choosen by InfoType. Assumes that the config file
+%% Reads the content from the configuration file and returns the
+%% selected part chosen by InfoType. Assumes that the config file
%% content is an Erlang term.
-read_config_file(ModuleName,InfoType) when is_atom(InfoType) ->
- CfgList = read_config_file(ModuleName),
- get_config_info(CfgList,InfoType).
+read_config_file_info(ModuleName, InfoType) when is_atom(InfoType) ->
+ Name = ensure_ext(ModuleName, ".asn1config"),
+ CfgList = read_config_file0(Name, []),
+ get_config_info(CfgList, InfoType).
+read_config_file(#gen{options=Options}, ModuleName) ->
+ Name = ensure_ext(ModuleName, ".asn1config"),
+ Includes = [I || {i,I} <- Options],
+ read_config_file0(Name, ["."|Includes]).
-read_config_file(ModuleName) ->
- case file:consult(lists:concat([ModuleName,'.asn1config'])) of
+read_config_file0(Name, [D|Dirs]) ->
+ case file:consult(filename:join(D, Name)) of
{ok,CfgList} ->
CfgList;
{error,enoent} ->
- Options = get(encoding_options),
- Includes = [I || {i,I} <- Options],
- read_config_file1(ModuleName,Includes);
+ read_config_file0(Name, Dirs);
{error,Reason} ->
Error = "error reading asn1 config file: " ++
file:format_error(Reason),
throw({error,Error})
- end.
-read_config_file1(ModuleName,[]) ->
- case filename:extension(ModuleName) of
- ".asn1config" ->
- no_config_file;
- _ ->
- read_config_file(lists:concat([ModuleName,".asn1config"]))
end;
-read_config_file1(ModuleName,[H|T]) ->
-% File = filename:join([H,lists:concat([ModuleName,'.asn1config'])]),
- File = filename:join([H,ModuleName]),
- case file:consult(File) of
- {ok,CfgList} ->
- CfgList;
- {error,enoent} ->
- read_config_file1(ModuleName,T);
- {error,Reason} ->
- Error = "error reading asn1 config file: " ++
- file:format_error(Reason),
- throw({error,Error})
+read_config_file0(_, []) ->
+ no_config_file.
+
+ensure_ext(ModuleName, Ext) ->
+ Name = filename:join([ModuleName]),
+ case filename:extension(Name) of
+ Ext -> Name;
+ _ -> Name ++ Ext
end.
get_config_info(CfgList,InfoType) ->
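
read_config_file0/2 above relies on file:consult/1, which reads a file of dot-terminated Erlang terms. A throw-away round trip for reference; the file name and the {asn1_module,...} term are only stand-ins, not a documented .asn1config format:

    %% Sketch: write one Erlang term and read it back the way
    %% read_config_file0/2 does.
    demo_consult() ->
        Term = {asn1_module, 'MyModule'},
        ok = file:write_file("demo.asn1config", io_lib:format("~p.~n", [Term])),
        {ok, [Term]} = file:consult("demo.asn1config"),
        ok.
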
@@ -2382,8 +2420,10 @@ format_error({write_error,File,Reason}) ->
io_lib:format("writing output file ~s failed: ~s",
[File,file:format_error(Reason)]).
-is_error(S) when is_record(S, state) ->
- is_error(S#state.options);
+is_error(#state{options=Opts}) ->
+ is_error(Opts);
+is_error(#gen{options=Opts}) ->
+ is_error(Opts);
is_error(O) ->
lists:member(errors, O) orelse is_verbose(O).
@@ -2392,8 +2432,10 @@ is_warning(S) when is_record(S, state) ->
is_warning(O) ->
lists:member(warnings, O) orelse is_verbose(O).
-is_verbose(S) when is_record(S, state) ->
- is_verbose(S#state.options);
+is_verbose(#state{options=Opts}) ->
+ is_verbose(Opts);
+is_verbose(#gen{options=Opts}) ->
+ is_verbose(Opts);
is_verbose(O) ->
lists:member(verbose, O).
diff --git a/lib/asn1/src/asn1ct_check.erl b/lib/asn1/src/asn1ct_check.erl
index f2c895bfaa..321f4147f5 100644
--- a/lib/asn1/src/asn1ct_check.erl
+++ b/lib/asn1/src/asn1ct_check.erl
@@ -2239,12 +2239,18 @@ normalized_record(SorS,S,Value,Components,NameList) ->
case is_record_normalized(S,NewName,Value,length(Components)) of
true ->
Value;
- _ ->
+ false ->
NoComps = length(Components),
ListOfVals = normalize_seq_or_set(SorS,S,Value,Components,NameList,[]),
- NoComps = length(ListOfVals), %% Assert
- list_to_tuple([NewName|ListOfVals])
+ NoComps = length(ListOfVals), %Assertion.
+ case use_maps(S) of
+ false ->
+ list_to_tuple([NewName|ListOfVals]);
+ true ->
+ create_map_value(Components, ListOfVals)
+ end
end.
+
is_record_normalized(S,Name,V = #'Externalvaluereference'{},NumComps) ->
case get_referenced_type(S,V) of
{_M,#valuedef{type=_T1,value=V2}} ->
@@ -2253,9 +2259,20 @@ is_record_normalized(S,Name,V = #'Externalvaluereference'{},NumComps) ->
end;
is_record_normalized(_S,Name,Value,NumComps) when is_tuple(Value) ->
(tuple_size(Value) =:= (NumComps + 1)) andalso (element(1, Value) =:= Name);
+is_record_normalized(_S, _Name, Value, _NumComps) when is_map(Value) ->
+ true;
is_record_normalized(_,_,_,_) ->
false.
+use_maps(#state{options=Opts}) ->
+ lists:member(maps, Opts).
+
+create_map_value(Components, ListOfVals) ->
+ Zipped = lists:zip(Components, ListOfVals),
+ L = [{Name,V} || {#'ComponentType'{name=Name},V} <- Zipped,
+ V =/= asn1_NOVALUE],
+ maps:from_list(L).
+
normalize_seq_or_set(SorS, S,
[{#seqtag{val=Cname},V}|Vs],
[#'ComponentType'{name=Cname,typespec=TS}|Cs],
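
create_map_value/2 above pairs each #'ComponentType'{} name with its normalized value and drops asn1_NOVALUE entries, mirroring how components without a value are simply left out of a map. The same idea without the compiler's records, as a self-contained sketch:

    %% Sketch: Names and Values are parallel lists; components without a
    %% value (asn1_NOVALUE) do not become map keys.
    create_map_value_sketch(Names, Values) ->
        maps:from_list([{N, V} || {N, V} <- lists:zip(Names, Values),
                                  V =/= asn1_NOVALUE]).
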
@@ -4192,7 +4209,7 @@ iof_associated_type1(S,C) ->
%% fieldname=[{typefieldreference,'Type'}],
fieldname={'Type',[]},
type=Typefield_type},
- IOFComponents =
+ IOFComponents0 =
[#'ComponentType'{name='type-id',
typespec=#type{tag=C1TypeTag,
def=ObjectIdentifier,
@@ -4209,6 +4226,7 @@ iof_associated_type1(S,C) ->
tablecinf=Comp2tablecinf},
prop=mandatory,
tags=[{'CONTEXT',0}]}],
+ IOFComponents = textual_order(IOFComponents0),
#'SEQUENCE'{tablecinf=TableCInf,
components=simplify_comps(IOFComponents)}.
@@ -5673,7 +5691,8 @@ storeindb(S0, #module{name=ModName,typeorval=TVlist0}=M) ->
storeindb_1(S, #module{name=ModName}=M, TVlist0, TVlist) ->
NewM = M#module{typeorval=findtypes_and_values(TVlist0)},
- asn1_db:dbnew(ModName, S#state.erule),
+ Maps = lists:member(maps, S#state.options),
+ asn1_db:dbnew(ModName, S#state.erule, Maps),
asn1_db:dbput(ModName, 'MODULE', NewM),
asn1_db:dbput(ModName, TVlist),
include_default_class(S, NewM#module.name),
diff --git a/lib/asn1/src/asn1ct_constructed_ber_bin_v2.erl b/lib/asn1/src/asn1ct_constructed_ber_bin_v2.erl
index 325bea5879..16af09bca9 100644
--- a/lib/asn1/src/asn1ct_constructed_ber_bin_v2.erl
+++ b/lib/asn1/src/asn1ct_constructed_ber_bin_v2.erl
@@ -32,7 +32,7 @@
-include("asn1_records.hrl").
--import(asn1ct_gen, [emit/1,demit/1,get_record_name_prefix/0]).
+-import(asn1ct_gen, [emit/1,demit/1,get_record_name_prefix/1]).
-define(ASN1CT_GEN_BER,asn1ct_gen_ber_bin_v2).
@@ -57,7 +57,7 @@
%%===============================================================================
%%===============================================================================
-gen_encode_sequence(Erules,Typename,D) when is_record(D,type) ->
+gen_encode_sequence(Gen, Typename, #type{}=D) ->
asn1ct_name:start(),
asn1ct_name:new(term),
asn1ct_name:new(bytes),
@@ -67,8 +67,12 @@ gen_encode_sequence(Erules,Typename,D) when is_record(D,type) ->
ValName =
case Typename of
['EXTERNAL'] ->
+ Tr = case Gen of
+ #gen{pack=record} -> transform_to_EXTERNAL1990;
+ #gen{pack=map} -> transform_to_EXTERNAL1990_maps
+ end,
emit([indent(4),"NewVal = ",
- {call,ext,transform_to_EXTERNAL1990,["Val"]},
+ {call,ext,Tr,["Val"]},
com,nl]),
"NewVal";
_ ->
@@ -90,18 +94,9 @@ gen_encode_sequence(Erules,Typename,D) when is_record(D,type) ->
{Rl,El} -> Rl ++ El;
_ -> CompList
end,
-
-%% don't match recordname for now, because of compatibility reasons
-%% emit(["{'",asn1ct_gen:list2rname(Typename),"'"]),
- emit(["{_"]),
- case length(CompList1) of
- 0 ->
- true;
- CompListLen ->
- emit([","]),
- mkcindexlist([Tc || Tc <- lists:seq(1,CompListLen)])
- end,
- emit(["} = ",ValName,",",nl]),
+
+ enc_match_input(Gen, ValName, CompList1),
+
EncObj =
case TableConsInfo of
#simpletableattributes{usedclassfield=Used,
@@ -125,7 +120,7 @@ gen_encode_sequence(Erules,Typename,D) when is_record(D,type) ->
emit([ObjectEncode," = ",nl,
" ",{asis,ObjSetMod},":'getenc_",ObjSetName,
"'("]),
- ValueMatch = value_match(ValueIndex,
+ ValueMatch = value_match(Gen, ValueIndex,
lists:concat(["Cindex",N])),
emit([indent(35),ValueMatch,"),",nl]),
{AttrN,ObjectEncode};
@@ -144,7 +139,7 @@ gen_encode_sequence(Erules,Typename,D) when is_record(D,type) ->
end
end,
- gen_enc_sequence_call(Erules,Typename,CompList1,1,Ext,EncObj),
+ gen_enc_sequence_call(Gen, Typename, CompList1, 1, Ext, EncObj),
emit([nl," BytesSoFar = "]),
case SeqOrSet of
@@ -168,7 +163,36 @@ gen_encode_sequence(Erules,Typename,D) when is_record(D,type) ->
call(encode_tags, ["TagIn","BytesSoFar","LenSoFar"]),
emit([".",nl]).
-gen_decode_sequence(Erules,Typename,D) when is_record(D,type) ->
+enc_match_input(#gen{pack=record}, ValName, CompList) ->
+ Len = length(CompList),
+ Vars = [lists:concat(["Cindex",N]) || N <- lists:seq(1, Len)],
+ RecordName = "_",
+ emit(["{",lists:join(",", [RecordName|Vars]),"} = ",ValName,com,nl]);
+enc_match_input(#gen{pack=map}, ValName, CompList) ->
+ Len = length(CompList),
+ Vars = [lists:concat(["Cindex",N]) || N <- lists:seq(1, Len)],
+ Zipped = lists:zip(CompList, Vars),
+ M = [[{asis,Name},":=",Var] ||
+ {#'ComponentType'{prop=mandatory,name=Name},Var} <- Zipped],
+ case M of
+ [] ->
+ ok;
+ [_|_] ->
+ emit(["#{",lists:join(",", M),"} = ",ValName,com,nl])
+ end,
+ Os0 = [{Name,Var} ||
+ {#'ComponentType'{prop=Prop,name=Name},Var} <- Zipped,
+ Prop =/= mandatory],
+ F = fun({Name,Var}) ->
+ [Var," = case ",ValName," of\n"
+ " #{",{asis,Name},":=",Var,"_0} -> ",
+ Var,"_0;\n"
+ " _ -> ",atom_to_list(?MISSING_IN_MAP),"\n"
+ "end"]
+ end,
+ emit(lists:join(",\n", [F(E) || E <- Os0]++[[]])).
+
+gen_decode_sequence(Gen, Typename, #type{}=D) ->
asn1ct_name:start(),
asn1ct_name:new(tag),
#'SEQUENCE'{tablecinf=TableConsInfo,components=CList0} = D#type.def,
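
For orientation, the code emitted by enc_match_input/3 above for a map-packed SEQUENCE looks roughly as follows, using Seq1 from the documentation hunks earlier (a has a DEFAULT, b is OPTIONAL, c is mandatory); variable names are illustrative and the fragment sits inside the generated encode function:

    %% Roughly what the generated encoder matches (sketch, not verbatim output):
    #{c := Cindex3} = Val,                    %% mandatory components in one map match
    Cindex1 = case Val of                     %% DEFAULT component
                  #{a := Cindex1_0} -> Cindex1_0;
                  _ -> asn1__MISSING_IN_MAP
              end,
    Cindex2 = case Val of                     %% OPTIONAL component
                  #{b := Cindex2_0} -> Cindex2_0;
                  _ -> asn1__MISSING_IN_MAP
              end,
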
@@ -225,15 +249,20 @@ gen_decode_sequence(Erules,Typename,D) when is_record(D,type) ->
_ ->
{false,false}
end,
- RecordName = lists:concat([get_record_name_prefix(),
- asn1ct_gen:list2rname(Typename)]),
- case gen_dec_sequence_call(Erules,Typename,CompList2,Ext,DecObjInf) of
- no_terms -> % an empty sequence
- emit([nl,nl]),
- demit(["Result = "]), %dbg
- %% return value as record
+ RecordName0 = lists:concat([get_record_name_prefix(Gen),
+ asn1ct_gen:list2rname(Typename)]),
+ RecordName = list_to_atom(RecordName0),
+ case gen_dec_sequence_call(Gen, Typename, CompList2, Ext, DecObjInf) of
+ no_terms -> % an empty sequence
asn1ct_name:new(rb),
- emit([" {'",RecordName,"'}.",nl,nl]);
+ case Gen of
+ #gen{pack=record} ->
+ emit([nl,nl,
+ " {'",RecordName,"'}.",nl,nl]);
+ #gen{pack=map} ->
+ emit([nl,nl,
+ " #{}.",nl,nl])
+ end;
{LeadingAttrTerm,PostponedDecArgs} ->
emit([nl]),
case {LeadingAttrTerm,PostponedDecArgs} of
@@ -243,7 +272,7 @@ gen_decode_sequence(Erules,Typename,D) when is_record(D,type) ->
ok;
{[{ObjSetRef,LeadingAttr,Term}],PostponedDecArgs} ->
DecObj = asn1ct_gen:un_hyphen_var(lists:concat(['DecObj',LeadingAttr,Term])),
- ValueMatch = value_match(ValueIndex,Term),
+ ValueMatch = value_match(Gen, ValueIndex,Term),
{ObjSetMod,ObjSetName} = ObjSetRef,
emit([DecObj," =",nl,
" ",{asis,ObjSetMod},":'getdec_",ObjSetName,"'(",
@@ -263,22 +292,64 @@ gen_decode_sequence(Erules,Typename,D) when is_record(D,type) ->
"end,",nl])
end,
asn1ct_name:new(rb),
- case Typename of
- ['EXTERNAL'] ->
- emit([" OldFormat={'",RecordName,
- "', "]),
- mkvlist(asn1ct_name:all(term)),
- emit(["},",nl]),
- emit([" ",
- {call,ext,transform_to_EXTERNAL1994,
- ["OldFormat"]},".",nl]);
- _ ->
- emit([" {'",RecordName,"', "]),
- mkvlist(asn1ct_name:all(term)),
- emit(["}.",nl,nl])
- end
+ gen_dec_pack(Gen, RecordName, Typename, CompList),
+ emit([".",nl])
end.
+gen_dec_pack(Gen, RecordName, Typename, CompList) ->
+ case Typename of
+ ['EXTERNAL'] ->
+ dec_external(Gen, RecordName);
+ _ ->
+ asn1ct_name:new(res),
+ gen_dec_do_pack(Gen, RecordName, CompList),
+ emit([com,nl,
+ {curr,res}])
+ end.
+
+dec_external(#gen{pack=record}, RecordName) ->
+ All = [{var,Term} || Term <- asn1ct_name:all(term)],
+ Record = [{asis,RecordName}|All],
+ emit(["OldFormat={",lists:join(",", Record),"},",nl,
+ {call,ext,transform_to_EXTERNAL1994,
+ ["OldFormat"]}]);
+dec_external(#gen{pack=map}, _RecordName) ->
+ Vars = asn1ct_name:all(term),
+ Names = ['direct-reference','indirect-reference',
+ 'data-value-descriptor',encoding],
+ Zipped = lists:zip(Names, Vars),
+ MapInit = lists:join(",", [["'",N,"'=>",{var,V}] || {N,V} <- Zipped]),
+ emit(["OldFormat = #{",MapInit,"}",com,nl,
+ "ASN11994Format =",nl,
+ {call,ext,transform_to_EXTERNAL1994_maps,
+ ["OldFormat"]}]).
+
+gen_dec_do_pack(#gen{pack=record}, RecordName, _CompList) ->
+ All = asn1ct_name:all(term),
+ L = [{asis,RecordName}|[{var,Var} || Var <- All]],
+ emit([{curr,res}," = {",lists:join(",", L),"}"]);
+gen_dec_do_pack(#gen{pack=map}, _, CompList) ->
+ Zipped = lists:zip(CompList, asn1ct_name:all(term)),
+ PF = fun({#'ComponentType'{prop='OPTIONAL'},_}) -> false;
+ ({_,_}) -> true
+ end,
+ {Mandatory,Optional} = lists:partition(PF, Zipped),
+ L = [[{asis,Name},"=>",{var,Var}] ||
+ {#'ComponentType'{name=Name},Var} <- Mandatory],
+ emit([{curr,res}," = #{",lists:join(",", L),"}"]),
+ gen_dec_map_optional(Optional).
+
+gen_dec_map_optional([{#'ComponentType'{name=Name},Var}|T]) ->
+ asn1ct_name:new(res),
+ emit([com,nl,
+ {curr,res}," = case ",{var,Var}," of",nl,
+ " asn1_NOVALUE -> ",{prev,res},";",nl,
+ " _ -> ",{prev,res},"#{",{asis,Name},"=>",{var,Var},"}",nl,
+ "end"]),
+ gen_dec_map_optional(T);
+gen_dec_map_optional([]) ->
+ ok.
+
gen_dec_postponed_decs(_,[]) ->
emit(nl);
gen_dec_postponed_decs(DecObj,[{_Cname,{FirstPFN,PFNList},Term,
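
Correspondingly, gen_dec_pack/4 with pack=map emits, for the same Seq1, roughly the packing code below: DEFAULT and mandatory components go straight into the map, while an OPTIONAL component is added only when its decoded term is not asn1_NOVALUE (variable names are illustrative):

    %% Roughly what the generated decoder ends with (sketch, not verbatim output):
    Res1 = #{a => Term1, c => Term3},         %% non-OPTIONAL components
    Res2 = case Term2 of                      %% OPTIONAL component b
               asn1_NOVALUE -> Res1;
               _ -> Res1#{b => Term2}
           end,
    Res2.
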
@@ -327,7 +398,7 @@ emit_opt_or_mand_check(Value,TmpTerm) ->
gen_encode_set(Erules,Typename,D) when is_record(D,type) ->
gen_encode_sequence(Erules,Typename,D).
-gen_decode_set(Erules,Typename,D) when is_record(D,type) ->
+gen_decode_set(Gen, Typename, #type{}=D) ->
asn1ct_name:start(),
%% asn1ct_name:new(term),
asn1ct_name:new(tag),
@@ -393,7 +464,7 @@ gen_decode_set(Erules,Typename,D) when is_record(D,type) ->
_ ->
emit(["SetFun = fun(FunTlv) ->", nl]),
emit(["case FunTlv of ",nl]),
- NextNum = gen_dec_set_cases(Erules,Typename,CompList,1),
+ NextNum = gen_dec_set_cases(Gen, Typename, CompList, 1),
emit([indent(6), {curr,else}," -> ",nl,
indent(9),"{",NextNum,", ",{curr,else},"}",nl]),
emit([indent(3),"end",nl]),
@@ -405,14 +476,17 @@ gen_decode_set(Erules,Typename,D) when is_record(D,type) ->
asn1ct_name:new(tlv)
end,
- RecordName = lists:concat([get_record_name_prefix(),
- asn1ct_gen:list2rname(Typename)]),
- case gen_dec_sequence_call(Erules,Typename,CompList,Ext,DecObjInf) of
- no_terms -> % an empty sequence
- emit([nl,nl]),
- demit(["Result = "]), %dbg
- %% return value as record
- emit([" {'",RecordName,"'}.",nl]);
+ RecordName0 = lists:concat([get_record_name_prefix(Gen),
+ asn1ct_gen:list2rname(Typename)]),
+ RecordName = list_to_atom(RecordName0),
+ case gen_dec_sequence_call(Gen, Typename, CompList, Ext, DecObjInf) of
+ no_terms -> % an empty SET
+ case Gen of
+ #gen{pack=record} ->
+ emit([nl,nl," {'",RecordName,"'}.",nl,nl]);
+ #gen{pack=map} ->
+ emit([nl,nl," #{}.",nl,nl])
+ end;
{LeadingAttrTerm,PostponedDecArgs} ->
emit([nl]),
case {LeadingAttrTerm,PostponedDecArgs} of
@@ -422,7 +496,7 @@ gen_decode_set(Erules,Typename,D) when is_record(D,type) ->
ok;
{[{ObjSetRef,LeadingAttr,Term}],PostponedDecArgs} ->
DecObj = asn1ct_gen:un_hyphen_var(lists:concat(['DecObj',LeadingAttr,Term])),
- ValueMatch = value_match(ValueIndex,Term),
+ ValueMatch = value_match(Gen, ValueIndex, Term),
{ObjSetMod,ObjSetName} = ObjSetRef,
emit([DecObj," =",nl,
" ",{asis,ObjSetMod},":'getdec_",ObjSetName,"'(",
@@ -441,9 +515,8 @@ gen_decode_set(Erules,Typename,D) when is_record(D,type) ->
"}}}) % extra fields not allowed",nl,
"end,",nl])
end,
- emit([" {'",RecordName,"', "]),
- mkvlist(asn1ct_name:all(term)),
- emit(["}.",nl])
+ gen_dec_pack(Gen, RecordName, Typename, CompList),
+ emit([".",nl])
end.
@@ -504,10 +577,8 @@ gen_decode_sof(Erules,TypeName,_InnerTypeName,D) when is_record(D,type) ->
emit([" || ",{curr,v}," <- ",{curr,tlv},"].",nl,nl,nl]).
-gen_encode_sof_components(Erules,Typename,SeqOrSetOf,Cont)
- when is_record(Cont,type)->
-
- {Objfun,Objfun_novar,EncObj} =
+gen_encode_sof_components(Gen, Typename, SeqOrSetOf, #type{}=Cont) ->
+ {Objfun,Objfun_novar,EncObj} =
case Cont#type.tablecinf of
[{objfun,_}|_R] ->
{", ObjFun",", _",{no_attr,"ObjFun"}};
@@ -517,20 +588,19 @@ gen_encode_sof_components(Erules,Typename,SeqOrSetOf,Cont)
emit(["'enc_",asn1ct_gen:list2name(Typename),
"_components'([]",Objfun_novar,", AccBytes, AccLen) -> ",nl]),
- case catch lists:member(der,get(encoding_options)) of
- true when SeqOrSetOf=='SET OF'->
+ case {Gen,SeqOrSetOf} of
+ {#gen{der=true},'SET OF'} ->
asn1ct_func:need({ber,dynamicsort_SETOF,1}),
emit([indent(3),
"{dynamicsort_SETOF(AccBytes),AccLen};",nl,nl]);
- _ ->
+ {_,_} ->
emit([indent(3),"{lists:reverse(AccBytes),AccLen};",nl,nl])
end,
emit(["'enc_",asn1ct_gen:list2name(Typename),
"_components'([H|T]",Objfun,",AccBytes, AccLen) ->",nl]),
TypeNameSuffix = asn1ct_gen:constructed_suffix(SeqOrSetOf,Cont#type.def),
- gen_enc_line(Erules,Typename,TypeNameSuffix,Cont,"H",3,
-% mandatory,"{EncBytes,EncLen} = ",EncObj),
- mandatory,EncObj),
+ gen_enc_line(Gen, Typename, TypeNameSuffix, Cont, "H", 3,
+ mandatory, EncObj),
emit([",",nl]),
emit([indent(3),"'enc_",asn1ct_gen:list2name(Typename),
"_components'(T",Objfun,","]),
@@ -1028,35 +1098,44 @@ gen_enc_line(Erules,TopType,Cname,Type,Element,Indent,OptOrMand,Assign,EncObj)
emit([nl,indent(7),"end"])
end.
-gen_optormand_case(mandatory, _Erules, _TopType, _Cname, _Type, _Element) ->
+gen_optormand_case(mandatory, _Gen, _TopType, _Cname, _Type, _Element) ->
ok;
-gen_optormand_case('OPTIONAL', Erules, _TopType, _Cname, _Type, Element) ->
+gen_optormand_case('OPTIONAL', Gen, _TopType, _Cname, _Type, Element) ->
emit([" case ",Element," of",nl]),
- emit([indent(9),"asn1_NOVALUE -> {",
- empty_lb(Erules),",0};",nl]),
+ Missing = case Gen of
+ #gen{pack=record} -> asn1_NOVALUE;
+ #gen{pack=map} -> ?MISSING_IN_MAP
+ end,
+ emit([indent(9),Missing," -> {",
+ empty_lb(Gen),",0};",nl]),
emit([indent(9),"_ ->",nl,indent(12)]);
-gen_optormand_case({'DEFAULT',DefaultValue}, Erules, _TopType,
+gen_optormand_case({'DEFAULT',DefaultValue}, Gen, _TopType,
_Cname, Type, Element) ->
CurrMod = get(currmod),
- case catch lists:member(der,get(encoding_options)) of
- true ->
- asn1ct_gen_check:emit(Type, DefaultValue, Element);
- _ ->
- emit([" case ",Element," of",nl]),
- emit([indent(9),"asn1_DEFAULT -> {",
- empty_lb(Erules),
- ",0};",nl]),
- case DefaultValue of
- #'Externalvaluereference'{module=CurrMod,
- value=V} ->
- emit([indent(9),"?",{asis,V}," -> {",
- empty_lb(Erules),",0};",nl]);
- _ ->
- emit([indent(9),{asis,
- DefaultValue}," -> {",
- empty_lb(Erules),",0};",nl])
- end,
- emit([indent(9),"_ ->",nl,indent(12)])
+ case Gen of
+ #gen{erule=ber,der=true} ->
+ asn1ct_gen_check:emit(Gen, Type, DefaultValue, Element);
+ #gen{erule=ber,der=false,pack=Pack} ->
+ Ind9 = indent(9),
+ DefMarker = case Pack of
+ record -> asn1_DEFAULT;
+ map -> ?MISSING_IN_MAP
+ end,
+ emit([" case ",Element," of",nl,
+ Ind9,{asis,DefMarker}," ->",nl,
+ Ind9,indent(3),"{",empty_lb(Gen),",0};",nl,
+ Ind9,"_ when ",Element," =:= "]),
+ Dv = case DefaultValue of
+ #'Externalvaluereference'{module=CurrMod,
+ value=V} ->
+ ["?",{asis,V}];
+ _ ->
+ [{asis,DefaultValue}]
+ end,
+ emit(Dv++[" ->",nl,
+ Ind9,indent(3),"{",empty_lb(Gen),",0};",nl,
+ Ind9,"_ ->",nl,
+ indent(12)])
end.
%% Use for SEQUENCE OF and CHOICE.
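As a rough sketch of what the OPTIONAL clause above emits for the BER back-end with the record representation (the Cindex2 variable and the 'enc_Component' call are invented for illustration; the encode body and the closing "end" come from gen_enc_line), the generated code has this shape; with the map representation the asn1_NOVALUE marker is replaced by ?MISSING_IN_MAP:

    case Cindex2 of
        asn1_NOVALUE ->
            {<<>>,0};
        _ ->
            'enc_Component'(Cindex2, [Tag])   %% emitted by gen_enc_line
    end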
@@ -1207,7 +1286,7 @@ gen_dec_call({typefield,_},_,_,Cname,Type,BytesVar,Tag,_,_,_DecObjInf,OptOrMandC
(Type#type.def)#'ObjectClassFieldType'.fieldname,
[{Cname,RefedFieldName,asn1ct_gen:mk_var(asn1ct_name:curr(term)),
asn1ct_gen:mk_var(asn1ct_name:curr(tmpterm)),Tag,OptOrMandComp}];
-gen_dec_call(InnerType, _Erules, TopType, Cname, Type, BytesVar,
+gen_dec_call(InnerType, Gen, TopType, Cname, Type, BytesVar,
Tag, _PrimOptOrMand, _OptOrMand, DecObjInf,_) ->
WhatKind = asn1ct_gen:type(InnerType),
gen_dec_call1(WhatKind, InnerType, TopType, Cname,
@@ -1215,7 +1294,7 @@ gen_dec_call(InnerType, _Erules, TopType, Cname, Type, BytesVar,
case DecObjInf of
{Cname,{_,OSet,_UniqueFName,ValIndex}} ->
Term = asn1ct_gen:mk_var(asn1ct_name:curr(term)),
- ValueMatch = value_match(ValIndex,Term),
+ ValueMatch = value_match(Gen, ValIndex, Term),
{ObjSetMod,ObjSetName} = OSet,
emit([",",nl,"ObjFun = ",{asis,ObjSetMod},":'getdec_",ObjSetName,
"'(",ValueMatch,")"]);
@@ -1340,19 +1419,6 @@ gen_dec_call1(WhatKind, _, TopType, Cname, Type, BytesVar, Tag) ->
indent(N) ->
lists:duplicate(N,32). % 32 = space
-mkcindexlist([H,T1|T], Sep) -> % Sep is a string e.g ", " or "+ "
- emit(["Cindex",H,Sep]),
- mkcindexlist([T1|T], Sep);
-mkcindexlist([H|T], Sep) ->
- emit(["Cindex",H]),
- mkcindexlist(T, Sep);
-mkcindexlist([], _) ->
- true.
-
-mkcindexlist(L) ->
- mkcindexlist(L,", ").
-
-
mkvlist([H,T1|T], Sep) -> % Sep is a string e.g ", " or "+ "
emit([{var,H},Sep]),
mkvlist([T1|T], Sep);
@@ -1429,19 +1495,25 @@ mkfuncname(TopType,Cname,WhatKind,Prefix,Suffix) ->
{F, "?MODULE", F}
end.
-empty_lb(ber) ->
+empty_lb(#gen{erule=ber}) ->
"<<>>".
-value_match(Index,Value) when is_atom(Value) ->
- value_match(Index,atom_to_list(Value));
-value_match([],Value) ->
+value_match(#gen{pack=record}, VIs, Value) ->
+ value_match_rec(VIs, Value);
+value_match(#gen{pack=map}, VIs, Value) ->
+ value_match_map(VIs, Value).
+
+value_match_rec([], Value) ->
+ Value;
+value_match_rec([{VI,_}|VIs], Value0) ->
+ Value = value_match_rec(VIs, Value0),
+ lists:concat(["element(",VI,", ",Value,")"]).
+
+value_match_map([], Value) ->
Value;
-value_match([{VI,_}|VIs],Value) ->
- value_match1(Value,VIs,lists:concat(["element(",VI,","]),1).
-value_match1(Value,[],Acc,Depth) ->
- Acc ++ Value ++ lists:concat(lists:duplicate(Depth,")"));
-value_match1(Value,[{VI,_}|VIs],Acc,Depth) ->
- value_match1(Value,VIs,Acc++lists:concat(["element(",VI,","]),Depth+1).
+value_match_map([{_,Name}|VIs], Value0) ->
+ Value = value_match_map(VIs, Value0),
+ lists:concat(["maps:get(",Name,", ",Value,")"]).
call(F, Args) ->
asn1ct_func:call(ber, F, Args).
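To make the reworked value_match/3 concrete: for a hypothetical two-level value index and the variable "Val" (names illustrative only), the record and map back-ends build their accessor strings like this:

    %% pack=record
    value_match_rec([{2,attr},{3,inner}], "Val")
        %% -> "element(2, element(3, Val))"
    %% pack=map
    value_match_map([{2,attr},{3,inner}], "Val")
        %% -> "maps:get(attr, maps:get(inner, Val))"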
diff --git a/lib/asn1/src/asn1ct_constructed_per.erl b/lib/asn1/src/asn1ct_constructed_per.erl
index a34b25182c..b7579c8065 100644
--- a/lib/asn1/src/asn1ct_constructed_per.erl
+++ b/lib/asn1/src/asn1ct_constructed_per.erl
@@ -32,17 +32,27 @@
-include("asn1_records.hrl").
%-compile(export_all).
--import(asn1ct_gen, [emit/1,demit/1,get_record_name_prefix/0]).
--import(asn1ct_func, [call/3]).
+-import(asn1ct_gen, [emit/1,demit/1,get_record_name_prefix/1]).
+
+-type type_name() :: any().
+
%% ENCODE GENERATOR FOR SEQUENCE TYPE ** **********
-gen_encode_set(Erules,TypeName,D) ->
- gen_encode_constructed(Erules,TypeName,D).
+-spec gen_encode_set(Gen, TypeName, #type{}) -> 'ok' when
+ Gen :: #gen{},
+ TypeName :: type_name().
+
+gen_encode_set(Gen, TypeName, D) ->
+ gen_encode_constructed(Gen, TypeName, D).
+
+-spec gen_encode_sequence(Gen, TypeName, #type{}) -> 'ok' when
+ Gen :: #gen{},
+ TypeName :: type_name().
-gen_encode_sequence(Erules,TypeName,D) ->
- gen_encode_constructed(Erules,TypeName,D).
+gen_encode_sequence(Gen, TypeName, D) ->
+ gen_encode_constructed(Gen, TypeName, D).
gen_encode_constructed(Erule, Typename, #type{}=D) ->
asn1ct_name:start(),
@@ -50,88 +60,23 @@ gen_encode_constructed(Erule, Typename, #type{}=D) ->
asn1ct_imm:enc_cg(Imm, is_aligned(Erule)),
emit([".",nl]).
-gen_encode_constructed_imm(Erule, Typename, #type{}=D) ->
- {ExtAddGroup,TmpCompList,TableConsInfo} =
- case D#type.def of
- #'SEQUENCE'{tablecinf=TCI,components=CL,extaddgroup=ExtAddGroup0} ->
- {ExtAddGroup0,CL,TCI};
- #'SET'{tablecinf=TCI,components=CL} ->
- {undefined,CL,TCI}
- end,
-
- CompList = case ExtAddGroup of
- undefined ->
- TmpCompList;
- _ when is_integer(ExtAddGroup) ->
- %% This is a fake SEQUENCE representing an ExtensionAdditionGroup
- %% Reset the textual order so we get the right
- %% index of the components
- [Comp#'ComponentType'{textual_order=undefined}||
- Comp<-TmpCompList]
- end,
- ExternalImm =
- case Typename of
- ['EXTERNAL'] ->
- Next = asn1ct_gen:mk_var(asn1ct_name:next(val)),
- Curr = asn1ct_gen:mk_var(asn1ct_name:curr(val)),
- asn1ct_name:new(val),
- [{call,ext,transform_to_EXTERNAL1990,[{var,Curr}],{var,Next}}];
- _ ->
- []
- end,
- Aligned = is_aligned(Erule),
- Value0 = make_var(val),
+gen_encode_constructed_imm(Gen, Typename, #type{}=D) ->
+ {CompList,TableConsInfo} = enc_complist(D),
+ ExternalImm = external_imm(Gen, Typename),
Optionals = optionals(to_textual_order(CompList)),
- ImmOptionals = [asn1ct_imm:per_enc_optional(Value0, Opt, Aligned) ||
- Opt <- Optionals],
+ ImmOptionals = enc_optionals(Gen, Optionals),
Ext = extensible_enc(CompList),
+ Aligned = is_aligned(Gen),
ExtImm = case Ext of
{ext,ExtPos,NumExt} when NumExt > 0 ->
- gen_encode_extaddgroup(CompList),
+ gen_encode_extaddgroup(Gen, CompList),
Value = make_var(val),
- asn1ct_imm:per_enc_extensions(Value, ExtPos,
- NumExt, Aligned);
+ enc_extensions(Gen, Value, ExtPos, NumExt, Aligned);
_ ->
[]
end,
- {EncObj,ObjSetImm} =
- case TableConsInfo of
- #simpletableattributes{usedclassfield=Used,
- uniqueclassfield=Unique} when Used /= Unique ->
- {false,[]};
- %% ObjectSet, name of the object set in constraints
- %%
- %%{ObjectSet,AttrN,N,UniqueFieldName} -> %% N is index of attribute that determines constraint
- #simpletableattributes{objectsetname=ObjectSet,
- c_name=AttrN,
- c_index=N,
- usedclassfield=UniqueFieldName,
- uniqueclassfield=UniqueFieldName,
- valueindex=ValueIndex0
- } -> %% N is index of attribute that determines constraint
- {Module,ObjSetName} = ObjectSet,
- #typedef{typespec=#'ObjectSet'{gen=Gen}} =
- asn1_db:dbget(Module, ObjSetName),
- case Gen of
- true ->
- ValueIndex = ValueIndex0 ++ [{N+1,top}],
- Val = make_var(val),
- {ObjSetImm0,Dst} = enc_dig_out_value(ValueIndex, Val),
- {{AttrN,Dst},ObjSetImm0};
- false ->
- {false,[]}
- end;
- _ ->
- case D#type.tablecinf of
- [{objfun,_}|_] ->
- %% when the simpletableattributes was at an outer
- %% level and the objfun has been passed through the
- %% function call
- {{"got objfun through args",{var,"ObjFun"}},[]};
- _ ->
- {false,[]}
- end
- end,
+ MatchImm = enc_map_match(Gen, CompList),
+ {EncObj,ObjSetImm} = enc_table(Gen, TableConsInfo, D),
ImmSetExt =
case Ext of
{ext,_Pos,NumExt2} when NumExt2 > 0 ->
@@ -141,38 +86,195 @@ gen_encode_constructed_imm(Erule, Typename, #type{}=D) ->
_ ->
[]
end,
- ImmBody = gen_enc_components_call(Erule, Typename, CompList, EncObj, Ext),
- ExternalImm ++ ExtImm ++ ObjSetImm ++
+ ImmBody = gen_enc_components_call(Gen, Typename, CompList, EncObj, Ext),
+ ExternalImm ++ MatchImm ++ ExtImm ++ ObjSetImm ++
asn1ct_imm:enc_append([ImmSetExt] ++ ImmOptionals ++ ImmBody).
-gen_encode_extaddgroup(CompList) ->
+external_imm(Gen, ['EXTERNAL']) ->
+ Next = asn1ct_gen:mk_var(asn1ct_name:next(val)),
+ Curr = asn1ct_gen:mk_var(asn1ct_name:curr(val)),
+ asn1ct_name:new(val),
+ F = case Gen of
+ #gen{pack=record} -> transform_to_EXTERNAL1990;
+ #gen{pack=map} -> transform_to_EXTERNAL1990_maps
+ end,
+ [{call,ext,F,[{var,Curr}],{var,Next}}];
+external_imm(_, _) ->
+ [].
+
+enc_extensions(#gen{pack=record}, Value, ExtPos, NumExt, Aligned) ->
+ asn1ct_imm:per_enc_extensions(Value, ExtPos, NumExt, Aligned);
+enc_extensions(#gen{pack=map}, Value, ExtPos, NumExt, Aligned) ->
+ Vars = [{var,lists:concat(["Input@",Pos])} ||
+ Pos <- lists:seq(ExtPos, ExtPos+NumExt-1)],
+ Undefined = atom_to_list(?MISSING_IN_MAP),
+ asn1ct_imm:per_enc_extensions_map(Value, Vars, Undefined, Aligned).
+
+enc_complist(#type{def=Def}) ->
+ case Def of
+ #'SEQUENCE'{tablecinf=TCI,components=CL0,extaddgroup=ExtAddGroup} ->
+ case ExtAddGroup of
+ undefined ->
+ {CL0,TCI};
+ _ when is_integer(ExtAddGroup) ->
+ %% This is a fake SEQUENCE representing an
+ %% ExtensionAdditionGroup. Renumber the textual
+ %% order so we get the right index of the
+ %% components.
+ CL = add_textual_order(CL0),
+ {CL,TCI}
+ end;
+ #'SET'{tablecinf=TCI,components=CL} ->
+ {CL,TCI}
+ end.
+
+enc_table(Gen, #simpletableattributes{objectsetname=ObjectSet,
+ c_name=AttrN,
+ c_index=N,
+ usedclassfield=UniqueFieldName,
+ uniqueclassfield=UniqueFieldName,
+ valueindex=ValueIndex0}, _) ->
+ {Module,ObjSetName} = ObjectSet,
+ #typedef{typespec=#'ObjectSet'{gen=MustGen}} =
+ asn1_db:dbget(Module, ObjSetName),
+ case MustGen of
+ true ->
+ ValueIndex = ValueIndex0 ++ [{N+1,'ASN1_top'}],
+ Val = make_var(val),
+ {ObjSetImm,Dst} = enc_dig_out_value(Gen, ValueIndex, Val),
+ {{AttrN,Dst},ObjSetImm};
+ false ->
+ {false,[]}
+ end;
+enc_table(_Gen, #simpletableattributes{}, _) ->
+ {false,[]};
+enc_table(_Gen, _, #type{tablecinf=TCInf}) ->
+ case TCInf of
+ [{objfun,_}|_] ->
+ %% The simpletableattributes was at an outer
+ %% level and the objfun has been passed through the
+ %% function call.
+ {{"got objfun through args",{var,"ObjFun"}},[]};
+ _ ->
+ {false,[]}
+ end.
+
+enc_optionals(Gen, Optionals) ->
+ Var = make_var(val),
+ enc_optionals_1(Gen, Optionals, Var).
+
+enc_optionals_1(#gen{pack=record}=Gen, [{Pos,DefVals}|T], Var) ->
+ {Imm0,Element} = asn1ct_imm:enc_element(Pos+1, Var),
+ Imm = asn1ct_imm:per_enc_optional(Element, DefVals),
+ [Imm0++Imm|enc_optionals_1(Gen, T, Var)];
+enc_optionals_1(#gen{pack=map}=Gen, [{Pos,DefVals0}|T], V) ->
+ Var = {var,lists:concat(["Input@",Pos])},
+ DefVals = translate_missing_value(Gen, DefVals0),
+ Imm = asn1ct_imm:per_enc_optional(Var, DefVals),
+ [Imm|enc_optionals_1(Gen, T, V)];
+enc_optionals_1(_, [], _) ->
+ [].
+
+enc_map_match(#gen{pack=record}, _Cs) ->
+ [];
+enc_map_match(#gen{pack=map}, Cs0) ->
+ Var0 = "Input",
+ Cs = enc_flatten_components(Cs0),
+ M = [[quote_atom(Name),":=",lists:concat([Var0,"@",Order])] ||
+ #'ComponentType'{prop=mandatory,name=Name,
+ textual_order=Order} <- Cs],
+ Mand = case M of
+ [] ->
+ [];
+ [_|_] ->
+ Patt = {expr,lists:flatten(["#{",lists:join(",", M),"}"])},
+ [{assign,Patt,{var,asn1ct_name:curr(val)}}]
+ end,
+
+ Os0 = [{Name,Order} ||
+ #'ComponentType'{prop=Prop,name=Name,
+ textual_order=Order} <- Cs,
+ Prop =/= mandatory],
+ {var,Val} = make_var(val),
+ F = fun({Name,Order}) ->
+ Var = lists:concat([Var0,"@",Order]),
+ P0 = ["case ",Val," of\n"
+ " #{",quote_atom(Name),":=",Var,"_0} -> ",
+ Var,"_0;\n"
+ " _ -> ",atom_to_list(?MISSING_IN_MAP),"\n"
+ "end"],
+ P = lists:flatten(P0),
+ {assign,{var,Var},P}
+ end,
+ Os = [F(O) || O <- Os0],
+ Mand ++ Os.
+
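A sketch of the matching code that enc_map_match/2 emits for the map representation, assuming a hypothetical SEQUENCE with a mandatory component a (textual order 1) and an OPTIONAL component b (textual order 2) encoded from the variable Val; ?MISSING_IN_MAP stands for the marker macro (presumably defined in asn1_records.hrl), which in the real output appears as its expanded atom:

    #{a := Input@1} = Val,
    Input@2 = case Val of
                  #{b := Input@2_0} -> Input@2_0;
                  _ -> ?MISSING_IN_MAP
              end,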
+enc_flatten_components({Root1,Ext0,Root2}=CL) ->
+ {_,Gs} = extgroup_pos_and_length(CL),
+ Ext = wrap_extensionAdditionGroups(Ext0, Gs),
+ Root1 ++ Root2 ++ [mark_optional(C) || C <- Ext];
+enc_flatten_components({Root,Ext}) ->
+ enc_flatten_components({Root,Ext,[]});
+enc_flatten_components(Cs) ->
+ Cs.
+
+gen_encode_extaddgroup(#gen{pack=record}, CompList) ->
case extgroup_pos_and_length(CompList) of
{extgrouppos,[]} ->
ok;
{extgrouppos,ExtGroupPosLenList} ->
- _ = [do_gen_encode_extaddgroup(G) || G <- ExtGroupPosLenList],
+ _ = [gen_encode_eag_record(G) ||
+ G <- ExtGroupPosLenList],
ok
- end.
+ end;
+gen_encode_extaddgroup(#gen{pack=map}, Cs0) ->
+ Cs = enc_flatten_components(Cs0),
+ gen_encode_eag_map(Cs).
+
+gen_encode_eag_map([#'ComponentType'{name=Group,typespec=Type}|Cs]) ->
+ case Type of
+ #type{def=#'SEQUENCE'{extaddgroup=G,components=GCs0}}
+ when is_integer(G) ->
+ Ns = [N || #'ComponentType'{name=N,prop=mandatory} <- GCs0],
+ test_for_mandatory(Ns, Group),
+ gen_encode_eag_map(Cs);
+ _ ->
+ gen_encode_eag_map(Cs)
+ end;
+gen_encode_eag_map([]) ->
+ ok.
+
+test_for_mandatory([Mand|_], Group) ->
+ emit([{next,val}," = case ",{curr,val}," of",nl,
+ "#{",quote_atom(Mand),":=_} -> ",
+ {curr,val},"#{",{asis,Group},"=>",{curr,val},"};",nl,
+ "#{} -> ",{curr,val},nl,
+ "end,",nl]),
+ asn1ct_name:new(val);
+test_for_mandatory([], _) ->
+ ok.
-do_gen_encode_extaddgroup({ActualGroupPos,GroupVirtualPos,GroupLen}) ->
+gen_encode_eag_record({ActualPos,VirtualPos,Len}) ->
Val = asn1ct_gen:mk_var(asn1ct_name:curr(val)),
- Elements = make_elements(GroupVirtualPos+1,
- Val,
- lists:seq(1, GroupLen)),
- Expr = any_non_value(GroupVirtualPos+1, Val, GroupLen, ""),
+ Elements = get_input_vars(Val, VirtualPos, Len),
+ Expr = any_non_value(Val, VirtualPos, Len),
emit([{next,val}," = case ",Expr," of",nl,
- "false -> setelement(",{asis,ActualGroupPos+1},", ",
+ "false -> setelement(",{asis,ActualPos+1},", ",
{curr,val},", asn1_NOVALUE);",nl,
- "true -> setelement(",{asis,ActualGroupPos+1},", ",
+ "true -> setelement(",{asis,ActualPos+1},", ",
{curr,val},", {extaddgroup,", Elements,"})",nl,
"end,",nl]),
asn1ct_name:new(val).
-any_non_value(_, _, 0, _) ->
+any_non_value(Val, Pos, N) ->
+ L = any_non_value_1(Val, Pos, N),
+ lists:join(" orelse ", L).
+
+any_non_value_1(_, _, 0) ->
[];
-any_non_value(Pos, Val, N, Sep) ->
- Sep ++ [make_element(Pos, Val)," =/= asn1_NOVALUE"] ++
- any_non_value(Pos+1, Val, N-1, [" orelse",nl]).
+any_non_value_1(Val, Pos, N) ->
+ Var = get_input_var(Val, Pos),
+ [Var ++ " =/= asn1_NOVALUE"|any_non_value_1(Val, Pos+1, N-1)].
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% generate decode function for SEQUENCE and SET
@@ -306,55 +408,105 @@ gen_dec_constructed_imm(Erule, Typename, #type{}=D) ->
{DecObjInf,_,_} = ObjSetInfo,
EmitComp = gen_dec_components_call(Erule, Typename, CompList,
DecObjInf, Ext, length(Optionals)),
- EmitRest = fun({AccTerm,AccBytes}) ->
- gen_dec_constructed_imm_2(Erule, Typename,
- CompList,
- ObjSetInfo,
- AccTerm, AccBytes)
- end,
- [EmitExt,EmitOpt|EmitComp++[{safe,EmitRest}]].
+ EmitObjSets = gen_dec_objsets_fun(Erule, ObjSetInfo),
+ EmitPack = fun(_) ->
+ gen_dec_pack(Erule, Typename, CompList)
+ end,
+ RestGroup = {group,[{safe,EmitObjSets},{safe,EmitPack}]},
+ [EmitExt,EmitOpt|EmitComp++[RestGroup]].
+
+gen_dec_objsets_fun(Gen, ObjSetInfo) ->
+ fun({AccTerm,AccBytes}) ->
+ {_,_UniqueFName,ValueIndex} = ObjSetInfo,
+ case {AccTerm,AccBytes} of
+ {[],[]} ->
+ ok;
+ {_,[]} ->
+ ok;
+ {[{ObjSet,LeadingAttr,Term}],ListOfOpenTypes} ->
+ ValueMatch = value_match(Gen, ValueIndex, Term),
+ _ = [begin
+ gen_dec_open_type(Gen, ValueMatch, ObjSet,
+ LeadingAttr, T),
+ emit([com,nl])
+ end || T <- ListOfOpenTypes],
+ ok
+ end
+ end.
-gen_dec_constructed_imm_2(Erule, Typename, CompList,
- ObjSetInfo, AccTerm, AccBytes) ->
- {_,_UniqueFName,ValueIndex} = ObjSetInfo,
- case {AccTerm,AccBytes} of
- {[],[]} ->
- ok;
- {_,[]} ->
- ok;
- {[{ObjSet,LeadingAttr,Term}],ListOfOpenTypes} ->
- ValueMatch = value_match(ValueIndex, Term),
- _ = [begin
- gen_dec_open_type(Erule, ValueMatch, ObjSet,
- LeadingAttr, T),
- emit([com,nl])
- end || T <- ListOfOpenTypes],
- ok
- end,
- %% we don't return named lists any more Cnames = mkcnamelist(CompList),
- demit({"Result = "}), %dbg
- %% return value as record
- RecordName = record_name(Typename),
+gen_dec_pack(Gen, Typename, CompList) ->
case Typename of
['EXTERNAL'] ->
- emit({" OldFormat={'",RecordName,
- "'"}),
- mkvlist(asn1ct_name:all(term)),
- emit({"},",nl}),
- emit([" ASN11994Format =",nl,
- " ",
- {call,ext,transform_to_EXTERNAL1994,
- ["OldFormat"]},com,nl]),
- emit(" {ASN11994Format,");
+ dec_external(Gen, Typename);
_ ->
- emit(["{{'",RecordName,"'"]),
- %% CompList is used here because we don't want
- %% ExtensionAdditionGroups to be wrapped in SEQUENCES when
- %% we are ordering the fields according to textual order
- mkvlist(textual_order(to_encoding_order(CompList),asn1ct_name:all(term))),
- emit("},")
- end,
- emit({{curr,bytes},"}"}).
+ asn1ct_name:new(res),
+ gen_dec_do_pack(Gen, Typename, CompList),
+ emit([com,nl,
+ "{",{curr,res},",",{curr,bytes},"}"])
+ end.
+
+dec_external(#gen{pack=record}=Gen, Typename) ->
+ RecordName = list_to_atom(record_name(Gen, Typename)),
+ All = [{var,Term} || Term <- asn1ct_name:all(term)],
+ Record = [{asis,RecordName}|All],
+ emit(["OldFormat={",lists:join(",", Record),"},",nl,
+ "ASN11994Format =",nl,
+ {call,ext,transform_to_EXTERNAL1994,
+ ["OldFormat"]},com,nl,
+ "{ASN11994Format,",{curr,bytes},"}"]);
+dec_external(#gen{pack=map}, _Typename) ->
+ Vars = asn1ct_name:all(term),
+ Names = ['direct-reference','indirect-reference',
+ 'data-value-descriptor',encoding],
+ Zipped = lists:zip(Names, Vars),
+ MapInit = lists:join(",", [["'",N,"'=>",{var,V}] || {N,V} <- Zipped]),
+ emit(["OldFormat = #{",MapInit,"}",com,nl,
+ "ASN11994Format =",nl,
+ {call,ext,transform_to_EXTERNAL1994_maps,
+ ["OldFormat"]},com,nl,
+ "{ASN11994Format,",{curr,bytes},"}"]).
+
+gen_dec_do_pack(#gen{pack=record}=Gen, TypeName, CompList) ->
+ Zipped0 = zip_components(CompList, asn1ct_name:all(term)),
+ Zipped = textual_order(Zipped0),
+ RecordName = ["'",record_name(Gen, TypeName),"'"],
+ L = [RecordName|[{var,Var} || {_,Var} <- Zipped]],
+ emit([{curr,res}," = {",lists:join(",", L),"}"]);
+gen_dec_do_pack(#gen{pack=map}, _, CompList0) ->
+ CompList = enc_flatten_components(CompList0),
+ Zipped0 = zip_components(CompList, asn1ct_name:all(term)),
+ Zipped = textual_order(Zipped0),
+ PF = fun({#'ComponentType'{prop='OPTIONAL'},_}) -> false;
+ ({_,_}) -> true
+ end,
+ {Mandatory,Optional} = lists:partition(PF, Zipped),
+ L = [[{asis,Name},"=>",{var,Var}] ||
+ {#'ComponentType'{name=Name},Var} <- Mandatory],
+ emit([{curr,res}," = #{",lists:join(",", L),"}"]),
+ gen_dec_map_optional(Optional),
+ gen_dec_merge_maps(asn1ct_name:all(map)).
+
+gen_dec_map_optional([{#'ComponentType'{name=Name},Var}|T]) ->
+ asn1ct_name:new(res),
+ emit([com,nl,
+ {curr,res}," = case ",{var,Var}," of",nl,
+ " asn1_NOVALUE -> ",{prev,res},";",nl,
+ " _ -> ",{prev,res},"#{",{asis,Name},"=>",{var,Var},"}",nl,
+ "end"]),
+ gen_dec_map_optional(T);
+gen_dec_map_optional([]) ->
+ ok.
+
+gen_dec_merge_maps([M|Ms]) ->
+ asn1ct_name:new(res),
+ emit([com,nl,
+ {curr,res}," = maps:merge(",{prev,res},", ",{var,M},")"]),
+ gen_dec_merge_maps(Ms);
+gen_dec_merge_maps([]) ->
+ ok.
+
+quote_atom(A) when is_atom(A) ->
+ io_lib:format("~p", [A]).
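For the decoding direction, a sketch of the result packing emitted by gen_dec_do_pack/3 and gen_dec_map_optional/1 for the map representation, again for a hypothetical SEQUENCE with mandatory a and OPTIONAL b (the Term/Res/Bytes variable names are only illustrative):

    Res1 = #{a => Term1},
    Res2 = case Term2 of
               asn1_NOVALUE -> Res1;
               _ -> Res1#{b => Term2}
           end,
    {Res2,Bytes4}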
%% record_name([TypeName]) -> RecordNameString
%% Construct a record name for the constructed type, ignoring any
@@ -362,10 +514,10 @@ gen_dec_constructed_imm_2(Erule, Typename, CompList,
%% group. Such fake sequences never appear as a top type, and their
%% name always start with "ExtAddGroup".
-record_name(Typename0) ->
+record_name(Gen, Typename0) ->
[TopType|Typename1] = lists:reverse(Typename0),
Typename = filter_ext_add_groups(Typename1, [TopType]),
- lists:concat([get_record_name_prefix(),
+ lists:concat([get_record_name_prefix(Gen),
asn1ct_gen:list2rname(Typename)]).
filter_ext_add_groups([H|T], Acc) when is_atom(H) ->
@@ -379,17 +531,26 @@ filter_ext_add_groups([H|T], Acc) ->
filter_ext_add_groups(T, [H|Acc]);
filter_ext_add_groups([], Acc) -> Acc.
-textual_order([#'ComponentType'{textual_order=undefined}|_],TermList) ->
- TermList;
-textual_order(CompList,TermList) when is_list(CompList) ->
- OrderList = [Ix||#'ComponentType'{textual_order=Ix} <- CompList],
- [Term||{_,Term}<-
- lists:sort(lists:zip(OrderList,
- lists:sublist(TermList,length(OrderList))))];
- %% sublist is just because Termlist can sometimes be longer than
- %% OrderList, which it really shouldn't
-textual_order({Root,Ext},TermList) ->
- textual_order(Root ++ Ext,TermList).
+zip_components({Root,Ext}, Vars) ->
+ zip_components({Root,Ext,[]}, Vars);
+zip_components({R1,Ext0,R2}, Vars) ->
+ Ext = [mark_optional(C) || C <- Ext0],
+ zip_components(R1++R2++Ext, Vars);
+zip_components(Cs, Vars) when is_list(Cs) ->
+ zip_components_1(Cs, Vars).
+
+zip_components_1([#'ComponentType'{}=C|Cs], [V|Vs]) ->
+ [{C,V}|zip_components_1(Cs, Vs)];
+zip_components_1([_|Cs], Vs) ->
+ zip_components_1(Cs, Vs);
+zip_components_1([], []) ->
+ [].
+
+textual_order([{#'ComponentType'{textual_order=undefined},_}|_]=L) ->
+ L;
+textual_order(L0) ->
+ L = [{Ix,P} || {#'ComponentType'{textual_order=Ix},_}=P <- L0],
+ [C || {_,C} <- lists:sort(L)].
to_textual_order({Root,Ext}) ->
{to_textual_order(Root),Ext};
@@ -458,7 +619,7 @@ dec_objset_default(N, _, _, true) ->
end]).
dec_objset_1(Erule, N, {Id,Obj}, RestFields, Typename) ->
- emit([{asis,N},"(Bytes, ",{asis,Id},") ->",nl]),
+ emit([{asis,N},"(Bytes, Id) when Id =:= ",{asis,Id}," ->",nl]),
dec_objset_2(Erule, Obj, RestFields, Typename).
dec_objset_2(Erule, Obj, RestFields0, Typename) ->
@@ -595,8 +756,7 @@ do_gen_decode_sof(Erules, Typename, SeqOrSetOf, D) ->
emit([",",nl,
{asis,F},"(",Num,", ",Buf,ObjFun,", [])"]).
-is_aligned(per) -> true;
-is_aligned(uper) -> false.
+is_aligned(#gen{erule=per,aligned=Aligned}) -> Aligned.
gen_decode_length(Constraint, Erule) ->
emit(["%% Length with constraint ",{asis,Constraint},nl]),
@@ -640,22 +800,7 @@ gen_decode_sof_components(Erule, Name, Typename, SeqOrSetOf, Cont) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-% General and special help functions (not exported)
-
-mkvlist([H|T]) ->
- emit(","),
- mkvlist2([H|T]);
-mkvlist([]) ->
- true.
-mkvlist2([H,T1|T]) ->
- emit({{var,H},","}),
- mkvlist2([T1|T]);
-mkvlist2([H|T]) ->
- emit({{var,H}}),
- mkvlist2(T);
-mkvlist2([]) ->
- true.
-
+%% General and special help functions (not exported)
extensible_dec(CompList) when is_list(CompList) ->
noext;
@@ -728,28 +873,26 @@ gen_dec_optionals(Optionals) ->
{imm,Imm0,E}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% Produce a list with positions (in the Value record) where
-%% there are optional components, start with 2 because first element
-%% is the record name
-
-optionals({L1,Ext,L2}) ->
- Opt1 = optionals(L1,[],2),
- ExtComps = length([C||C = #'ComponentType'{}<-Ext]),
- Opt2 = optionals(L2,[],2+length(L1)+ExtComps),
- Opt1 ++ Opt2;
-optionals({L,_Ext}) -> optionals(L,[],2);
-optionals(L) -> optionals(L,[],2).
-optionals([#'ComponentType'{prop='OPTIONAL'}|Rest], Acc, Pos) ->
- optionals(Rest, [Pos|Acc], Pos+1);
-optionals([#'ComponentType'{typespec=T,prop={'DEFAULT',Val}}|Rest],
- Acc, Pos) ->
+optionals({Root1,Ext,Root2}) ->
+ Opt1 = optionals(Root1, 1),
+ ExtComps = length([C || C = #'ComponentType'{} <- Ext]),
+ Opt2 = optionals(Root2, 1 + length(Root1) + ExtComps),
+ Opt1 ++ Opt2;
+optionals({L,_Ext}) ->
+ optionals(L, 1);
+optionals(L) ->
+ optionals(L, 1).
+
+optionals([#'ComponentType'{prop='OPTIONAL'}|Rest], Pos) ->
+ [{Pos,[asn1_NOVALUE]}|optionals(Rest, Pos+1)];
+optionals([#'ComponentType'{typespec=T,prop={'DEFAULT',Val}}|Cs], Pos) ->
Vals = def_values(T, Val),
- optionals(Rest, [{Pos,Vals}|Acc], Pos+1);
-optionals([#'ComponentType'{}|Rest], Acc, Pos) ->
- optionals(Rest, Acc, Pos+1);
-optionals([], Acc, _) ->
- lists:reverse(Acc).
+ [{Pos,Vals}|optionals(Cs, Pos+1)];
+optionals([#'ComponentType'{}|Rest], Pos) ->
+ optionals(Rest, Pos+1);
+optionals([], _) ->
+ [].
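The rewritten optionals/1 numbers components from 1, since the result no longer needs to leave room for a record tag. For a hypothetical flat component list it now returns, for example:

    %% [#'ComponentType'{name=a,prop=mandatory},
    %%  #'ComponentType'{name=b,prop='OPTIONAL'},
    %%  #'ComponentType'{name=c,prop={'DEFAULT',0}}]
    %% -> [{2,[asn1_NOVALUE]},{3,DefVals}]    %% DefVals = def_values(Type, 0)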
%%%%%%%%%%%%%%%%%%%%%%
%% create_optionality_table(Cs=[#'ComponentType'{textual_order=undefined}|_]) ->
@@ -779,13 +922,6 @@ get_optionality_pos(TextPos,OptTable) ->
no_num
end.
-to_encoding_order(Cs) when is_list(Cs) ->
- Cs;
-to_encoding_order(Cs = {_Root,_Ext}) ->
- Cs;
-to_encoding_order({R1,Ext,R2}) ->
- {R1++R2,Ext}.
-
add_textual_order(Cs) when is_list(Cs) ->
{NewCs,_} = add_textual_order1(Cs,1),
NewCs;
@@ -810,69 +946,77 @@ add_textual_order1(Cs,NumIn) ->
end,
NumIn,Cs).
-gen_enc_components_call(Erule,TopType,{Root,ExtList}, DynamicEnc,Ext) ->
- gen_enc_components_call(Erule,TopType,{Root,ExtList,[]}, DynamicEnc,Ext);
-gen_enc_components_call(Erule,TopType,CL={Root,ExtList,Root2}, DynamicEnc,Ext) ->
- %% The type has extensionmarker
- {Imm0,Rpos} = gen_enc_components_call1(Erule,TopType,Root++Root2,1, DynamicEnc,noext,[]),
+gen_enc_components_call(Erule, TopType, {Root,ExtList}, DynamicEnc, Ext) ->
+ gen_enc_components_call(Erule, TopType, {Root,ExtList,[]}, DynamicEnc, Ext);
+gen_enc_components_call(Erule, TopType, {R1,ExtList0,R2}=CL, DynamicEnc, Ext) ->
+ Root = R1 ++ R2,
+ Imm0 = gen_enc_components_call1(Erule, TopType, Root, DynamicEnc, noext),
ExtImm = case Ext of
{ext,_,ExtNum} when ExtNum > 0 ->
[{var,"Extensions"}];
_ ->
[]
end,
- %handle extensions
{extgrouppos,ExtGroupPosLen} = extgroup_pos_and_length(CL),
- NewExtList = wrap_extensionAdditionGroups(ExtList,ExtGroupPosLen),
- {Imm1,_} = gen_enc_components_call1(Erule,TopType,NewExtList,Rpos,DynamicEnc,Ext,[]),
+ ExtList1 = wrap_extensionAdditionGroups(ExtList0, ExtGroupPosLen),
+ ExtList = [mark_optional(C) || C <- ExtList1],
+ Imm1 = gen_enc_components_call1(Erule, TopType, ExtList, DynamicEnc, Ext),
Imm0 ++ [ExtImm|Imm1];
-gen_enc_components_call(Erule,TopType, CompList, DynamicEnc, Ext) ->
- %% The type has no extensionmarker
- {Imm,_} = gen_enc_components_call1(Erule,TopType,CompList,1,DynamicEnc,Ext,[]),
- Imm.
-
-gen_enc_components_call1(Erule,TopType,
- [C=#'ComponentType'{name=Cname,typespec=Type,prop=Prop}|Rest],
- Tpos,
- DynamicEnc, Ext, Acc) ->
-
- TermNo =
- case C#'ComponentType'.textual_order of
- undefined ->
- Tpos;
- CanonicalNum ->
- CanonicalNum
- end,
- Val = make_var(val),
- {Imm0,Element} = asn1ct_imm:enc_element(TermNo+1, Val),
- Imm1 = gen_enc_line_imm(Erule, TopType, Cname, Type, Element, DynamicEnc, Ext),
- Category = case {Prop,Ext} of
- {'OPTIONAL',_} ->
- optional;
- {{'DEFAULT',DefVal},_} ->
- {default,DefVal};
- {_,{ext,ExtPos,_}} when Tpos >= ExtPos ->
- optional;
- {_,_} ->
- mandatory
- end,
- Imm2 = case Category of
+gen_enc_components_call(Erule, TopType, CompList, DynamicEnc, Ext) ->
+ %% No extension marker.
+ gen_enc_components_call1(Erule, TopType, CompList, DynamicEnc, Ext).
+
+mark_optional(#'ComponentType'{prop=Prop0}=C) ->
+ Prop = case Prop0 of
+ mandatory -> 'OPTIONAL';
+ 'OPTIONAL'=Keep -> Keep;
+ {'DEFAULT',_}=Keep -> Keep
+ end,
+ C#'ComponentType'{prop=Prop};
+mark_optional(Other) ->
+ Other.
+
+gen_enc_components_call1(Gen, TopType, [C|Rest], DynamicEnc, Ext) ->
+ #'ComponentType'{name=Cname,typespec=Type,
+ prop=Prop,textual_order=Num} = C,
+ {Imm0,Element} = enc_fetch_field(Gen, Num, Prop),
+ Imm1 = gen_enc_line_imm(Gen, TopType, Cname, Type,
+ Element, DynamicEnc, Ext),
+ Imm2 = case Prop of
mandatory ->
Imm1;
- optional ->
- asn1ct_imm:enc_absent(Element, [asn1_NOVALUE], Imm1);
- {default,Def} ->
+ 'OPTIONAL' ->
+ enc_absent(Gen, Element, [asn1_NOVALUE], Imm1);
+ {'DEFAULT',Def} ->
DefValues = def_values(Type, Def),
- asn1ct_imm:enc_absent(Element, DefValues, Imm1)
+ enc_absent(Gen, Element, DefValues, Imm1)
end,
Imm = case Imm2 of
[] -> [];
_ -> Imm0 ++ Imm2
end,
- gen_enc_components_call1(Erule, TopType, Rest, Tpos+1, DynamicEnc, Ext, [Imm|Acc]);
-gen_enc_components_call1(_Erule,_TopType,[],Pos,_,_, Acc) ->
- ImmList = lists:reverse(Acc),
- {ImmList,Pos}.
+ [Imm|gen_enc_components_call1(Gen, TopType, Rest, DynamicEnc, Ext)];
+gen_enc_components_call1(_Gen, _TopType, [], _, _) ->
+ [].
+
+enc_absent(Gen, Var, Absent0, Imm) ->
+ Absent = translate_missing_value(Gen, Absent0),
+ asn1ct_imm:enc_absent(Var, Absent, Imm).
+
+translate_missing_value(#gen{pack=record}, Optionals) ->
+ Optionals;
+translate_missing_value(#gen{pack=map}, Optionals) ->
+ case Optionals of
+ [asn1_NOVALUE|T] -> [?MISSING_IN_MAP|T];
+ [asn1_DEFAULT|T] -> [?MISSING_IN_MAP|T];
+ {call,_,_,_} -> Optionals
+ end.
+
+enc_fetch_field(#gen{pack=record}, Num, _Prop) ->
+ Val = make_var(val),
+ asn1ct_imm:enc_element(Num+1, Val);
+enc_fetch_field(#gen{pack=map}, Num, _) ->
+ {[],{var,lists:concat(["Input@",Num])}}.
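enc_fetch_field/3 captures how a component value reaches the encoder: with records it is extracted from the tuple (skipping the record tag), with maps it was already bound to an Input@N variable by the match code above. For a component with textual order 2 (variable names illustrative):

    %% pack=record: the immediate code extracts
    element(3, Val1)
    %% pack=map: the encoder simply reuses
    Input@2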
def_values(#type{def=#'Externaltypereference'{module=Mod,type=Type}}, Def) ->
#typedef{typespec=T} = asn1_db:dbget(Mod, Type),
@@ -1115,27 +1259,31 @@ gen_dec_components_call(Erule, TopType, {Root,ExtList},
DecInfObj, Ext, NumberOfOptionals) ->
gen_dec_components_call(Erule,TopType,{Root,ExtList,[]},
DecInfObj,Ext,NumberOfOptionals);
-gen_dec_components_call(Erule,TopType,CL={Root1,ExtList,Root2},
- DecInfObj,Ext,NumberOfOptionals) ->
+gen_dec_components_call(Gen, TopType, {Root1,ExtList,Root2}=CL,
+ DecInfObj, Ext, NumberOfOptionals) ->
%% The type has extensionmarker
OptTable = create_optionality_table(Root1++Root2),
Init = {ignore,fun(_) -> {[],[]} end},
{EmitRoot,Tpos} =
- gen_dec_comp_calls(Root1++Root2, Erule, TopType, OptTable,
+ gen_dec_comp_calls(Root1++Root2, Gen, TopType, OptTable,
DecInfObj, noext, NumberOfOptionals,
1, []),
- EmitGetExt = gen_dec_get_extension(Erule),
+ EmitGetExt = gen_dec_get_extension(Gen),
{extgrouppos,ExtGroupPosLen} = extgroup_pos_and_length(CL),
NewExtList = wrap_extensionAdditionGroups(ExtList, ExtGroupPosLen),
- {EmitExts,_} = gen_dec_comp_calls(NewExtList, Erule, TopType, OptTable,
+ {EmitExts,_} = gen_dec_comp_calls(NewExtList, Gen, TopType, OptTable,
DecInfObj, Ext, NumberOfOptionals,
Tpos, []),
NumExtsToSkip = ext_length(ExtList),
Finish =
fun(St) ->
emit([{next,bytes},"= "]),
- call(Erule, skipextensions,
- [{curr,bytes},NumExtsToSkip+1,"Extensions"]),
+ Mod = case Gen of
+ #gen{erule=per,aligned=false} -> uper;
+ #gen{erule=per,aligned=true} -> per
+ end,
+ asn1ct_func:call(Mod, skipextensions,
+ [{curr,bytes},NumExtsToSkip+1,"Extensions"]),
asn1ct_name:new(bytes),
St
end,
@@ -1178,7 +1326,7 @@ gen_dec_comp_calls([C|Cs], Erule, TopType, OptTable, DecInfObj,
gen_dec_comp_calls([], _, _, _, _, _, _, Tpos, Acc) ->
{lists:append(lists:reverse(Acc)),Tpos}.
-gen_dec_comp_call(Comp, Erule, TopType, Tpos, OptTable, DecInfObj,
+gen_dec_comp_call(Comp, Gen, TopType, Tpos, OptTable, DecInfObj,
Ext, NumberOfOptionals) ->
#'ComponentType'{typespec=Type,prop=Prop,textual_order=TextPos} = Comp,
Pos = case Ext of
@@ -1219,15 +1367,9 @@ gen_dec_comp_call(Comp, Erule, TopType, Tpos, OptTable, DecInfObj,
_ ->
case Type of
#type{def=#'SEQUENCE'{
- extaddgroup=Number1,
- components=ExtGroupCompList1}} when is_integer(Number1)->
- fun(St) ->
- emit(["{{_,"]),
- emit_extaddgroupTerms(term,ExtGroupCompList1),
- emit(["}"]),
- emit([",",{next,bytes},"} = "]),
- St
- end;
+ extaddgroup=GroupNum,
+ components=CompList}} when is_integer(GroupNum)->
+ dec_match_extadd_fun(Gen, CompList);
_ ->
fun(St) ->
asn1ct_name:new(term),
@@ -1237,9 +1379,9 @@ gen_dec_comp_call(Comp, Erule, TopType, Tpos, OptTable, DecInfObj,
end
end
end,
- {Pre,Post} = comp_call_pre_post(Ext, Prop, Pos, Type, TextPos,
+ {Pre,Post} = comp_call_pre_post(Gen, Ext, Prop, Pos, Type, TextPos,
OptTable, NumberOfOptionals, Ext),
- Lines = gen_dec_seq_line_imm(Erule, TopType, Comp, Tpos, DecInfObj, Ext),
+ Lines = gen_dec_seq_line_imm(Gen, TopType, Comp, Tpos, DecInfObj, Ext),
AdvBuffer = {ignore,fun(St) ->
asn1ct_name:new(bytes),
St
@@ -1247,9 +1389,24 @@ gen_dec_comp_call(Comp, Erule, TopType, Tpos, OptTable, DecInfObj,
[{group,[{safe,Comment},{safe,Preamble}] ++ Pre ++
Lines ++ Post ++ [{safe,AdvBuffer}]}].
-comp_call_pre_post(noext, mandatory, _, _, _, _, _, _) ->
+dec_match_extadd_fun(#gen{pack=record}, CompList) ->
+ fun(St) ->
+ emit(["{{_,"]),
+ emit_extaddgroupTerms(term, CompList),
+ emit(["}"]),
+ emit([",",{next,bytes},"} = "]),
+ St
+ end;
+dec_match_extadd_fun(#gen{pack=map}, _CompList) ->
+ fun(St) ->
+ asn1ct_name:new(map),
+ emit(["{",{curr,map},",",{next,bytes},"} = "]),
+ St
+ end.
+
+comp_call_pre_post(_Gen, noext, mandatory, _, _, _, _, _, _) ->
{[],[]};
-comp_call_pre_post(noext, Prop, _, Type, TextPos,
+comp_call_pre_post(_Gen, noext, Prop, _, Type, TextPos,
OptTable, NumOptionals, Ext) ->
%% OPTIONAL or DEFAULT
OptPos = get_optionality_pos(TextPos, OptTable),
@@ -1273,32 +1430,53 @@ comp_call_pre_post(noext, Prop, _, Type, TextPos,
"end"]),
St
end]};
-comp_call_pre_post({ext,_,_}, Prop, Pos, Type, _, _, _, Ext) ->
+comp_call_pre_post(Gen, {ext,_,_}, Prop, Pos, Type, _, _, _, Ext) ->
%% Extension
{[fun(St) ->
emit(["case Extensions of",nl,
" <<_:",Pos-1,",1:1,_/bitstring>> ->",nl]),
St
end],
- [fun(St) ->
- emit([";",nl,
- "_ ->",nl,
- "{"]),
- case Type of
- #type{def=#'SEQUENCE'{
- extaddgroup=Number2,
- components=ExtGroupCompList2}}
- when is_integer(Number2)->
- emit("{extAddGroup,"),
- gen_dec_extaddGroup_no_val(Ext, Type, ExtGroupCompList2),
- emit("}");
- _ ->
- gen_dec_component_no_val(Ext, Type, Prop)
- end,
- emit([",",{curr,bytes},"}",nl,
- "end"]),
- St
- end]}.
+ [extadd_group_fun(Gen, Prop, Type, Ext)]}.
+
+extadd_group_fun(#gen{pack=record}, Prop, Type, Ext) ->
+ fun(St) ->
+ emit([";",nl,
+ "_ ->",nl,
+ "{"]),
+ case Type of
+ #type{def=#'SEQUENCE'{
+ extaddgroup=Number2,
+ components=ExtGroupCompList2}}
+ when is_integer(Number2)->
+ emit("{extAddGroup,"),
+ gen_dec_extaddGroup_no_val(Ext, Type, ExtGroupCompList2),
+ emit("}");
+ _ ->
+ gen_dec_component_no_val(Ext, Type, Prop)
+ end,
+ emit([",",{curr,bytes},"}",nl,
+ "end"]),
+ St
+ end;
+extadd_group_fun(#gen{pack=map}, Prop, Type, Ext) ->
+ fun(St) ->
+ emit([";",nl,
+ "_ ->",nl,
+ "{"]),
+ case Type of
+ #type{def=#'SEQUENCE'{
+ extaddgroup=Number2,
+ components=Comp}}
+ when is_integer(Number2)->
+ dec_map_extaddgroup_no_val(Ext, Type, Comp);
+ _ ->
+ gen_dec_component_no_val(Ext, Type, Prop)
+ end,
+ emit([",",{curr,bytes},"}",nl,
+ "end"]),
+ St
+ end.
is_mandatory_predef_tab_c(noext, mandatory,
{"got objfun through args","ObjFun"}) ->
@@ -1325,7 +1503,20 @@ gen_dec_component_no_val(_, _, 'OPTIONAL') ->
emit({"asn1_NOVALUE"});
gen_dec_component_no_val({ext,_,_}, _, mandatory) ->
emit({"asn1_NOVALUE"}).
-
+
+dec_map_extaddgroup_no_val(Ext, Type, Comp) ->
+ L0 = [dec_map_extaddgroup_no_val_1(N, P, Ext, Type) ||
+ #'ComponentType'{name=N,prop=P} <- Comp],
+ L = [E || E <- L0, E =/= []],
+ emit(["#{",lists:join(",", L),"}"]).
+
+dec_map_extaddgroup_no_val_1(Name, {'DEFAULT',DefVal0}, _Ext, Type) ->
+ DefVal = asn1ct_gen:conform_value(Type, DefVal0),
+ [Name,"=>",{asis,DefVal}];
+dec_map_extaddgroup_no_val_1(_Name, 'OPTIONAL', _, _) ->
+ [];
+dec_map_extaddgroup_no_val_1(_Name, mandatory, {ext,_,_}, _) ->
+ [].
gen_dec_choice_line(Erule, TopType, Comp, Pre) ->
Imm0 = gen_dec_line_imm(Erule, TopType, Comp, false, Pre),
@@ -1461,29 +1652,29 @@ gen_dec_line_special(Erule, {typefield,_}, _TopType, Comp,
Prop}],PrevSt}
end
end;
-gen_dec_line_special(Erule, Atype, TopType, Comp, DecInfObj) ->
- case gen_dec_line_other(Erule, Atype, TopType, Comp) of
+gen_dec_line_special(Gen, Atype, TopType, Comp, DecInfObj) ->
+ case gen_dec_line_other(Gen, Atype, TopType, Comp) of
Fun when is_function(Fun, 1) ->
fun({BytesVar,PrevSt}) ->
Fun(BytesVar),
- gen_dec_line_dec_inf(Comp, DecInfObj),
+ gen_dec_line_dec_inf(Gen, Comp, DecInfObj),
{[],PrevSt}
end;
Imm0 ->
{imm,Imm0,
fun(Imm, {BytesVar,PrevSt}) ->
asn1ct_imm:dec_code_gen(Imm, BytesVar),
- gen_dec_line_dec_inf(Comp, DecInfObj),
+ gen_dec_line_dec_inf(Gen, Comp, DecInfObj),
{[],PrevSt}
end}
end.
-gen_dec_line_dec_inf(Comp, DecInfObj) ->
+gen_dec_line_dec_inf(Gen, Comp, DecInfObj) ->
#'ComponentType'{name=Cname} = Comp,
case DecInfObj of
{Cname,{_,_OSet,_UniqueFName,ValIndex}} ->
Term = asn1ct_gen:mk_var(asn1ct_name:curr(term)),
- ValueMatch = value_match(ValIndex,Term),
+ ValueMatch = value_match(Gen, ValIndex, Term),
emit([",",nl,
"ObjFun = ",ValueMatch]);
_ ->
@@ -1705,20 +1896,17 @@ gen_dec_choice2(Erule, TopType, [H0|T], Pos, Sep0, Pre) ->
gen_dec_choice2(Erule, TopType, T, Pos+1, Sep, Pre);
gen_dec_choice2(_, _, [], _, _, _) -> ok.
-make_elements(I,Val,ExtCnames) ->
- make_elements(I,Val,ExtCnames,[]).
+get_input_vars(Val, I, N) ->
+ L = get_input_vars_1(Val, I, N),
+ lists:join(",", L).
-make_elements(I,Val,[_ExtCname],Acc)-> % the last one, no comma needed
- Element = make_element(I, Val),
- make_elements(I+1,Val,[],[Element|Acc]);
-make_elements(I,Val,[_ExtCname|Rest],Acc)->
- Element = make_element(I, Val),
- make_elements(I+1,Val,Rest,[", ",Element|Acc]);
-make_elements(_I,_,[],Acc) ->
- lists:reverse(Acc).
+get_input_vars_1(_Val, _I, 0) ->
+ [];
+get_input_vars_1(Val, I, N) ->
+ [get_input_var(Val, I)|get_input_vars_1(Val, I+1, N-1)].
-make_element(I, Val) ->
- lists:flatten(io_lib:format("element(~w, ~s)", [I,Val])).
+get_input_var(Val, I) ->
+ lists:flatten(io_lib:format("element(~w, ~s)", [I+1,Val])).
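The new helpers return ready-made accessor strings; for instance (arguments illustrative):

    %% get_input_var("Val1", 3)     -> "element(4, Val1)"
    %% get_input_vars("Val1", 3, 2) -> ["element(4, Val1)",",","element(5, Val1)"]
    %%                                 (emitted as "element(4, Val1),element(5, Val1)")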
emit_extaddgroupTerms(VarSeries,[_]) ->
asn1ct_name:new(VarSeries),
@@ -1735,62 +1923,66 @@ flat_complist({Rl1,El,Rl2}) -> Rl1 ++ El ++ Rl2;
flat_complist({Rl,El}) -> Rl ++ El;
flat_complist(CompList) -> CompList.
-%%wrap_compList({Root1,Ext,Root2}) ->
-%% {Root1,wrap_extensionAdditionGroups(Ext),Root2};
-%%wrap_compList({Root1,Ext}) ->
-%% {Root1,wrap_extensionAdditionGroups(Ext)};
-%%wrap_compList(CompList) ->
-%% CompList.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% Will convert all componentTypes following 'ExtensionAdditionGroup'
+%% Convert all componentTypes following 'ExtensionAdditionGroup'
%% up to the matching 'ExtensionAdditionGroupEnd' into one componentType
-%% of type SEQUENCE with the componentTypes as components
+%% of type SEQUENCE with the componentTypes as components.
%%
-wrap_extensionAdditionGroups(ExtCompList,ExtGroupPosLen) ->
- wrap_extensionAdditionGroups(ExtCompList,ExtGroupPosLen,[],0,0).
+wrap_extensionAdditionGroups(ExtCompList, ExtGroupPosLen) ->
+ wrap_eags(ExtCompList, ExtGroupPosLen, 0, 0).
-wrap_extensionAdditionGroups([{'ExtensionAdditionGroup',_Number}|Rest],
- [{ActualPos,_,_}|ExtGroupPosLenRest],Acc,_ExtAddGroupDiff,ExtGroupNum) ->
- {ExtGroupCompList,['ExtensionAdditionGroupEnd'|Rest2]} =
+wrap_eags([{'ExtensionAdditionGroup',_Number}|T0],
+ [{ActualPos,_,_}|Gs], _ExtAddGroupDiff, ExtGroupNum) ->
+ {ExtGroupCompList,['ExtensionAdditionGroupEnd'|T]} =
lists:splitwith(fun(#'ComponentType'{}) -> true;
(_) -> false
- end,
- Rest),
- wrap_extensionAdditionGroups(Rest2,ExtGroupPosLenRest,
- [#'ComponentType'{
- name=list_to_atom("ExtAddGroup"++
- integer_to_list(ExtGroupNum+1)),
- typespec=#type{def=#'SEQUENCE'{
- extaddgroup=ExtGroupNum+1,
- components=ExtGroupCompList}},
- textual_order = ActualPos,
- prop='OPTIONAL'}|Acc],length(ExtGroupCompList)-1,
- ExtGroupNum+1);
-wrap_extensionAdditionGroups([H=#'ComponentType'{textual_order=Tord}|T],
- ExtAddGrpLenPos,Acc,ExtAddGroupDiff,ExtGroupNum) when is_integer(Tord) ->
- wrap_extensionAdditionGroups(T,ExtAddGrpLenPos,[H#'ComponentType'{
- textual_order=Tord - ExtAddGroupDiff}|Acc],ExtAddGroupDiff,ExtGroupNum);
-wrap_extensionAdditionGroups([H|T],ExtAddGrpLenPos,Acc,ExtAddGroupDiff,ExtGroupNum) ->
- wrap_extensionAdditionGroups(T,ExtAddGrpLenPos,[H|Acc],ExtAddGroupDiff,ExtGroupNum);
-wrap_extensionAdditionGroups([],_,Acc,_,_) ->
- lists:reverse(Acc).
-
-value_match(Index,Value) when is_atom(Value) ->
- value_match(Index,atom_to_list(Value));
-value_match([],Value) ->
+ end, T0),
+ Name = list_to_atom(lists:concat(["ExtAddGroup",ExtGroupNum+1])),
+ Seq = #type{def=#'SEQUENCE'{extaddgroup=ExtGroupNum+1,
+ components=ExtGroupCompList}},
+ Comp = #'ComponentType'{name=Name,
+ typespec=Seq,
+ textual_order=ActualPos,
+ prop='OPTIONAL'},
+ [Comp|wrap_eags(T, Gs, length(ExtGroupCompList)-1, ExtGroupNum+1)];
+wrap_eags([#'ComponentType'{textual_order=Tord}=H|T],
+ ExtAddGrpLenPos, ExtAddGroupDiff, ExtGroupNum)
+ when is_integer(Tord) ->
+ Comp = H#'ComponentType'{textual_order=Tord - ExtAddGroupDiff},
+ [Comp|wrap_eags(T, ExtAddGrpLenPos, ExtAddGroupDiff, ExtGroupNum)];
+wrap_eags([H|T], ExtAddGrpLenPos, ExtAddGroupDiff, ExtGroupNum) ->
+ [H|wrap_eags(T, ExtAddGrpLenPos, ExtAddGroupDiff, ExtGroupNum)];
+wrap_eags([], _, _, _) ->
+ [].
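A sketch of what the renamed wrap_eags/4 does to a hypothetical extension list, where CompA/CompB/CompC stand for ordinary #'ComponentType'{} records:

    %% [{'ExtensionAdditionGroup',1}, CompA, CompB,
    %%  'ExtensionAdditionGroupEnd', CompC]
    %% becomes
    %% [#'ComponentType'{name='ExtAddGroup1',
    %%                   typespec=#type{def=#'SEQUENCE'{extaddgroup=1,
    %%                                                  components=[CompA,CompB]}},
    %%                   textual_order=ActualPos,
    %%                   prop='OPTIONAL'},
    %%  CompC]   %% CompC's textual_order shifted down by length([CompA,CompB])-1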
+
+value_match(#gen{pack=record}, VIs, Value) ->
+ value_match_rec(VIs, Value);
+value_match(#gen{pack=map}, VIs, Value) ->
+ value_match_map(VIs, Value).
+
+value_match_rec([], Value) ->
Value;
-value_match([{VI,_}|VIs],Value) ->
- value_match1(Value,VIs,lists:concat(["element(",VI,","]),1).
-value_match1(Value,[],Acc,Depth) ->
- Acc ++ Value ++ lists:concat(lists:duplicate(Depth,")"));
-value_match1(Value,[{VI,_}|VIs],Acc,Depth) ->
- value_match1(Value,VIs,Acc++lists:concat(["element(",VI,","]),Depth+1).
-
-enc_dig_out_value([], Value) ->
+value_match_rec([{VI,_}|VIs], Value0) ->
+ Value = value_match_rec(VIs, Value0),
+ lists:concat(["element(",VI,", ",Value,")"]).
+
+value_match_map([], Value) ->
+ Value;
+value_match_map([{_,Name}|VIs], Value0) ->
+ Value = value_match_map(VIs, Value0),
+ lists:concat(["maps:get(",Name,", ",Value,")"]).
+
+enc_dig_out_value(_Gen, [], Value) ->
{[],Value};
-enc_dig_out_value([{N,_}|T], Value) ->
- {Imm0,Dst0} = enc_dig_out_value(T, Value),
+enc_dig_out_value(#gen{pack=record}=Gen, [{N,_}|T], Value) ->
+ {Imm0,Dst0} = enc_dig_out_value(Gen, T, Value),
{Imm,Dst} = asn1ct_imm:enc_element(N, Dst0),
+ {Imm0++Imm,Dst};
+enc_dig_out_value(#gen{pack=map}, [{N,'ASN1_top'}], _Value) ->
+ {[],{var,lists:concat(["Input@",N-1])}};
+enc_dig_out_value(#gen{pack=map}=Gen, [{_,Name}|T], Value) ->
+ {Imm0,Dst0} = enc_dig_out_value(Gen, T, Value),
+ {Imm,Dst} = asn1ct_imm:enc_maps_get(Name, Dst0),
{Imm0++Imm,Dst}.
make_var(Base) ->
diff --git a/lib/asn1/src/asn1ct_eval_ext.funcs b/lib/asn1/src/asn1ct_eval_ext.funcs
index 5761901f89..01c67e7b5a 100644
--- a/lib/asn1/src/asn1ct_eval_ext.funcs
+++ b/lib/asn1/src/asn1ct_eval_ext.funcs
@@ -1 +1,2 @@
{ext,transform_to_EXTERNAL1994,1}.
+{ext,transform_to_EXTERNAL1994_maps,1}.
diff --git a/lib/asn1/src/asn1ct_gen.erl b/lib/asn1/src/asn1ct_gen.erl
index bfaffa13bf..9943bd056a 100644
--- a/lib/asn1/src/asn1ct_gen.erl
+++ b/lib/asn1/src/asn1ct_gen.erl
@@ -34,10 +34,10 @@
insert_once/2,
ct_gen_module/1,
index2suffix/1,
- get_record_name_prefix/0,
+ get_record_name_prefix/1,
conform_value/2,
named_bitstring_value/2]).
--export([pgen/5,
+-export([pgen/3,
mk_var/1,
un_hyphen_var/1]).
-export([gen_encode_constructed/4,
@@ -45,23 +45,19 @@
-define(SUPPRESSION_FUNC, 'dialyzer-suppressions').
+
%% pgen(Outfile, Erules, Module, TypeOrVal, Options)
-%% Generate Erlang module (.erl) and (.hrl) file corresponding to an ASN.1 module
-%% .hrl file is only generated if necessary
-%% Erules = per | ber
-%% Module = atom()
-%% TypeOrVal = {TypeList,ValueList}
-%% TypeList = ValueList = [atom()]
-%% Options = [Options] from asn1ct:compile()
-
-pgen(OutFile,Erules,Module,TypeOrVal,Options) ->
- pgen_module(OutFile,Erules,Module,TypeOrVal,Options,true).
-
-
-pgen_module(OutFile,Erules,Module,
- TypeOrVal = {Types,_Values,_Ptypes,_Classes,_Objects,_ObjectSets},
- Options,Indent) ->
- N2nConvEnums = [CName|| {n2n,CName} <- get(encoding_options)],
+%% Generate Erlang module (.erl) and (.hrl) file corresponding to
+%% an ASN.1 module. The .hrl file is only generated if necessary.
+
+-spec pgen(Outfile, Gen, Code) -> 'ok' when
+ Outfile :: any(),
+ Gen :: #gen{},
+ Code :: #abst{}.
+
+pgen(OutFile, #gen{options=Options}=Gen, Code) ->
+ #abst{name=Module,types=Types} = Code,
+ N2nConvEnums = [CName|| {n2n,CName} <- Options],
case N2nConvEnums -- Types of
[] ->
ok;
@@ -69,30 +65,30 @@ pgen_module(OutFile,Erules,Module,
exit({"Non existing ENUMERATION types used in n2n option",
UnmatchedTypes})
end,
- put(outfile,OutFile),
- HrlGenerated = pgen_hrl(Erules,Module,TypeOrVal,Options,Indent),
+ put(outfile, OutFile),
+ put(currmod, Module),
+ HrlGenerated = pgen_hrl(Gen, Code),
asn1ct_name:start(),
ErlFile = lists:concat([OutFile,".erl"]),
_ = open_output_file(ErlFile),
asn1ct_func:start_link(),
- gen_head(Erules,Module,HrlGenerated),
- pgen_exports(Erules,Module,TypeOrVal),
- pgen_dispatcher(Erules,Module,TypeOrVal),
+ gen_head(Gen, Module, HrlGenerated),
+ pgen_exports(Gen, Code),
+ pgen_dispatcher(Gen, Types),
pgen_info(),
- pgen_typeorval(Erules,Module,N2nConvEnums,TypeOrVal),
- pgen_partial_incomplete_decode(Erules),
-% gen_vars(asn1_db:mod_to_vars(Module)),
-% gen_tag_table(AllTypes),
+ pgen_typeorval(Gen, N2nConvEnums, Code),
+ pgen_partial_incomplete_decode(Gen),
emit([nl,
"%%%",nl,
"%%% Run-time functions.",nl,
"%%%",nl]),
- dialyzer_suppressions(Erules),
+ dialyzer_suppressions(Gen),
Fd = get(gen_file_out),
asn1ct_func:generate(Fd),
close_output_file(),
_ = erase(outfile),
- asn1ct:verbose("--~p--~n",[{generated,ErlFile}],Options).
+ asn1ct:verbose("--~p--~n", [{generated,ErlFile}], Gen),
+ ok.
dialyzer_suppressions(Erules) ->
emit([nl,
@@ -100,20 +96,27 @@ dialyzer_suppressions(Erules) ->
Rtmod = ct_gen_module(Erules),
Rtmod:dialyzer_suppressions(Erules).
-pgen_typeorval(Erules,Module,N2nConvEnums,{Types,Values,_Ptypes,_Classes,Objects,ObjectSets}) ->
+pgen_typeorval(Erules, N2nConvEnums, Code) ->
+ #abst{name=Module,types=Types,values=Values,
+ objects=Objects,objsets=ObjectSets} = Code,
Rtmod = ct_gen_module(Erules),
pgen_types(Rtmod,Erules,N2nConvEnums,Module,Types),
- pgen_values(Erules,Module,Values),
+ pgen_values(Values, Module),
pgen_objects(Rtmod,Erules,Module,Objects),
pgen_objectsets(Rtmod,Erules,Module,ObjectSets),
pgen_partial_decode(Rtmod,Erules,Module).
-pgen_values(_,_,[]) ->
- true;
-pgen_values(Erules,Module,[H|T]) ->
- Valuedef = asn1_db:dbget(Module,H),
- gen_value(Valuedef),
- pgen_values(Erules,Module,T).
+%% Generate a function 'V'/0 for each Value V defined in the ASN.1 module.
+%% The function returns the value in an Erlang representation which can be
+%% used as input to the runtime encode functions.
+
+pgen_values([H|T], Module) ->
+ #valuedef{name=Name,value=Value} = asn1_db:dbget(Module, H),
+ emit([{asis,Name},"() ->",nl,
+ {asis,Value},".",nl,nl]),
+ pgen_values(T, Module);
+pgen_values([], _) ->
+ ok.
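For example, a hypothetical ASN.1 value assignment "maxAge INTEGER ::= 120" would now make pgen_values/2 emit roughly:

    maxAge() ->
    120.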
pgen_types(_, _, _, _, []) ->
true;
@@ -181,10 +184,10 @@ pgen_objectsets(Rtmod,Erules,Module,[H|T]) ->
Rtmod:gen_objectset_code(Erules,TypeDef),
pgen_objectsets(Rtmod,Erules,Module,T).
-pgen_partial_decode(Rtmod,Erule,Module) when Erule == ber ->
- pgen_partial_inc_dec(Rtmod,Erule,Module),
- pgen_partial_dec(Rtmod,Erule,Module);
-pgen_partial_decode(_,_,_) ->
+pgen_partial_decode(Rtmod, #gen{erule=ber}=Gen, Module) ->
+ pgen_partial_inc_dec(Rtmod, Gen, Module),
+ pgen_partial_dec(Rtmod, Gen, Module);
+pgen_partial_decode(_, _, _) ->
ok.
pgen_partial_inc_dec(Rtmod,Erules,Module) ->
@@ -225,7 +228,7 @@ pgen_partial_inc_dec1(Rtmod,Erules,Module,[P|Ps]) ->
pgen_partial_inc_dec1(_,_,_,[]) ->
ok.
-gen_partial_inc_dec_refed_funcs(Rtmod,Erule) when Erule == ber ->
+gen_partial_inc_dec_refed_funcs(Rtmod, #gen{erule=ber}=Gen) ->
case asn1ct:next_refed_func() of
[] ->
ok;
@@ -233,19 +236,17 @@ gen_partial_inc_dec_refed_funcs(Rtmod,Erule) when Erule == ber ->
TypeDef = asn1_db:dbget(M,Name),
asn1ct:update_gen_state(namelist,Pattern),
asn1ct:set_current_sindex(Sindex),
- Rtmod:gen_inc_decode(Erule,TypeDef),
- gen_dec_part_inner_constr(Rtmod,Erule,TypeDef,[Name]),
- gen_partial_inc_dec_refed_funcs(Rtmod,Erule);
+ Rtmod:gen_inc_decode(Gen, TypeDef),
+ gen_dec_part_inner_constr(Rtmod, Gen, TypeDef, [Name]),
+ gen_partial_inc_dec_refed_funcs(Rtmod, Gen);
{Name,Sindex,Pattern,Type} ->
TypeDef=#typedef{name=asn1ct_gen:list2name(Name),typespec=Type},
asn1ct:update_gen_state(namelist,Pattern),
asn1ct:set_current_sindex(Sindex),
- Rtmod:gen_inc_decode(Erule,TypeDef),
- gen_dec_part_inner_constr(Rtmod,Erule,TypeDef,Name),
- gen_partial_inc_dec_refed_funcs(Rtmod,Erule)
- end;
-gen_partial_inc_dec_refed_funcs(_,_) ->
- ok.
+ Rtmod:gen_inc_decode(Gen, TypeDef),
+ gen_dec_part_inner_constr(Rtmod, Gen, TypeDef, Name),
+ gen_partial_inc_dec_refed_funcs(Rtmod, Gen)
+ end.
pgen_partial_dec(_Rtmod,Erules,_Module) ->
Type_pattern = asn1ct:get_gen_state_field(type_pattern),
@@ -254,16 +255,16 @@ pgen_partial_dec(_Rtmod,Erules,_Module) ->
pgen_partial_types(Erules,Type_pattern),
ok.
-pgen_partial_types(Erules,Type_pattern) ->
- % until this functionality works on all back-ends
- Options = get(encoding_options),
- case lists:member(asn1config,Options) of
+pgen_partial_types(#gen{options=Options}=Gen, TypePattern) ->
+ %% until this functionality works on all back-ends
+ case lists:member(asn1config, Options) of
true ->
- pgen_partial_types1(Erules,Type_pattern);
- _ -> ok
+ pgen_partial_types1(Gen, TypePattern);
+ false ->
+ ok
end.
-
+
pgen_partial_types1(Erules,[{FuncName,[TopType|RestTypes]}|Rest]) ->
% emit([FuncName,"(Bytes) ->",nl]),
CurrMod = get(currmod),
@@ -441,7 +442,8 @@ pgen_partial_incomplete_decode(Erule) ->
_ ->
ok
end.
-pgen_partial_incomplete_decode1(ber) ->
+
+pgen_partial_incomplete_decode1(#gen{erule=ber}) ->
case asn1ct:read_config_data(partial_incomplete_decode) of
undefined ->
ok;
@@ -451,7 +453,7 @@ pgen_partial_incomplete_decode1(ber) ->
GeneratedFs= asn1ct:get_gen_state_field(gen_refed_funcs),
% io:format("GeneratedFs :~n~p~n",[GeneratedFs]),
gen_part_decode_funcs(GeneratedFs,0);
-pgen_partial_incomplete_decode1(_) -> ok.
+pgen_partial_incomplete_decode1(#gen{}) -> ok.
emit_partial_incomplete_decode({FuncName,TopType,Pattern}) ->
TypePattern = asn1ct:get_gen_state_field(inc_type_pattern),
@@ -578,18 +580,6 @@ un_hyphen_var([H|T]) ->
un_hyphen_var([]) ->
[].
-%% Generate value functions ***************
-%% ****************************************
-%% Generates a function 'V'/0 for each Value V defined in the ASN.1 module
-%% the function returns the value in an Erlang representation which can be
-%% used as input to the runtime encode functions
-
-gen_value(Value) when is_record(Value,valuedef) ->
-%% io:format(" ~w ",[Value#valuedef.name]),
- emit({"'",Value#valuedef.name,"'() ->",nl}),
- V = Value#valuedef.value,
- emit([{asis,V},".",nl,nl]).
-
gen_encode_constructed(Erules,Typename,InnerType,D) when is_record(D,type) ->
Rtmod = ct_constructed_module(Erules),
case InnerType of
@@ -654,78 +644,32 @@ gen_decode_constructed(Erules,Typename,InnerType,D) when is_record(D,typedef) ->
gen_decode_constructed(Erules,Typename,InnerType,D#typedef.typespec).
-pgen_exports(Erules,_Module,{Types,Values,_,_,Objects,ObjectSets}) ->
- emit(["-export([encoding_rule/0,bit_string_format/0,",nl,
+pgen_exports(#gen{options=Options}=Gen, Code) ->
+ #abst{types=Types,values=Values,objects=Objects,objsets=ObjectSets} = Code,
+ emit(["-export([encoding_rule/0,maps/0,bit_string_format/0,",nl,
" legacy_erlang_types/0]).",nl]),
emit(["-export([",{asis,?SUPPRESSION_FUNC},"/1]).",nl]),
- case Types of
- [] -> ok;
- _ ->
- emit({"-export([",nl}),
- case Erules of
- ber ->
- gen_exports1(Types,"enc_",2);
- _ ->
- gen_exports1(Types,"enc_",1)
- end,
- emit({"-export([",nl}),
- case Erules of
- ber ->
- gen_exports1(Types, "dec_", 2);
- _ ->
- gen_exports1(Types, "dec_", 1)
- end
- end,
- case [X || {n2n,X} <- get(encoding_options)] of
- [] -> ok;
- A2nNames ->
- emit({"-export([",nl}),
- gen_exports1(A2nNames,"name2num_",1),
- emit({"-export([",nl}),
- gen_exports1(A2nNames,"num2name_",1)
- end,
- case Values of
- [] -> ok;
- _ ->
- emit({"-export([",nl}),
- gen_exports1(Values,"",0)
+ case Gen of
+ #gen{erule=ber} ->
+ gen_exports(Types, "enc_", 2),
+ gen_exports(Types, "dec_", 2),
+ gen_exports(Objects, "enc_", 3),
+ gen_exports(Objects, "dec_", 3),
+ gen_exports(ObjectSets, "getenc_", 1),
+ gen_exports(ObjectSets, "getdec_", 1);
+ #gen{erule=per} ->
+ gen_exports(Types, "enc_", 1),
+ gen_exports(Types, "dec_", 1)
end,
- case Objects of
- [] -> ok;
- _ ->
- case erule(Erules) of
- per ->
- ok;
- ber ->
- emit({"-export([",nl}),
- gen_exports1(Objects,"enc_",3),
- emit({"-export([",nl}),
- gen_exports1(Objects,"dec_",3)
- end
- end,
- case ObjectSets of
- [] -> ok;
- _ ->
- case erule(Erules) of
- per ->
- ok;
- ber ->
- emit({"-export([",nl}),
- gen_exports1(ObjectSets, "getenc_",1),
- emit({"-export([",nl}),
- gen_exports1(ObjectSets, "getdec_",1)
- end
- end,
- emit({"-export([info/0]).",nl}),
- gen_partial_inc_decode_exports(),
- gen_selected_decode_exports(),
- emit({nl,nl}).
-gen_exports1([F1,F2|T],Prefix,Arity) ->
- emit({"'",Prefix,F1,"'/",Arity,com,nl}),
- gen_exports1([F2|T],Prefix,Arity);
-gen_exports1([Flast|_T],Prefix,Arity) ->
- emit({"'",Prefix,Flast,"'/",Arity,nl,"]).",nl,nl}).
+ A2nNames = [X || {n2n,X} <- Options],
+ gen_exports(A2nNames, "name2num_", 1),
+ gen_exports(A2nNames, "num2name_", 1),
+
+ gen_exports(Values, "", 0),
+ emit(["-export([info/0]).",nl,nl]),
+ gen_partial_inc_decode_exports(),
+ gen_selected_decode_exports().
gen_partial_inc_decode_exports() ->
case {asn1ct:read_config_data(partial_incomplete_decode),
@@ -734,66 +678,54 @@ gen_partial_inc_decode_exports() ->
ok;
{_,undefined} ->
ok;
- {Data,_} ->
- gen_partial_inc_decode_exports(Data),
- emit(["-export([decode_part/2]).",nl])
+ {Data0,_} ->
+ Data = [Name || {Name,_,_} <- Data0],
+ gen_exports(Data, "", 1),
+ emit(["-export([decode_part/2]).",nl,nl])
end.
-gen_partial_inc_decode_exports([]) ->
- ok;
-gen_partial_inc_decode_exports([{Name,_,_}|Rest]) ->
- emit(["-export([",Name,"/1"]),
- gen_partial_inc_decode_exports1(Rest);
-gen_partial_inc_decode_exports([_|Rest]) ->
- gen_partial_inc_decode_exports(Rest).
-
-gen_partial_inc_decode_exports1([]) ->
- emit(["]).",nl]);
-gen_partial_inc_decode_exports1([{Name,_,_}|Rest]) ->
- emit([", ",Name,"/1"]),
- gen_partial_inc_decode_exports1(Rest);
-gen_partial_inc_decode_exports1([_|Rest]) ->
- gen_partial_inc_decode_exports1(Rest).
gen_selected_decode_exports() ->
case asn1ct:get_gen_state_field(type_pattern) of
undefined ->
ok;
- L ->
- gen_selected_decode_exports(L)
+ Data0 ->
+ Data = [Name || {Name,_} <- Data0],
+ gen_exports(Data, "", 1)
end.
-gen_selected_decode_exports([]) ->
+gen_exports([], _Prefix, _Arity) ->
ok;
-gen_selected_decode_exports([{FuncName,_}|Rest]) ->
- emit(["-export([",FuncName,"/1"]),
- gen_selected_decode_exports1(Rest).
-gen_selected_decode_exports1([]) ->
- emit(["]).",nl,nl]);
-gen_selected_decode_exports1([{FuncName,_}|Rest]) ->
- emit([",",nl," ",FuncName,"/1"]),
- gen_selected_decode_exports1(Rest).
-
-pgen_dispatcher(Erules,_Module,{[],_Values,_,_,_Objects,_ObjectSets}) ->
+gen_exports([_|_]=L0, Prefix, Arity) ->
+ FF = fun(F0) ->
+ F = list_to_atom(lists:concat([Prefix,F0])),
+ [{asis,F},"/",Arity]
+ end,
+ L = lists:join(",\n", [FF(F) || F <- L0]),
+ emit(["-export([",nl,
+ L,nl,
+ "]).",nl,nl]).
+
+pgen_dispatcher(Erules, []) ->
gen_info_functions(Erules);
-pgen_dispatcher(Erules,_Module,{Types,_Values,_,_,_Objects,_ObjectSets}) ->
+pgen_dispatcher(Gen, Types) ->
emit(["-export([encode/2,decode/2]).",nl,nl]),
- gen_info_functions(Erules),
+ gen_info_functions(Gen),
- Options = get(encoding_options),
+ Options = Gen#gen.options,
NoFinalPadding = lists:member(no_final_padding, Options),
NoOkWrapper = proplists:get_bool(no_ok_wrapper, Options),
- Call = case Erules of
- per ->
- asn1ct_func:need({Erules,complete,1}),
+ Call = case Gen of
+ #gen{erule=per,aligned=true} ->
+ asn1ct_func:need({per,complete,1}),
"complete(encode_disp(Type, Data))";
- ber ->
+ #gen{erule=ber} ->
"iolist_to_binary(element(1, encode_disp(Type, Data)))";
- uper when NoFinalPadding == true ->
- asn1ct_func:need({Erules,complete_NFP,1}),
+ #gen{erule=per,aligned=false} when NoFinalPadding ->
+ asn1ct_func:need({uper,complete_NFP,1}),
"complete_NFP(encode_disp(Type, Data))";
- uper ->
- asn1ct_func:need({Erules,complete,1}),
+ #gen{erule=per,aligned=false} ->
+ asn1ct_func:need({uper,complete,1}),
"complete(encode_disp(Type, Data))"
end,
@@ -809,36 +741,36 @@ pgen_dispatcher(Erules,_Module,{Types,_Values,_,_,_Objects,_ObjectSets}) ->
end,
emit([nl,nl]),
- Return_rest = proplists:get_bool(undec_rest, Options),
- Data = case {Erules,Return_rest} of
- {ber,true} -> "Data0";
- _ -> "Data"
+ ReturnRest = proplists:get_bool(undec_rest, Gen#gen.options),
+ Data = case Gen#gen.erule =:= ber andalso ReturnRest of
+ true -> "Data0";
+ false -> "Data"
end,
- emit(["decode(Type,",Data,") ->",nl]),
+ emit(["decode(Type, ",Data,") ->",nl]),
DecWrap =
- case {Erules,Return_rest} of
- {ber,false} ->
+ case {Gen,ReturnRest} of
+ {#gen{erule=ber},false} ->
asn1ct_func:need({ber,ber_decode_nif,1}),
"element(1, ber_decode_nif(Data))";
- {ber,true} ->
+ {#gen{erule=ber},true} ->
asn1ct_func:need({ber,ber_decode_nif,1}),
emit(["{Data,Rest} = ber_decode_nif(Data0),",nl]),
"Data";
- _ ->
+ {_,_} ->
"Data"
end,
emit([case NoOkWrapper of
false -> "try";
true -> "case"
end, " decode_disp(Type, ",DecWrap,") of",nl]),
- case erule(Erules) of
- ber ->
+ case Gen of
+ #gen{erule=ber} ->
emit([" Result ->",nl]);
- per ->
+ #gen{erule=per} ->
emit([" {Result,Rest} ->",nl])
end,
- case Return_rest of
+ case ReturnRest of
false -> result_line(NoOkWrapper, ["Result"]);
true -> result_line(NoOkWrapper, ["Result","Rest"])
end,
@@ -849,18 +781,11 @@ pgen_dispatcher(Erules,_Module,{Types,_Values,_,_,_Objects,_ObjectSets}) ->
emit([nl,"end.",nl,nl])
end,
- gen_decode_partial_incomplete(Erules),
+ gen_decode_partial_incomplete(Gen),
+ gen_partial_inc_dispatcher(Gen),
- case Erules of
- ber ->
- gen_dispatcher(Types,"encode_disp","enc_",""),
- gen_dispatcher(Types,"decode_disp","dec_",""),
- gen_partial_inc_dispatcher();
- _PerOrPer_bin ->
- gen_dispatcher(Types,"encode_disp","enc_",""),
- gen_dispatcher(Types,"decode_disp","dec_","")
- end,
- emit([nl,nl]).
+ gen_dispatcher(Types, "encode_disp", "enc_"),
+ gen_dispatcher(Types, "decode_disp", "dec_").
result_line(NoOkWrapper, Items) ->
S = [" "|case NoOkWrapper of
@@ -885,15 +810,26 @@ try_catch() ->
" end",nl,
"end."].
-gen_info_functions(Erules) ->
+gen_info_functions(Gen) ->
+ Erule = case Gen of
+ #gen{erule=ber} -> ber;
+ #gen{erule=per,aligned=false} -> uper;
+ #gen{erule=per,aligned=true} -> per
+ end,
+ Maps = case Gen of
+ #gen{pack=record} -> false;
+ #gen{pack=map} -> true
+ end,
emit(["encoding_rule() -> ",
- {asis,Erules},".",nl,nl,
+ {asis,Erule},".",nl,nl,
+ "maps() -> ",
+ {asis,Maps},".",nl,nl,
"bit_string_format() -> ",
{asis,asn1ct:get_bit_string_format()},".",nl,nl,
"legacy_erlang_types() -> ",
{asis,asn1ct:use_legacy_types()},".",nl,nl]).
-gen_decode_partial_incomplete(ber) ->
+gen_decode_partial_incomplete(#gen{erule=ber}) ->
case {asn1ct:read_config_data(partial_incomplete_decode),
asn1ct:get_gen_state_field(inc_type_pattern)} of
{undefined,_} ->
@@ -931,10 +867,10 @@ gen_decode_partial_incomplete(ber) ->
EmitCaseClauses(),
emit([".",nl,nl])
end;
-gen_decode_partial_incomplete(_Erule) ->
+gen_decode_partial_incomplete(#gen{}) ->
ok.
-gen_partial_inc_dispatcher() ->
+gen_partial_inc_dispatcher(#gen{erule=ber}) ->
case {asn1ct:read_config_data(partial_incomplete_decode),
asn1ct:get_gen_state_field(inc_type_pattern)} of
{undefined,_} ->
@@ -944,7 +880,9 @@ gen_partial_inc_dispatcher() ->
{Data1,Data2} ->
% io:format("partial_incomplete_decode: ~p~ninc_type_pattern: ~p~n",[Data,Data2]),
gen_partial_inc_dispatcher(Data1, Data2, "")
- end.
+ end;
+gen_partial_inc_dispatcher(#gen{}) ->
+ ok.
gen_partial_inc_dispatcher([{FuncName,TopType,_Pattern}|Rest], TypePattern, Sep) ->
TPattern =
@@ -968,12 +906,18 @@ gen_partial_inc_dispatcher([{FuncName,TopType,_Pattern}|Rest], TypePattern, Sep)
gen_partial_inc_dispatcher([], _, _) ->
emit([".",nl]).
-gen_dispatcher([F1,F2|T],FuncName,Prefix,ExtraArg) ->
- emit([FuncName,"('",F1,"',Data) -> '",Prefix,F1,"'(Data",ExtraArg,")",";",nl]),
- gen_dispatcher([F2|T],FuncName,Prefix,ExtraArg);
-gen_dispatcher([Flast|_T],FuncName,Prefix,ExtraArg) ->
- emit([FuncName,"('",Flast,"',Data) -> '",Prefix,Flast,"'(Data",ExtraArg,")",";",nl]),
- emit([FuncName,"(","Type",",_Data) -> exit({error,{asn1,{undefined_type,Type}}}).",nl,nl,nl]).
+gen_dispatcher(L, DispFunc, Prefix) ->
+ gen_dispatcher_1(L, DispFunc, Prefix),
+ emit([DispFunc,"(","Type",", _Data) ->"
+ " exit({error,{asn1,{undefined_type,Type}}}).",nl,nl]).
+
+gen_dispatcher_1([F|T], FuncName, Prefix) ->
+ Func = list_to_atom(lists:concat([Prefix,F])),
+ emit([FuncName,"(",{asis,F},", Data) -> ",
+ {asis,Func},"(Data)",";",nl]),
+ gen_dispatcher_1(T, FuncName, Prefix);
+gen_dispatcher_1([], _, _) ->
+ ok.
pgen_info() ->
emit(["info() ->",nl,
@@ -1092,22 +1036,21 @@ open_output_file(F) ->
close_output_file() ->
ok = file:close(erase(gen_file_out)).
-pgen_hrl(Erules,Module,TypeOrVal,Options,_Indent) ->
- put(currmod,Module),
- {Types,Values,Ptypes,_,_,_} = TypeOrVal,
+pgen_hrl(#gen{pack=record}=Gen, Code) ->
+ #abst{name=Module,types=Types,values=Values,ptypes=Ptypes} = Code,
Ret =
- case pgen_hrltypes(Erules,Module,Ptypes++Types,0) of
+ case pgen_hrltypes(Gen, Module, Ptypes++Types, 0) of
0 ->
case Values of
[] ->
0;
_ ->
- open_hrl(get(outfile),get(currmod)),
- pgen_macros(Erules,Module,Values),
+ open_hrl(get(outfile), Module),
+ pgen_macros(Gen, Module, Values),
1
end;
X ->
- pgen_macros(Erules,Module,Values),
+ pgen_macros(Gen, Module, Values),
X
end,
case Ret of
@@ -1119,62 +1062,61 @@ pgen_hrl(Erules,Module,TypeOrVal,Options,_Indent) ->
close_output_file(),
asn1ct:verbose("--~p--~n",
[{generated,lists:concat([get(outfile),".hrl"])}],
- Options),
+ Gen),
Y
- end.
+ end;
+pgen_hrl(#gen{pack=map}, _) ->
+ 0.
pgen_macros(_,_,[]) ->
true;
-pgen_macros(Erules,Module,[H|T]) ->
- Valuedef = asn1_db:dbget(Module,H),
- gen_macro(Valuedef),
- pgen_macros(Erules,Module,T).
+pgen_macros(Gen, Module, [H|T]) ->
+ Valuedef = asn1_db:dbget(Module, H),
+ gen_macro(Gen, Valuedef),
+ pgen_macros(Gen, Module, T).
pgen_hrltypes(_,_,[],NumRecords) ->
NumRecords;
-pgen_hrltypes(Erules,Module,[H|T],NumRecords) ->
-% io:format("records = ~p~n",NumRecords),
- Typedef = asn1_db:dbget(Module,H),
- AddNumRecords = gen_record(Typedef,NumRecords),
- pgen_hrltypes(Erules,Module,T,NumRecords+AddNumRecords).
+pgen_hrltypes(Gen, Module, [H|T], NumRecords) ->
+ Typedef = asn1_db:dbget(Module, H),
+ AddNumRecords = gen_record(Gen, Typedef, NumRecords),
+ pgen_hrltypes(Gen, Module, T, NumRecords+AddNumRecords).
%% Generates a macro for value Value defined in the ASN.1 module
-gen_macro(Value) when is_record(Value,valuedef) ->
- Prefix = get_macro_name_prefix(),
- emit({"-define('",Prefix,Value#valuedef.name,"', ",
- {asis,Value#valuedef.value},").",nl}).
+gen_macro(Gen, #valuedef{name=Name,value=Value}) ->
+ Prefix = get_macro_name_prefix(Gen),
+ emit(["-define('",Prefix,Name,"', ",{asis,Value},").",nl]).
%% Generate record functions **************
%% Generates an Erlang record for each named and unnamed SEQUENCE and SET in the ASN.1
%% module. If no SEQUENCE or SET is found there is no .hrl file generated
-gen_record(Tdef,NumRecords) when is_record(Tdef,typedef) ->
+gen_record(Gen, #typedef{}=Tdef, NumRecords) ->
Name = [Tdef#typedef.name],
Type = Tdef#typedef.typespec,
- gen_record(type,Name,Type,NumRecords);
-
-gen_record(Tdef,NumRecords) when is_record(Tdef,ptypedef) ->
+ gen_record(Gen, type, Name, Type, NumRecords);
+gen_record(Gen, #ptypedef{}=Tdef, NumRecords) ->
Name = [Tdef#ptypedef.name],
Type = Tdef#ptypedef.typespec,
- gen_record(ptype,Name,Type,NumRecords).
-
-gen_record(TorPtype,Name,[#'ComponentType'{name=Cname,typespec=Type}|T],Num) ->
- Num2 = gen_record(TorPtype,[Cname|Name],Type,Num),
- gen_record(TorPtype,Name,T,Num2);
-gen_record(TorPtype,Name,{Clist1,Clist2},Num)
+ gen_record(Gen, ptype, Name, Type, NumRecords).
+
+gen_record(Gen, TorPtype, Name,
+ [#'ComponentType'{name=Cname,typespec=Type}|T], Num) ->
+ Num2 = gen_record(Gen, TorPtype, [Cname|Name], Type, Num),
+ gen_record(Gen, TorPtype, Name, T, Num2);
+gen_record(Gen, TorPtype, Name, {Clist1,Clist2}, Num)
when is_list(Clist1), is_list(Clist2) ->
- gen_record(TorPtype,Name,Clist1++Clist2,Num);
-gen_record(TorPtype,Name,{Clist1,EClist,Clist2},Num)
+ gen_record(Gen, TorPtype, Name, Clist1++Clist2, Num);
+gen_record(Gen, TorPtype, Name, {Clist1,EClist,Clist2}, Num)
when is_list(Clist1), is_list(EClist), is_list(Clist2) ->
- gen_record(TorPtype,Name,Clist1++EClist++Clist2,Num);
-gen_record(TorPtype,Name,[_|T],Num) -> % skip EXTENSIONMARK
- gen_record(TorPtype,Name,T,Num);
-gen_record(_TorPtype,_Name,[],Num) ->
+ gen_record(Gen, TorPtype, Name, Clist1++EClist++Clist2, Num);
+gen_record(Gen, TorPtype, Name, [_|T], Num) -> % skip EXTENSIONMARK
+ gen_record(Gen, TorPtype, Name, T, Num);
+gen_record(_Gen, _TorPtype, _Name, [], Num) ->
Num;
-
-gen_record(TorPtype,Name,Type,Num) when is_record(Type,type) ->
+gen_record(Gen, TorPtype, Name, #type{}=Type, Num) ->
Def = Type#type.def,
Rec = case Def of
Seq when is_record(Seq,'SEQUENCE') ->
@@ -1209,127 +1151,103 @@ gen_record(TorPtype,Name,Type,Num) when is_record(Type,type) ->
0 -> open_hrl(get(outfile),get(currmod));
_ -> true
end,
- Prefix = get_record_name_prefix(),
- emit({"-record('",Prefix,list2name(Name),"',{",nl}),
- RootList = case CompList of
- _ when is_list(CompList) ->
- CompList;
- {Rl,_} -> Rl;
- {Rl1,_Ext,_Rl2} -> Rl1
- end,
- gen_record2(Name,'SEQUENCE',RootList),
- NewCompList =
+ do_gen_record(Gen, Name, CompList),
+ NewCompList =
case CompList of
{CompList1,[]} ->
- emit({"}). % with extension mark",nl,nl}),
CompList1;
{Tr,ExtensionList2} ->
- case Tr of
- [] -> true;
- _ -> emit({",",nl})
- end,
- emit({"%% with extensions",nl}),
- gen_record2(Name, 'SEQUENCE', ExtensionList2,
- "", ext),
- emit({"}).",nl,nl}),
Tr ++ ExtensionList2;
{Rootl1,Extl,Rootl2} ->
- case Rootl1 =/= [] andalso Extl++Rootl2 =/= [] of
- true -> emit([com]);
- false -> ok
- end,
- case Rootl1 of
- [_|_] -> emit([nl]);
- [] -> ok
- end,
- emit(["%% with extensions",nl]),
- gen_record2(Name,'SEQUENCE',Extl,"",ext),
- case Extl =/= [] andalso Rootl2 =/= [] of
- true -> emit([com]);
- false -> ok
- end,
- case Extl of
- [_|_] -> emit([nl]);
- [] -> ok
- end,
- emit(["%% end of extensions",nl]),
- gen_record2(Name,'SEQUENCE',Rootl2,"",noext),
- emit(["}).",nl,nl]),
Rootl1++Extl++Rootl2;
- _ ->
- emit({"}).",nl,nl}),
+ _ ->
CompList
end,
- gen_record(TorPtype,Name,NewCompList,Num+1);
+ gen_record(Gen, TorPtype, Name, NewCompList, Num+1);
{inner,{'CHOICE', CompList}} ->
- gen_record(TorPtype,Name,CompList,Num);
+ gen_record(Gen, TorPtype, Name, CompList, Num);
{NewName,{_, CompList}} ->
- gen_record(TorPtype,NewName,CompList,Num)
+ gen_record(Gen, TorPtype, NewName, CompList, Num)
end;
-gen_record(_,_,_,NumRecords) -> % skip CLASS etc for now.
+gen_record(_, _, _, _, NumRecords) -> % skip CLASS etc for now.
NumRecords.
-
-gen_head(Erules,Mod,Hrl) ->
- Options = get(encoding_options),
- case Erules of
- per ->
- emit(["%% Generated by the Erlang ASN.1 PER-"
- "compiler version, utilizing bit-syntax:",
- asn1ct:vsn(),nl]);
- ber ->
- emit(["%% Generated by the Erlang ASN.1 BER_V2-"
- "compiler version, utilizing bit-syntax:",
- asn1ct:vsn(),nl]);
- uper ->
- emit(["%% Generated by the Erlang ASN.1 UNALIGNED"
- " PER-compiler version, utilizing bit-syntax:",
- asn1ct:vsn(),nl])
+
+do_gen_record(Gen, Name, CL0) ->
+ CL = case CL0 of
+ {Root,[]} ->
+ Root ++ [{comment,"with extension mark"}];
+ {Root,Ext} ->
+ Root ++ [{comment,"with extensions"}] ++
+ only_components(Ext);
+ {Root1,Ext,Root2} ->
+ Root1 ++ [{comment,"with extensions"}] ++
+ only_components(Ext) ++
+ [{comment,"end of extensions"}] ++ Root2;
+ _ when is_list(CL0) ->
+ CL0
+ end,
+ Prefix = get_record_name_prefix(Gen),
+ emit(["-record('",Prefix,list2name(Name),"', {"] ++
+ do_gen_record_1(CL) ++
+ [nl,"}).",nl,nl]).
+
+only_components(CL) ->
+ [C || #'ComponentType'{}=C <- CL].
+
+do_gen_record_1([#'ComponentType'{name=Name,prop=Prop}|T]) ->
+ Val = case Prop of
+ 'OPTIONAL' ->
+ " = asn1_NOVALUE";
+ {'DEFAULT',_} ->
+ " = asn1_DEFAULT";
+ _ ->
+ []
+ end,
+ Com = case needs_trailing_comma(T) of
+ true -> [com];
+ false -> []
end,
- emit({"%% Purpose: encoder and decoder to the types in mod ",Mod,nl,nl}),
- emit({"-module('",Mod,"').",nl}),
- put(currmod,Mod),
- emit({"-compile(nowarn_unused_vars).",nl}),
- emit({"-dialyzer(no_improper_lists).",nl}),
+ [nl," ",{asis,Name},Val,Com|do_gen_record_1(T)];
+do_gen_record_1([{comment,Text}|T]) ->
+ [nl," %% ",Text|do_gen_record_1(T)];
+do_gen_record_1([]) ->
+ [].
+
+needs_trailing_comma([#'ComponentType'{}|_]) -> true;
+needs_trailing_comma([_|T]) -> needs_trailing_comma(T);
+needs_trailing_comma([]) -> false.
+
+gen_head(#gen{options=Options}=Gen, Mod, Hrl) ->
+ Name = case Gen of
+ #gen{erule=per,aligned=false} ->
+ "PER (unaligned)";
+ #gen{erule=per,aligned=true} ->
+ "PER (aligned)";
+ #gen{erule=ber} ->
+ "BER"
+ end,
+ emit(["%% Generated by the Erlang ASN.1 ",Name,
+ " compiler. Version: ",asn1ct:vsn(),nl,
+ "%% Purpose: Encoding and decoding of the types in ",
+ Mod,".",nl,nl,
+ "-module('",Mod,"').",nl,
+ "-compile(nowarn_unused_vars).",nl,
+ "-dialyzer(no_improper_lists).",nl]),
case Hrl of
0 -> ok;
- _ -> emit({"-include(\"",Mod,".hrl\").",nl})
+ _ -> emit(["-include(\"",Mod,".hrl\").",nl])
end,
emit(["-asn1_info([{vsn,'",asn1ct:vsn(),"'},",nl,
" {module,'",Mod,"'},",nl,
" {options,",io_lib:format("~p",[Options]),"}]).",nl,nl]).
-
-
-gen_hrlhead(Mod) ->
- emit({"%% Generated by the Erlang ASN.1 compiler version:",asn1ct:vsn(),nl}),
- emit({"%% Purpose: Erlang record definitions for each named and unnamed",nl}),
- emit({"%% SEQUENCE and SET, and macro definitions for each value",nl}),
- emit({"%% definition,in module ",Mod,nl,nl}),
- emit({nl,nl}).
-gen_record2(Name,SeqOrSet,Comps) ->
- gen_record2(Name,SeqOrSet,Comps,"",noext).
-gen_record2(_Name,_SeqOrSet,[],_Com,_Extension) ->
- true;
-gen_record2(_Name,_SeqOrSet,[H = #'ComponentType'{name=Cname}],Com,Extension) ->
- emit(Com),
- emit({asis,Cname}),
- gen_record_default(H, Extension);
-gen_record2(Name,SeqOrSet,[H = #'ComponentType'{name=Cname}|T],Com, Extension) ->
- emit(Com),
- emit({asis,Cname}),
- gen_record_default(H, Extension),
- gen_record2(Name,SeqOrSet,T,", ", Extension);
-gen_record2(Name,SeqOrSet,[_|T],Com,Extension) ->
- %% skip EXTENSIONMARK, ExtensionAdditionGroup and other markers
- gen_record2(Name,SeqOrSet,T,Com,Extension).
-
-gen_record_default(#'ComponentType'{prop='OPTIONAL'}, _)->
- emit(" = asn1_NOVALUE");
-gen_record_default(#'ComponentType'{prop={'DEFAULT',_}}, _)->
- emit(" = asn1_DEFAULT");
-gen_record_default(_, _) ->
- true.
+gen_hrlhead(Mod) ->
+ emit(["%% Generated by the Erlang ASN.1 compiler. Version: ",
+ asn1ct:vsn(),nl,
+ "%% Purpose: Erlang record definitions for each named and unnamed",nl,
+ "%% SEQUENCE and SET, and macro definitions for each value",nl,
+ "%% definition in module ",Mod,".",nl,nl]).
%% May only be a list or a two-tuple.
to_textual_order({Root,Ext}) ->
@@ -1585,27 +1503,19 @@ constructed_suffix('SEQUENCE OF',_) ->
constructed_suffix('SET OF',_) ->
'SETOF'.
-erule(ber) -> ber;
-erule(per) -> per;
-erule(uper) -> per.
-
index2suffix(0) ->
"";
index2suffix(N) ->
lists:concat(["_",N]).
-ct_gen_module(ber) ->
+ct_gen_module(#gen{erule=ber}) ->
asn1ct_gen_ber_bin_v2;
-ct_gen_module(per) ->
- asn1ct_gen_per;
-ct_gen_module(uper) ->
+ct_gen_module(#gen{erule=per}) ->
asn1ct_gen_per.
-ct_constructed_module(ber) ->
+ct_constructed_module(#gen{erule=ber}) ->
asn1ct_constructed_ber_bin_v2;
-ct_constructed_module(per) ->
- asn1ct_constructed_per;
-ct_constructed_module(uper) ->
+ct_constructed_module(#gen{erule=per}) ->
asn1ct_constructed_per.
get_constraint(C,Key) ->
@@ -1617,19 +1527,9 @@ get_constraint(C,Key) ->
{value,Cnstr} ->
Cnstr
end.
-
-get_record_name_prefix() ->
- case lists:keysearch(record_name_prefix,1,get(encoding_options)) of
- false ->
- "";
- {value,{_,Prefix}} ->
- Prefix
- end.
-get_macro_name_prefix() ->
- case lists:keysearch(macro_name_prefix,1,get(encoding_options)) of
- false ->
- "";
- {value,{_,Prefix}} ->
- Prefix
- end.
+get_record_name_prefix(#gen{rec_prefix=Prefix}) ->
+ Prefix.
+
+get_macro_name_prefix(#gen{macro_prefix=Prefix}) ->
+ Prefix.
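
The rewritten gen_exports/3 above collects every name for a given prefix and emits a single -export attribute, joining the quoted name/arity pairs with ",\n". A minimal standalone sketch of the same technique (module and names are illustrative, not the compiler's own code):

    -module(export_sketch).
    -export([format_exports/3]).

    %% Build iodata for one -export attribute, e.g.
    %% format_exports(['Type1','Type2'], "enc_", 3) yields
    %%   -export([
    %%   'enc_Type1'/3,
    %%   'enc_Type2'/3
    %%   ]).
    format_exports([], _Prefix, _Arity) ->
        [];
    format_exports(Names, Prefix, Arity) ->
        Entries = [io_lib:format("'~s~s'/~p", [Prefix, N, Arity]) || N <- Names],
        ["-export([\n", lists:join(",\n", Entries), "\n]).\n"].
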
diff --git a/lib/asn1/src/asn1ct_gen_ber_bin_v2.erl b/lib/asn1/src/asn1ct_gen_ber_bin_v2.erl
index b884d14b0d..6c6d4193f3 100644
--- a/lib/asn1/src/asn1ct_gen_ber_bin_v2.erl
+++ b/lib/asn1/src/asn1ct_gen_ber_bin_v2.erl
@@ -1200,11 +1200,13 @@ gen_objset_enc(Erules, ObjSetName, UniqueName,
{no_mod,no_name} ->
gen_inlined_enc_funs(Fields, ClFields, ObjSetName, Val, NthObj);
{CurrMod,Name} ->
- emit(["'getenc_",ObjSetName,"'(",{asis,Val},") ->",nl,
+ emit(["'getenc_",ObjSetName,"'(Id) when Id =:= ",
+ {asis,Val}," ->",nl,
" fun 'enc_",Name,"'/3;",nl]),
{[],NthObj};
{ModuleName,Name} ->
- emit(["'getenc_",ObjSetName,"'(",{asis,Val},") ->",nl]),
+ emit(["'getenc_",ObjSetName,"'(Id) when Id =:= ",
+ {asis,Val}," ->",nl]),
emit_ext_fun(enc,ModuleName,Name),
emit([";",nl]),
{[],NthObj};
@@ -1382,11 +1384,13 @@ gen_objset_dec(Erules, ObjSName, UniqueName, [{ObjName,Val,Fields}|T],
{no_mod,no_name} ->
gen_inlined_dec_funs(Fields,ClFields,ObjSName,Val,NthObj);
{CurrMod,Name} ->
- emit(["'getdec_",ObjSName,"'(",{asis,Val},") ->",nl,
+ emit(["'getdec_",ObjSName,"'(Id) when Id =:= ",
+ {asis,Val}," ->",nl,
" fun 'dec_",Name,"'/3;", nl]),
NthObj;
{ModuleName,Name} ->
- emit(["'getdec_",ObjSName,"'(",{asis,Val},") ->",nl]),
+ emit(["'getdec_",ObjSName,"'(Id) when Id =:= ",
+ {asis,Val}," ->",nl]),
emit_ext_fun(dec,ModuleName,Name),
emit([";",nl]),
NthObj;
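
The getenc_/getdec_ clauses above now compare the identifying value with an =:= guard instead of matching it as a literal argument pattern. One plausible motivation (an assumption, not stated in the patch) is that a value represented as a map cannot be matched exactly by a pattern: a map pattern also accepts maps with extra keys, while =:= requires the exact term. A small standalone illustration:

    -module(guard_vs_pattern).
    -export([by_pattern/1, by_guard/1]).

    %% A map pattern matches any map containing at least the given keys.
    by_pattern(#{id := 1}) -> matched;
    by_pattern(_) -> no_match.

    %% An =:= guard accepts only the exact term.
    by_guard(Id) when Id =:= #{id => 1} -> matched;
    by_guard(_) -> no_match.

    %% by_pattern(#{id => 1, extra => x}) -> matched
    %% by_guard(#{id => 1, extra => x})   -> no_match
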
diff --git a/lib/asn1/src/asn1ct_gen_check.erl b/lib/asn1/src/asn1ct_gen_check.erl
index abe77dd0cb..ccc62a3ce3 100644
--- a/lib/asn1/src/asn1ct_gen_check.erl
+++ b/lib/asn1/src/asn1ct_gen_check.erl
@@ -21,45 +21,51 @@
%%
-module(asn1ct_gen_check).
--export([emit/3]).
+-export([emit/4]).
-import(asn1ct_gen, [emit/1]).
-include("asn1_records.hrl").
-emit(Type, Default, Value) ->
+emit(Gen, Type, Default, Value) ->
Key = {Type,Default},
- Gen = fun(Fd, Name) ->
- file:write(Fd, gen(Name, Type, Default))
- end,
+ DoGen = fun(Fd, Name) ->
+ file:write(Fd, gen(Gen, Name, Type, Default))
+ end,
emit(" case "),
- asn1ct_func:call_gen("is_default_", Key, Gen, [Value]),
+ asn1ct_func:call_gen("is_default_", Key, DoGen, [Value]),
emit([" of",nl,
"true -> {[],0};",nl,
"false ->",nl]).
-gen(Name, #type{def=T}, Default) ->
+gen(#gen{pack=Pack}=Gen, Name, #type{def=T}, Default) ->
+ DefMarker = case Pack of
+ record -> "asn1_DEFAULT";
+ map -> atom_to_list(?MISSING_IN_MAP)
+ end,
NameStr = atom_to_list(Name),
- [NameStr,"(asn1_DEFAULT) ->\n",
- "true;\n"|case do_gen(T, Default) of
- {literal,Literal} ->
- [NameStr,"(",term2str(Literal),") ->\n","true;\n",
- NameStr,"(_) ->\n","false.\n\n"];
- {exception,Func,Args} ->
- [NameStr,"(Value) ->\n",
- "try ",Func,"(Value",arg2str(Args),") of\n",
- "_ -> true\n"
- "catch throw:false -> false\n"
- "end.\n\n"]
- end].
+ [NameStr,"(",DefMarker,") ->\n",
+ "true;\n"|
+ case do_gen(Gen, T, Default) of
+ {literal,Literal} ->
+ [NameStr,"(Def) when Def =:= ",term2str(Literal)," ->\n",
+ "true;\n",
+ NameStr,"(_) ->\n","false.\n\n"];
+ {exception,Func,Args} ->
+ [NameStr,"(Value) ->\n",
+ "try ",Func,"(Value",arg2str(Args),") of\n",
+ "_ -> true\n"
+ "catch throw:false -> false\n"
+ "end.\n\n"]
+ end].
-do_gen(_, asn1_NOVALUE) ->
+do_gen(_Gen, _, asn1_NOVALUE) ->
{literal,asn1_NOVALUE};
-do_gen(#'Externaltypereference'{module=M,type=T}, Default) ->
+do_gen(Gen, #'Externaltypereference'{module=M,type=T}, Default) ->
#typedef{typespec=#type{def=Td}} = asn1_db:dbget(M, T),
- do_gen(Td, Default);
-do_gen('BOOLEAN', Default) ->
+ do_gen(Gen, Td, Default);
+do_gen(_Gen, 'BOOLEAN', Default) ->
{literal,Default};
-do_gen({'BIT STRING',[]}, Default) ->
+do_gen(_Gen, {'BIT STRING',[]}, Default) ->
true = is_bitstring(Default), %Assertion.
case asn1ct:use_legacy_types() of
false ->
@@ -67,17 +73,17 @@ do_gen({'BIT STRING',[]}, Default) ->
true ->
{exception,need(check_legacy_bitstring, 2),[Default]}
end;
-do_gen({'BIT STRING',[_|_]=NBL}, Default) ->
+do_gen(_Gen, {'BIT STRING',[_|_]=NBL}, Default) ->
do_named_bitstring(NBL, Default);
-do_gen({'ENUMERATED',_}, Default) ->
+do_gen(_Gen, {'ENUMERATED',_}, Default) ->
{literal,Default};
-do_gen('INTEGER', Default) ->
+do_gen(_Gen, 'INTEGER', Default) ->
{literal,Default};
-do_gen({'INTEGER',NNL}, Default) ->
+do_gen(_Gen, {'INTEGER',NNL}, Default) ->
{exception,need(check_int, 3),[Default,NNL]};
-do_gen('NULL', Default) ->
+do_gen(_Gen, 'NULL', Default) ->
{literal,Default};
-do_gen('OCTET STRING', Default) ->
+do_gen(_Gen, 'OCTET STRING', Default) ->
true = is_binary(Default), %Assertion.
case asn1ct:use_legacy_types() of
false ->
@@ -85,34 +91,34 @@ do_gen('OCTET STRING', Default) ->
true ->
{exception,need(check_octetstring, 2),[Default]}
end;
-do_gen('OBJECT IDENTIFIER', Default0) ->
+do_gen(_Gen, 'OBJECT IDENTIFIER', Default0) ->
Default = pre_process_oid(Default0),
{exception,need(check_objectidentifier, 2),[Default]};
-do_gen({'CHOICE',Cs}, Default) ->
+do_gen(Gen, {'CHOICE',Cs}, Default) ->
{Tag,Value} = Default,
[Type] = [Type || #'ComponentType'{name=T,typespec=Type} <- Cs,
T =:= Tag],
- case do_gen(Type#type.def, Value) of
+ case do_gen(Gen, Type#type.def, Value) of
{literal,Lit} ->
{literal,{Tag,Lit}};
{exception,Func0,Args} ->
Key = {Tag,Func0,Args},
- Gen = fun(Fd, Name) ->
- S = gen_choice(Name, Tag, Func0, Args),
- ok = file:write(Fd, S)
+ DoGen = fun(Fd, Name) ->
+ S = gen_choice(Name, Tag, Func0, Args),
+ ok = file:write(Fd, S)
end,
- Func = asn1ct_func:call_gen("is_default_choice", Key, Gen),
+ Func = asn1ct_func:call_gen("is_default_choice", Key, DoGen),
{exception,atom_to_list(Func),[]}
end;
-do_gen(#'SEQUENCE'{components=Cs}, Default) ->
- do_seq_set(Cs, Default);
-do_gen({'SEQUENCE OF',Type}, Default) ->
- do_sof(Type, Default);
-do_gen(#'SET'{components=Cs}, Default) ->
- do_seq_set(Cs, Default);
-do_gen({'SET OF',Type}, Default) ->
- do_sof(Type, Default);
-do_gen(Type, Default) ->
+do_gen(Gen, #'SEQUENCE'{components=Cs}, Default) ->
+ do_seq_set(Gen, Cs, Default);
+do_gen(Gen, {'SEQUENCE OF',Type}, Default) ->
+ do_sof(Gen, Type, Default);
+do_gen(Gen, #'SET'{components=Cs}, Default) ->
+ do_seq_set(Gen, Cs, Default);
+do_gen(Gen, {'SET OF',Type}, Default) ->
+ do_sof(Gen, Type, Default);
+do_gen(_Gen, Type, Default) ->
case asn1ct_gen:unify_if_string(Type) of
restrictedstring ->
{exception,need(check_restrictedstring, 2),[Default]};
@@ -136,39 +142,58 @@ do_named_bitstring(_, Default) when is_bitstring(Default) ->
end,
{exception,need(Func, 3),[Default,bit_size(Default)]}.
-do_seq_set(Cs0, Default) ->
+do_seq_set(#gen{pack=record}=Gen, Cs0, Default) ->
Tag = element(1, Default),
Cs1 = [T || #'ComponentType'{typespec=T} <- Cs0],
- Cs = components(Cs1, tl(tuple_to_list(Default))),
+ Cs = components(Gen, Cs1, tl(tuple_to_list(Default))),
case are_all_literals(Cs) of
true ->
Literal = list_to_tuple([Tag|[L || {literal,L} <- Cs]]),
{literal,Literal};
false ->
Key = {Cs,Default},
- Gen = fun(Fd, Name) ->
- S = gen_components(Name, Tag, Cs),
- ok = file:write(Fd, S)
- end,
- Func = asn1ct_func:call_gen("is_default_cs_", Key, Gen),
+ DoGen = fun(Fd, Name) ->
+ S = gen_components(Name, Tag, Cs),
+ ok = file:write(Fd, S)
+ end,
+ Func = asn1ct_func:call_gen("is_default_cs_", Key, DoGen),
+ {exception,atom_to_list(Func),[]}
+ end;
+do_seq_set(#gen{pack=map}=Gen, Cs0, Default) ->
+ Cs1 = [{N,T} || #'ComponentType'{name=N,typespec=T} <- Cs0],
+ Cs = map_components(Gen, Cs1, Default),
+ AllLiterals = lists:all(fun({_,{literal,_}}) -> true;
+ ({_,_}) -> false
+ end, Cs),
+ case AllLiterals of
+ true ->
+ L = [{Name,Lit} || {Name,{literal,Lit}} <- Cs],
+ {literal,maps:from_list(L)};
+ false ->
+ Key = {Cs,Default},
+ DoGen = fun(Fd, Name) ->
+ S = gen_map_components(Name, Cs),
+ ok = file:write(Fd, S)
+ end,
+ Func = asn1ct_func:call_gen("is_default_cs_", Key, DoGen),
{exception,atom_to_list(Func),[]}
end.
-do_sof(Type, Default0) ->
+do_sof(Gen, Type, Default0) ->
Default = lists:sort(Default0),
Cs0 = lists:duplicate(length(Default), Type),
- Cs = components(Cs0, Default),
+ Cs = components(Gen, Cs0, Default),
case are_all_literals(Cs) of
true ->
Literal = [Lit || {literal,Lit} <- Cs],
{exception,need(check_literal_sof, 2),[Literal]};
false ->
Key = Cs,
- Gen = fun(Fd, Name) ->
- S = gen_sof(Name, Cs),
- ok = file:write(Fd, S)
+ DoGen = fun(Fd, Name) ->
+ S = gen_sof(Name, Cs),
+ ok = file:write(Fd, S)
end,
- Func = asn1ct_func:call_gen("is_default_sof", Key, Gen),
+ Func = asn1ct_func:call_gen("is_default_sof", Key, DoGen),
{exception,atom_to_list(Func),[]}
end.
@@ -199,6 +224,39 @@ gen_cs_2([], _) ->
"throw(false)\n"
"end.\n"].
+gen_map_components(Name, Cs) ->
+ [atom_to_list(Name),"(Value) ->\n",
+ "case Value of\n",
+ "#{"|gen_map_cs_1(Cs, 1, "", [])].
+
+gen_map_cs_1([{Name,{literal,Lit}}|T], I, Sep, Acc) ->
+ Var = "E"++integer_to_list(I),
+ G = Var ++ " =:= " ++ term2str(Lit),
+ [Sep,term2str(Name),":=",Var|
+ gen_map_cs_1(T, I+1, ",\n", [{guard,G}|Acc])];
+gen_map_cs_1([{Name,Exc}|T], I, Sep, Acc) ->
+ Var = "E"++integer_to_list(I),
+ [Sep,term2str(Name),":=",Var|
+ gen_map_cs_1(T, I+1, ",\n", [{exc,{Var,Exc}}|Acc])];
+gen_map_cs_1([], _, _, Acc) ->
+ G = lists:join(", ", [S || {guard,S} <- Acc]),
+ Exc = [E || {exc,E} <- Acc],
+ Body = gen_map_cs_2(Exc, ""),
+ case G of
+ [] ->
+ ["} ->\n"|Body];
+ [_|_] ->
+ ["} when ",G," ->\n"|Body]
+ end.
+
+gen_map_cs_2([{Var,{exception,Func,Args}}|T], Sep) ->
+ [Sep,Func,"(",Var,arg2str(Args),")"|gen_map_cs_2(T, ",\n")];
+gen_map_cs_2([], _) ->
+ [";\n",
+ "_ ->\n"
+ "throw(false)\n"
+ "end.\n"].
+
gen_sof(Name, Cs) ->
[atom_to_list(Name),"(Value) ->\n",
"case length(Value) of\n",
@@ -221,9 +279,18 @@ gen_sof_1([{exception,Func,Args}|Cs], I) ->
gen_sof_1([], _) ->
".\n".
-components([#type{def=Def}|Ts], [V|Vs]) ->
- [do_gen(Def, V)|components(Ts, Vs)];
-components([], []) -> [].
+components(Gen, [#type{def=Def}|Ts], [V|Vs]) ->
+ [do_gen(Gen, Def, V)|components(Gen, Ts, Vs)];
+components(_Gen, [], []) -> [].
+
+map_components(Gen, [{Name,#type{def=Def}}|Ts], Value) ->
+ case maps:find(Name, Value) of
+ {ok,V} ->
+ [{Name,do_gen(Gen, Def, V)}|map_components(Gen, Ts, Value)];
+ error ->
+ map_components(Gen, Ts, Value)
+ end;
+map_components(_Gen, [], _Value) -> [].
gen_choice(Name, Tag, Func, Args) ->
NameStr = atom_to_list(Name),
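
With the new pack=map clauses above, a generated default checker accepts the missing-in-map marker instead of asn1_DEFAULT, and a composite default whose members are all literals collapses into one map literal via the {literal,...} branch. A hypothetical example of the shape this should produce for SEQUENCE { x INTEGER DEFAULT 0, y INTEGER DEFAULT 0 } compiled with the maps option (the function name and the '$missing' atom are placeholders; the real marker is the ?MISSING_IN_MAP macro):

    is_default_1('$missing') ->
        true;
    is_default_1(Def) when Def =:= #{x => 0, y => 0} ->
        true;
    is_default_1(_) ->
        false.
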
diff --git a/lib/asn1/src/asn1ct_gen_per.erl b/lib/asn1/src/asn1ct_gen_per.erl
index aa7223904e..9671a566bf 100644
--- a/lib/asn1/src/asn1ct_gen_per.erl
+++ b/lib/asn1/src/asn1ct_gen_per.erl
@@ -113,11 +113,7 @@ gen_encode_prim(Erules, D) ->
Value = {var,atom_to_list(asn1ct_gen:mk_var(asn1ct_name:curr(val)))},
gen_encode_prim(Erules, D, Value).
-gen_encode_prim(Erules, #type{}=D, Value) ->
- Aligned = case Erules of
- uper -> false;
- per -> true
- end,
+gen_encode_prim(#gen{erule=per,aligned=Aligned}, #type{}=D, Value) ->
Imm = gen_encode_prim_imm(Value, D, Aligned),
asn1ct_imm:enc_cg(Imm, Aligned).
@@ -284,11 +280,7 @@ gen_dec_external(Ext, BytesVar) ->
_ -> [{asis,Mod},":"]
end,{asis,dec_func(Type)},"(",BytesVar,")"]).
-gen_dec_imm(Erule, #type{def=Name,constraint=C}) ->
- Aligned = case Erule of
- uper -> false;
- per -> true
- end,
+gen_dec_imm(#gen{erule=per,aligned=Aligned}, #type{def=Name,constraint=C}) ->
gen_dec_imm_1(Name, C, Aligned).
gen_dec_imm_1('ASN1_OPEN_TYPE', Constraint, Aligned) ->
diff --git a/lib/asn1/src/asn1ct_imm.erl b/lib/asn1/src/asn1ct_imm.erl
index 8b96242c56..2ab848652e 100644
--- a/lib/asn1/src/asn1ct_imm.erl
+++ b/lib/asn1/src/asn1ct_imm.erl
@@ -37,9 +37,11 @@
per_enc_open_type/2,
per_enc_restricted_string/3,
per_enc_small_number/2]).
--export([per_enc_extension_bit/2,per_enc_extensions/4,per_enc_optional/3]).
+-export([per_enc_extension_bit/2,per_enc_extensions/4,
+ per_enc_extensions_map/4,
+ per_enc_optional/2]).
-export([per_enc_sof/5]).
--export([enc_absent/3,enc_append/1,enc_element/2]).
+-export([enc_absent/3,enc_append/1,enc_element/2,enc_maps_get/2]).
-export([enc_cg/2]).
-export([optimize_alignment/1,optimize_alignment/2,
dec_slim_cg/2,dec_code_gen/2]).
@@ -349,27 +351,32 @@ per_enc_extensions(Val0, Pos0, NumBits, Aligned) when NumBits > 0 ->
['_'|Length ++ PutBits]]}],
{var,"Extensions"}}].
-per_enc_optional(Val0, {Pos,DefVals}, _Aligned) when is_integer(Pos),
- is_list(DefVals) ->
- {B,Val} = enc_element(Pos, Val0),
+per_enc_extensions_map(Val0, Vars, Undefined, Aligned) ->
+ NumBits = length(Vars),
+ {B,[_Val,Bitmap]} = mk_vars(Val0, [bitmap]),
+ Length = per_enc_small_length(NumBits, Aligned),
+ PutBits = case NumBits of
+ 1 -> [{put_bits,1,1,[1]}];
+ _ -> [{put_bits,Bitmap,NumBits,[1]}]
+ end,
+ BitmapExpr = extensions_bitmap(Vars, Undefined),
+ B++[{assign,Bitmap,BitmapExpr},
+ {list,[{'cond',[[{eq,Bitmap,0}],
+ ['_'|Length ++ PutBits]]}],
+ {var,"Extensions"}}].
+
+per_enc_optional(Val, DefVals) when is_list(DefVals) ->
Zero = {put_bits,0,1,[1]},
One = {put_bits,1,1,[1]},
- B++[{'cond',
- [[{eq,Val,DefVal},Zero] || DefVal <- DefVals] ++ [['_',One]]}];
-per_enc_optional(Val0, {Pos,{call,M,F,A}}, _Aligned) when is_integer(Pos) ->
- {B,Val} = enc_element(Pos, Val0),
+ [{'cond',
+ [[{eq,Val,DefVal},Zero] || DefVal <- DefVals] ++ [['_',One]]}];
+per_enc_optional(Val, {call,M,F,A}) ->
{[],[[],Tmp]} = mk_vars([], [tmp]),
Zero = {put_bits,0,1,[1]},
One = {put_bits,1,1,[1]},
- B++[{call,M,F,[Val|A],Tmp},
- {'cond',
- [[{eq,Tmp,true},Zero],['_',One]]}];
-per_enc_optional(Val0, Pos, _Aligned) when is_integer(Pos) ->
- {B,Val} = enc_element(Pos, Val0),
- Zero = {put_bits,0,1,[1]},
- One = {put_bits,1,1,[1]},
- B++[{'cond',[[{eq,Val,asn1_NOVALUE},Zero],
- ['_',One]]}].
+ [{call,M,F,[Val|A],Tmp},
+ {'cond',
+ [[{eq,Tmp,true},Zero],['_',One]]}].
per_enc_sof(Val0, Constraint, ElementVar, ElementImm, Aligned) ->
{B,[Val,Len]} = mk_vars(Val0, [len]),
@@ -423,6 +430,13 @@ enc_element(N, Val0) ->
{[],[Val,Dst]} = mk_vars(Val0, [element]),
{[{call,erlang,element,[N,Val],Dst}],Dst}.
+enc_maps_get(N, Val0) ->
+ {[],[Val,Dst0]} = mk_vars(Val0, [element]),
+ {var,Dst} = Dst0,
+ DstExpr = {expr,lists:concat(["#{",N,":=",Dst,"}"])},
+ {var,SrcVar} = Val,
+ {[{assign,DstExpr,SrcVar}],Dst0}.
+
enc_cg(Imm0, false) ->
Imm1 = enc_cse(Imm0),
Imm2 = enc_pre_cg(Imm1),
@@ -1240,6 +1254,20 @@ enc_length(Len, {Lb,Ub}, Aligned) when is_integer(Lb) ->
enc_length(Len, Sv, _Aligned) when is_integer(Sv) ->
[{'cond',[[{eq,Len,Sv}]]}].
+extensions_bitmap(Vs, Undefined) ->
+ Highest = 1 bsl (length(Vs)-1),
+ Cs = extensions_bitmap_1(Vs, Undefined, Highest),
+ lists:flatten(lists:join(" bor ", Cs)).
+
+extensions_bitmap_1([{var,V}|Vs], Undefined, Power) ->
+ S = ["case ",V," of\n",
+ " ",Undefined," -> 0;\n"
+ " _ -> ",integer_to_list(Power),"\n"
+ "end"],
+ [S|extensions_bitmap_1(Vs, Undefined, Power bsr 1)];
+extensions_bitmap_1([], _, _) ->
+ [].
+
put_bits_binary(Bin, _Unit, Aligned) when is_binary(Bin) ->
Sz = byte_size(Bin),
<<Int:Sz/unit:8>> = Bin,
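
per_enc_extensions_map/4 above builds the extension bitmap as a single expression: extensions_bitmap/2 joins one case per extension variable with " bor ", each contributing a descending power of two when the field is present (i.e. not the Undefined marker). A runnable sketch of the same computation done directly on a list of field values (names are illustrative):

    -module(ext_bitmap_sketch).
    -export([bitmap/2]).

    %% bitmap([x, '$missing', y], '$missing') -> 5
    %% (bit 2 for the first present field, bit 0 for the last one).
    bitmap(Fields, Undefined) ->
        Highest = 1 bsl (length(Fields) - 1),
        bitmap(Fields, Undefined, Highest, 0).

    bitmap([F | Fs], Undefined, Power, Acc0) ->
        Acc = case F of
                  Undefined -> Acc0;      %% absent field contributes 0
                  _ -> Acc0 bor Power     %% present field contributes Power
              end,
        bitmap(Fs, Undefined, Power bsr 1, Acc);
    bitmap([], _Undefined, _Power, Acc) ->
        Acc.
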
diff --git a/lib/asn1/src/asn1ct_value.erl b/lib/asn1/src/asn1ct_value.erl
index b3d41dd9f3..8bd99d995b 100644
--- a/lib/asn1/src/asn1ct_value.erl
+++ b/lib/asn1/src/asn1ct_value.erl
@@ -64,7 +64,11 @@ from_type(M,Typename,Type) when is_record(Type,type) ->
end;
{constructed,bif} when Typename == ['EXTERNAL'] ->
Val=from_type_constructed(M,Typename,InnerType,Type),
- asn1ct_eval_ext:transform_to_EXTERNAL1994(Val);
+ T = case M:maps() of
+ false -> transform_to_EXTERNAL1994;
+ true -> transform_to_EXTERNAL1994_maps
+ end,
+ asn1ct_eval_ext:T(Val);
{constructed,bif} ->
from_type_constructed(M,Typename,InnerType,Type)
end;
@@ -118,11 +122,13 @@ get_sequence(M,Typename,Type) ->
#'SEQUENCE'{components=Cl} -> {'SEQUENCE',Cl};
#'SET'{components=Cl} -> {'SET',to_textual_order(Cl)}
end,
- case get_components(M,Typename,CompList) of
- [] ->
- {list_to_atom(asn1ct_gen:list2rname(Typename))};
- C ->
- list_to_tuple([list_to_atom(asn1ct_gen:list2rname(Typename))|C])
+ Cs = get_components(M, Typename, CompList),
+ case M:maps() of
+ false ->
+ RecordTag = list_to_atom(asn1ct_gen:list2rname(Typename)),
+ list_to_tuple([RecordTag|[Val || {_,Val} <- Cs]]);
+ true ->
+ maps:from_list(Cs)
end.
get_components(M,Typename,{Root,Ext}) ->
@@ -130,9 +136,9 @@ get_components(M,Typename,{Root,Ext}) ->
%% Should enhance this *** HERE *** with proper handling of extensions
-get_components(M,Typename,[H|T]) ->
- [from_type(M,Typename,H)|
- get_components(M,Typename,T)];
+get_components(M, Typename, [H|T]) ->
+ #'ComponentType'{name=Name} = H,
+ [{Name,from_type(M, Typename, H)}|get_components(M, Typename, T)];
get_components(_,_,[]) ->
[].
diff --git a/lib/asn1/src/asn1rtt_ext.erl b/lib/asn1/src/asn1rtt_ext.erl
index 3bf01823db..161b2db691 100644
--- a/lib/asn1/src/asn1rtt_ext.erl
+++ b/lib/asn1/src/asn1rtt_ext.erl
@@ -19,7 +19,8 @@
%%
-module(asn1rtt_ext).
--export([transform_to_EXTERNAL1990/1,transform_to_EXTERNAL1994/1]).
+-export([transform_to_EXTERNAL1990/1,transform_to_EXTERNAL1990_maps/1,
+ transform_to_EXTERNAL1994/1,transform_to_EXTERNAL1994_maps/1]).
transform_to_EXTERNAL1990({_,_,_,_}=Val) ->
transform_to_EXTERNAL1990(tuple_to_list(Val), []);
@@ -51,6 +52,30 @@ transform_to_EXTERNAL1990([Data_value], Acc)
list_to_tuple(lists:reverse([{'octet-aligned',Data_value}|Acc])).
+transform_to_EXTERNAL1990_maps(#{identification:=Id,'data-value':=Value}=V) ->
+ M0 = case Id of
+ {syntax,DRef} ->
+ #{'direct-reference'=>DRef};
+ {'presentation-context-id',IndRef} ->
+ #{'indirect-reference'=>IndRef};
+ {'context-negotiation',
+ #{'presentation-context-id':=IndRef,
+ 'transfer-syntax':=DRef}} ->
+ #{'direct-reference'=>DRef,
+ 'indirect-reference'=>IndRef}
+ end,
+ M = case V of
+ #{'data-value-descriptor':=Dvd} ->
+ M0#{'data-value-descriptor'=>Dvd};
+ #{} ->
+ M0
+ end,
+ M#{encoding=>{'octet-aligned',Value}};
+transform_to_EXTERNAL1990_maps(#{encoding:=_}=V) ->
+ %% Already in the EXTERNAL 1990 format.
+ V.
+
+
transform_to_EXTERNAL1994({'EXTERNAL',DRef,IndRef,Data_v_desc,Encoding}=V) ->
Identification =
case {DRef,IndRef} of
@@ -71,3 +96,38 @@ transform_to_EXTERNAL1994({'EXTERNAL',DRef,IndRef,Data_v_desc,Encoding}=V) ->
%% information.
V
end.
+
+transform_to_EXTERNAL1994_maps(V0) ->
+ Identification =
+ case V0 of
+ #{'direct-reference':=DRef,
+ 'indirect-reference':=asn1_NOVALUE} ->
+ {syntax,DRef};
+ #{'direct-reference':=asn1_NOVALUE,
+ 'indirect-reference':=IndRef} ->
+ {'presentation-context-id',IndRef};
+ #{'direct-reference':=DRef,
+ 'indirect-reference':=IndRef} ->
+ {'context-negotiation',
+ #{'transfer-syntax'=>DRef,
+ 'presentation-context-id'=>IndRef}}
+ end,
+ case V0 of
+ #{encoding:={'octet-aligned',Val}}
+ when is_list(Val); is_binary(Val) ->
+ %% Transform to the EXTERNAL 1994 definition.
+ V = #{identification=>Identification,
+ 'data-value'=>Val},
+ case V0 of
+ #{'data-value-descriptor':=asn1_NOVALUE} ->
+ V;
+ #{'data-value-descriptor':=Dvd} ->
+ V#{'data-value-descriptor'=>Dvd}
+ end;
+ _ ->
+ %% Keep the EXTERNAL 1990 definition to avoid losing
+ %% information.
+ V = [{K,V} || {K,V} <- maps:to_list(V0),
+ V =/= asn1_NOVALUE],
+ maps:from_list(V)
+ end.
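
Based on the clauses above, a 1990-style EXTERNAL map with a direct reference and octet-aligned data converts to the 1994 representation like this (a usage sketch with made-up values):

    V1990 = #{'direct-reference' => {2,1,1},
              'indirect-reference' => asn1_NOVALUE,
              'data-value-descriptor' => asn1_NOVALUE,
              encoding => {'octet-aligned', <<1,2,3>>}},
    #{identification := {syntax,{2,1,1}},
      'data-value' := <<1,2,3>>} =
        asn1rtt_ext:transform_to_EXTERNAL1994_maps(V1990).
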
diff --git a/lib/asn1/test/Makefile b/lib/asn1/test/Makefile
index 40575e8a2f..afd063aa8e 100644
--- a/lib/asn1/test/Makefile
+++ b/lib/asn1/test/Makefile
@@ -82,6 +82,7 @@ MODULES= \
testInfObjExtract \
testParameterizedInfObj \
testFragmented \
+ testMaps \
testMergeCompile \
testMultipleLevels \
testDeepTConstr \
@@ -114,8 +115,7 @@ MODULES= \
testImporting \
testExtensibilityImplied \
asn1_test_lib \
- asn1_app_test \
- asn1_appup_test \
+ asn1_app_SUITE \
asn1_SUITE \
error_SUITE \
syntax_SUITE
diff --git a/lib/asn1/test/asn1_SUITE.erl b/lib/asn1/test/asn1_SUITE.erl
index b6430134ab..580c919b9d 100644
--- a/lib/asn1/test/asn1_SUITE.erl
+++ b/lib/asn1/test/asn1_SUITE.erl
@@ -21,6 +21,9 @@
-module(asn1_SUITE).
+%% Suppress compilation of an additional module compiled for maps.
+-define(NO_MAPS_MODULE, asn1_test_lib_no_maps).
+
-define(only_ber(Func),
if Rule =:= ber -> Func;
true -> ok
@@ -39,10 +42,11 @@ suite() ->
{timetrap,{minutes,60}}].
all() ->
- [{group, compile},
+ [xref,
+ xref_export_all,
+
+ {group, compile},
{group, parallel},
- {group, app_test},
- {group, appup_test},
% TODO: Investigate parallel running of these:
testComment,
@@ -64,13 +68,8 @@ groups() ->
ber_optional,
tagdefault_automatic]},
- {app_test, [], [{asn1_app_test, all}]},
-
- {appup_test, [], [{asn1_appup_test, all}]},
-
{parallel, Parallel,
[cover,
- xref,
{group, ber},
% Uses 'P-Record', 'Constraints', 'MEDIA-GATEWAY-CONTROL'...
{group, [], [parse,
@@ -102,6 +101,7 @@ groups() ->
testMultipleLevels,
testOpt,
testSeqDefault,
+ testMaps,
% Uses 'External'
{group, [], [testExternal,
testSeqExtension]},
@@ -176,8 +176,11 @@ groups() ->
{performance, [],
[testTimer_ber,
+ testTimer_ber_maps,
testTimer_per,
- testTimer_uper]}].
+ testTimer_per_maps,
+ testTimer_uper,
+ testTimer_uper_maps]}].
%%------------------------------------------------------------------------------
%% Init/end
@@ -441,6 +444,16 @@ testDEFAULT(Config, Rule, Opts) ->
testDef:main(Rule),
testSeqSetDefaultVal:main(Rule, Opts).
+testMaps(Config) ->
+ test(Config, fun testMaps/3,
+ [{ber,[maps,no_ok_wrapper]},
+ {ber,[maps,der,no_ok_wrapper]},
+ {per,[maps,no_ok_wrapper]},
+ {uper,[maps,no_ok_wrapper]}]).
+testMaps(Config, Rule, Opts) ->
+ asn1_test_lib:compile_all(['Maps'], Config, [Rule|Opts]),
+ testMaps:main(Rule).
+
testOpt(Config) -> test(Config, fun testOpt/3).
testOpt(Config, Rule, Opts) ->
asn1_test_lib:compile("Opt", Config, [Rule|Opts]),
@@ -614,12 +627,12 @@ parse(Config) ->
[asn1_test_lib:compile(M, Config, [abs]) || M <- test_modules()].
per(Config) ->
- test(Config, fun per/3, [per,uper]).
+ test(Config, fun per/3, [per,uper,{per,[maps]},{uper,[maps]}]).
per(Config, Rule, Opts) ->
[module_test(M, Config, Rule, Opts) || M <- per_modules()].
ber_other(Config) ->
- test(Config, fun ber_other/3, [ber]).
+ test(Config, fun ber_other/3, [ber,{ber,[maps]}]).
ber_other(Config, Rule, Opts) ->
[module_test(M, Config, Rule, Opts) || M <- ber_modules()].
@@ -628,7 +641,7 @@ der(Config) ->
asn1_test_lib:compile_all(ber_modules(), Config, [der]).
module_test(M0, Config, Rule, Opts) ->
- asn1_test_lib:compile(M0, Config, [Rule|Opts]),
+ asn1_test_lib:compile(M0, Config, [Rule,?NO_MAPS_MODULE|Opts]),
case list_to_atom(M0) of
'LDAP' ->
%% Because of the recursive definition of 'Filter' in
@@ -995,7 +1008,9 @@ testS1AP(Config, Rule, Opts) ->
testRfcs() ->
[{timetrap,{minutes,90}}].
-testRfcs(Config) -> test(Config, fun testRfcs/3, [{ber,[der]}]).
+testRfcs(Config) -> test(Config, fun testRfcs/3,
+ [{ber,[der,?NO_MAPS_MODULE]},
+ {ber,[der,maps]}]).
testRfcs(Config, Rule, Opts) ->
case erlang:system_info(system_architecture) of
"sparc-sun-solaris2.10" ->
@@ -1010,7 +1025,8 @@ test_compile_options(Config) ->
ok = test_compile_options:path(Config),
ok = test_compile_options:noobj(Config),
ok = test_compile_options:record_name_prefix(Config),
- ok = test_compile_options:verbose(Config).
+ ok = test_compile_options:verbose(Config),
+ ok = test_compile_options:maps(Config).
testDoubleEllipses(Config) -> test(Config, fun testDoubleEllipses/3).
testDoubleEllipses(Config, Rule, Opts) ->
@@ -1027,18 +1043,6 @@ test_modified_x420(Config, Rule, Opts) ->
test_modified_x420:test(Config).
-testX420() ->
- [{timetrap,{minutes,90}}].
-testX420(Config) ->
- case erlang:system_info(system_architecture) of
- "sparc-sun-solaris2.10" ->
- {skip,"Too slow for an old Sparc"};
- _ ->
- Rule = ber,
- testX420:compile(Rule, [der], Config),
- ok = testX420:ticket7759(Rule, Config)
- end.
-
test_x691(Config) ->
test(Config, fun test_x691/3, [per, uper]).
test_x691(Config, Rule, Opts) ->
@@ -1069,7 +1073,7 @@ test_x691(Config, Rule, Opts) ->
ok.
ticket_6143(Config) ->
- ok = test_compile_options:ticket_6143(Config).
+ asn1_test_lib:compile("AA1", Config, [?NO_MAPS_MODULE]).
testExtensionAdditionGroup(Config) ->
test(Config, fun testExtensionAdditionGroup/3).
@@ -1157,20 +1161,33 @@ END
ok = asn1ct:compile(File, [{outdir, PrivDir}]).
-timer_compile(Config, Rule) ->
- asn1_test_lib:compile_all(["H235-SECURITY-MESSAGES", "H323-MESSAGES"],
- Config, [no_ok_wrapper,Rule]).
+timer_compile(Config, Opts0) ->
+ Files = ["H235-SECURITY-MESSAGES", "H323-MESSAGES"],
+ Opts = [no_ok_wrapper,?NO_MAPS_MODULE|Opts0],
+ asn1_test_lib:compile_all(Files, Config, Opts).
testTimer_ber(Config) ->
- timer_compile(Config, ber),
+ timer_compile(Config, [ber]),
testTimer:go().
testTimer_per(Config) ->
- timer_compile(Config, per),
+ timer_compile(Config, [per]),
testTimer:go().
testTimer_uper(Config) ->
- timer_compile(Config, uper),
+ timer_compile(Config, [uper]),
+ testTimer:go().
+
+testTimer_ber_maps(Config) ->
+ timer_compile(Config, [ber,maps]),
+ testTimer:go().
+
+testTimer_per_maps(Config) ->
+ timer_compile(Config, [per,maps]),
+ testTimer:go().
+
+testTimer_uper_maps(Config) ->
+ timer_compile(Config, [uper,maps]),
testTimer:go().
%% Test of multiple-line comment, OTP-8043
@@ -1179,9 +1196,11 @@ testComment(Config) ->
asn1_test_lib:roundtrip('Comment', 'Seq', {'Seq',12,true}).
testName2Number(Config) ->
- N2NOptions = [{n2n,Type} || Type <- ['CauseMisc', 'CauseProtocol',
- 'CauseRadioNetwork',
- 'CauseTransport','CauseNas']],
+ N2NOptions0 = [{n2n,Type} ||
+ Type <- ['CauseMisc', 'CauseProtocol',
+ 'CauseRadioNetwork',
+ 'CauseTransport','CauseNas']],
+ N2NOptions = [?NO_MAPS_MODULE|N2NOptions0],
asn1_test_lib:compile("S1AP-IEs", Config, N2NOptions),
0 = 'S1AP-IEs':name2num_CauseMisc('control-processing-overload'),
@@ -1191,8 +1210,9 @@ testName2Number(Config) ->
%% Test that n2n option generates name2num and num2name functions supporting
%% values not within the extension root if the enumeration type has an
%% extension marker.
- N2NOptionsExt = [{n2n, 'NoExt'}, {n2n, 'Ext'}, {n2n, 'Ext2'}],
+ N2NOptionsExt = [?NO_MAPS_MODULE,{n2n,'NoExt'},{n2n,'Ext'},{n2n,'Ext2'}],
asn1_test_lib:compile("EnumN2N", Config, N2NOptionsExt),
+
%% Previously, name2num and num2name was not generated if the type didn't
%% have an extension marker:
0 = 'EnumN2N':name2num_NoExt('blue'),
@@ -1210,9 +1230,11 @@ testName2Number(Config) ->
ok.
ticket_7407(Config) ->
- asn1_test_lib:compile("EUTRA-extract-7407", Config, [uper]),
+ Opts = [uper,?NO_MAPS_MODULE],
+ asn1_test_lib:compile("EUTRA-extract-7407", Config, Opts),
ticket_7407_code(true),
- asn1_test_lib:compile("EUTRA-extract-7407", Config, [uper,no_final_padding]),
+ asn1_test_lib:compile("EUTRA-extract-7407", Config,
+ [no_final_padding|Opts]),
ticket_7407_code(false).
ticket_7407_code(FinalPadding) ->
@@ -1287,16 +1309,72 @@ ticket7904(Config) ->
{ok,_} = 'RANAPextract1':encode('InitiatingMessage', Val1),
{ok,_} = 'RANAPextract1':encode('InitiatingMessage', Val1).
+
+%% Make sure that functions exported from other modules are
+%% actually used.
+
xref(_Config) ->
- xref:start(s),
- xref:set_default(s, [{verbose,false},{warnings,false},{builtins,true}]),
+ S = ?FUNCTION_NAME,
+ xref:start(S),
+ xref:set_default(S, [{verbose,false},{warnings,false},{builtins,true}]),
Test = filename:dirname(code:which(?MODULE)),
- {ok,_PMs} = xref:add_directory(s, Test),
- UnusedExports = "X - XU - asn1_appup_test - asn1_app_test - \".*_SUITE\" : Mod",
- case xref:q(s, UnusedExports) of
+ {ok,_PMs} = xref:add_directory(S, Test),
+ Q = "X - XU - \".*_SUITE\" : Mod",
+ UnusedExports = xref:q(S, Q),
+ xref:stop(S),
+ case UnusedExports of
{ok,[]} ->
ok;
{ok,[_|_]=Res} ->
io:format("Exported, but unused: ~p\n", [Res]),
?t:fail()
end.
+
+%% Ensure that all functions that are implicitly exported by
+%% 'export_all' in this module are actually used.
+
+xref_export_all(_Config) ->
+ S = ?FUNCTION_NAME,
+ xref:start(S),
+ xref:set_default(S, [{verbose,false},{warnings,false},{builtins,true}]),
+ {ok,_PMs} = xref:add_module(S, code:which(?MODULE)),
+ AllCalled = all_called(),
+ Def = "Called := " ++ lists:flatten(io_lib:format("~p", [AllCalled])),
+ {ok,_} = xref:q(S, Def),
+ {ok,Unused} = xref:q(S, "X - Called - range (closure E | Called)"),
+ xref:stop(S),
+ case Unused of
+ [] ->
+ ok;
+ [_|_] ->
+ Msg = [io_lib:format("~p:~p/~p\n", [M,F,A]) || {M,F,A} <- Unused],
+ io:format("There are unused functions:\n\n~s\n", [Msg]),
+ ?t:fail(unused_functions)
+ end.
+
+%% Collect all functions that common_test will call in this module.
+
+all_called() ->
+ [{?MODULE,end_per_group,2},
+ {?MODULE,end_per_suite,1},
+ {?MODULE,end_per_testcase,2},
+ {?MODULE,init_per_group,2},
+ {?MODULE,init_per_suite,1},
+ {?MODULE,init_per_testcase,2},
+ {?MODULE,suite,0}] ++
+ all_called_1(all() ++ groups()).
+
+all_called_1([{_,_}|T]) ->
+ all_called_1(T);
+all_called_1([{_Name,_Flags,Fs}|T]) ->
+ all_called_1(Fs ++ T);
+all_called_1([F|T]) when is_atom(F) ->
+ L = case erlang:function_exported(?MODULE, F, 0) of
+ false ->
+ [{?MODULE,F,1}];
+ true ->
+ [{?MODULE,F,0},{?MODULE,F,1}]
+ end,
+ L ++ all_called_1(T);
+all_called_1([]) ->
+ [].
diff --git a/lib/asn1/test/asn1_SUITE_data/Maps.asn1 b/lib/asn1/test/asn1_SUITE_data/Maps.asn1
new file mode 100644
index 0000000000..fd5f373e45
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/Maps.asn1
@@ -0,0 +1,17 @@
+Maps DEFINITIONS AUTOMATIC TAGS ::=
+BEGIN
+
+XY ::= SEQUENCE { x INTEGER DEFAULT 0, y INTEGER DEFAULT 0 }
+
+xy1 XY ::= { x 42, y 17 }
+xy2 XY ::= { }
+xy3 XY ::= { y 999 }
+
+S ::= SEQUENCE {
+ xy XY DEFAULT { x 100, y 100 },
+ os OCTET STRING OPTIONAL
+}
+
+s1 S ::= {}
+
+END
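
A usage sketch for the XY type above, assuming the module is compiled as in the testMaps case (with the maps and no_ok_wrapper options): values are plain maps, encode returns the encoding directly, and decode returns the decoded map without an {ok,...} wrapper.

    Bin = 'Maps':encode('XY', #{x => 42, y => 17}),
    #{x := 42, y := 17} = 'Maps':decode('XY', Bin).
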
diff --git a/lib/asn1/test/asn1_SUITE_data/SeqExtension.asn1 b/lib/asn1/test/asn1_SUITE_data/SeqExtension.asn1
index 5fda19303a..e866ef2f4f 100644
--- a/lib/asn1/test/asn1_SUITE_data/SeqExtension.asn1
+++ b/lib/asn1/test/asn1_SUITE_data/SeqExtension.asn1
@@ -48,6 +48,17 @@ SeqExt6 ::= SEQUENCE
[[ i6 [106] INTEGER, i7 [107] INTEGER ]]
}
+SeqExt7 ::= SEQUENCE
+{
+ -- The spaces between the ellipsis and the comma will prevent them
+ -- from being removed.
+ ... ,
+ [[ a INTEGER (0..65535) OPTIONAL,
+ b OCTET STRING OPTIONAL,
+ c BOOLEAN
+ ]]
+}
+
SeqExt1X ::= XSeqExt1
SeqExt2X ::= XSeqExt2
diff --git a/lib/asn1/test/asn1_SUITE_data/nbapsystem/NBAP-PDU-Discriptions.asn b/lib/asn1/test/asn1_SUITE_data/nbapsystem/NBAP-PDU-Descriptions.asn
index b9be9934e4..12a4475422 100644
--- a/lib/asn1/test/asn1_SUITE_data/nbapsystem/NBAP-PDU-Discriptions.asn
+++ b/lib/asn1/test/asn1_SUITE_data/nbapsystem/NBAP-PDU-Descriptions.asn
@@ -4,7 +4,7 @@
--
-- **************************************************************
-NBAP-PDU-Discriptions {
+NBAP-PDU-Descriptions {
itu-t (0) identified-organization (4) etsi (0) mobileDomain (0)
umts-Access (20) modules (3) nbap (2) version1 (1) nbap-PDU-Descriptions (0) }
diff --git a/lib/asn1/test/asn1_SUITE_data/test_records.erl b/lib/asn1/test/asn1_SUITE_data/test_records.erl
index 9fd07c1449..afb1c8c80b 100644
--- a/lib/asn1/test/asn1_SUITE_data/test_records.erl
+++ b/lib/asn1/test/asn1_SUITE_data/test_records.erl
@@ -25,7 +25,7 @@
-define(line,put(test_server_loc,{?MODULE,?LINE}),).
--include("NBAP-PDU-Discriptions.hrl").
+-include("NBAP-PDU-Descriptions.hrl").
-include("NBAP-PDU-Contents.hrl").
-include("NBAP-Containers.hrl").
-include("NBAP-CommonDataTypes.hrl").
diff --git a/lib/asn1/test/asn1_app_test.erl b/lib/asn1/test/asn1_app_SUITE.erl
index 028322f555..c089a7267c 100644
--- a/lib/asn1/test/asn1_app_test.erl
+++ b/lib/asn1/test/asn1_app_SUITE.erl
@@ -21,23 +21,24 @@
%%----------------------------------------------------------------------
%% Purpose: Verify the application specifics of the asn1 application
%%----------------------------------------------------------------------
--module(asn1_app_test).
-
--compile(export_all).
+-module(asn1_app_SUITE).
+-export([all/0,groups/0,init_per_group/2,end_per_group/2,
+ init_per_suite/1,end_per_suite/1,
+ appup/1,fields/1,modules/1,export_all/1,app_depend/1]).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-all() ->
- [fields, modules, exportall, app_depend].
+all() ->
+ [appup, fields, modules, export_all, app_depend].
-groups() ->
+groups() ->
[].
init_per_group(_GroupName, Config) ->
- Config.
+ Config.
end_per_group(_GroupName, Config) ->
- Config.
+ Config.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -65,12 +66,15 @@ is_app(App) ->
end_per_suite(Config) when is_list(Config) ->
Config.
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+appup(Config) when is_list(Config) ->
+ ok = test_server:appup_test(asn1).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% .
fields(Config) when is_list(Config) ->
- AppFile = key1search(app_file, Config),
+ AppFile = key1find(app_file, Config),
Fields = [vsn, description, modules, registered, applications],
case check_fields(Fields, AppFile, []) of
[] ->
@@ -96,10 +100,9 @@ check_field(Name, AppFile, Missing) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% .
modules(Config) when is_list(Config) ->
- AppFile = key1search(app_file, Config),
- Mods = key1search(modules, AppFile),
+ AppFile = key1find(app_file, Config),
+ Mods = key1find(modules, AppFile),
EbinList = get_ebin_mods(asn1),
case missing_modules(Mods, EbinList, []) of
[] ->
@@ -112,10 +115,9 @@ modules(Config) when is_list(Config) ->
ok;
Extra ->
check_asn1ct_modules(Extra)
-% throw({error, {extra_modules, Extra}})
end,
{ok, Mods}.
-
+
get_ebin_mods(App) ->
LibDir = code:lib_dir(App),
EbinDir = filename:join([LibDir,"ebin"]),
@@ -166,10 +168,9 @@ extra_modules(Mods, [Mod|Ebins], Extra) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% .
-exportall(Config) when is_list(Config) ->
- AppFile = key1search(app_file, Config),
- Mods = key1search(modules, AppFile),
+export_all(Config) when is_list(Config) ->
+ AppFile = key1find(app_file, Config),
+ Mods = key1find(modules, AppFile),
check_export_all(Mods).
@@ -180,10 +181,10 @@ check_export_all([Mod|Mods]) ->
{'EXIT', {undef, _}} ->
check_export_all(Mods);
O ->
- case lists:keysearch(options, 1, O) of
+ case lists:keyfind(options, 1, O) of
false ->
check_export_all(Mods);
- {value, {options, List}} ->
+ {options, List} ->
case lists:member(export_all, List) of
true ->
throw({error, {export_all, Mod}});
@@ -193,13 +194,12 @@ check_export_all([Mod|Mods]) ->
end
end.
-
+
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% .
app_depend(Config) when is_list(Config) ->
- AppFile = key1search(app_file, Config),
- Apps = key1search(applications, AppFile),
+ AppFile = key1find(app_file, Config),
+ Apps = key1find(applications, AppFile),
check_apps(Apps).
@@ -220,10 +220,10 @@ check_apps([App|Apps]) ->
fail(Reason) ->
exit({suite_failed, Reason}).
-key1search(Key, L) ->
- case lists:keysearch(Key, 1, L) of
- undefined ->
+key1find(Key, L) ->
+ case lists:keyfind(Key, 1, L) of
+ false ->
fail({not_found, Key, L});
- {value, {Key, Value}} ->
+ {Key, Value} ->
Value
end.
diff --git a/lib/asn1/test/asn1_appup_test.erl b/lib/asn1/test/asn1_appup_test.erl
deleted file mode 100644
index 54540e53cc..0000000000
--- a/lib/asn1/test/asn1_appup_test.erl
+++ /dev/null
@@ -1,58 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2005-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
-%%
-%%----------------------------------------------------------------------
-%% Purpose: Verify the application specifics of the asn1 application
-%%----------------------------------------------------------------------
--module(asn1_appup_test).
--compile(export_all).
--include_lib("common_test/include/ct.hrl").
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-all() ->
- [appup].
-
-groups() ->
- [].
-
-init_per_group(_GroupName, Config) ->
- Config.
-
-end_per_group(_GroupName, Config) ->
- Config.
-
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-init_per_suite(Config) when is_list(Config) ->
- Config.
-
-
-end_per_suite(Config) when is_list(Config) ->
- Config.
-
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-appup() ->
- [{doc, "perform a simple check of the asn1 appup file"}].
-appup(Config) when is_list(Config) ->
- ok = ?t:appup_test(asn1).
diff --git a/lib/asn1/test/asn1_test_lib.erl b/lib/asn1/test/asn1_test_lib.erl
index dc614db4f2..a79958d229 100644
--- a/lib/asn1/test/asn1_test_lib.erl
+++ b/lib/asn1/test/asn1_test_lib.erl
@@ -25,7 +25,8 @@
hex_to_bin/1,
match_value/2,
parallel/0,
- roundtrip/3,roundtrip/4,roundtrip_enc/3,roundtrip_enc/4]).
+ roundtrip/3,roundtrip/4,roundtrip_enc/3,roundtrip_enc/4,
+ map_roundtrip/3]).
-include_lib("common_test/include/ct.hrl").
@@ -94,15 +95,58 @@ module(F0) ->
list_to_atom(F).
%% filename:join(CaseDir, F ++ ".beam").
-compile_file(File, Options) ->
+compile_file(File, Options0) ->
+ Options = [warnings_as_errors|Options0],
try
- ok = asn1ct:compile(File, [warnings_as_errors|Options])
+ ok = asn1ct:compile(File, Options),
+ ok = compile_maps(File, Options)
catch
_:Reason ->
ct:print("Failed to compile ~s\n~p", [File,Reason]),
error
end.
+compile_maps(File, Options) ->
+ unload_map_mod(File),
+ Incompat = [abs,compact_bit_string,legacy_bit_string,
+ legacy_erlang_types,maps,asn1_test_lib_no_maps],
+ case lists:any(fun(E) -> lists:member(E, Incompat) end, Options) of
+ true ->
+ ok;
+ false ->
+ compile_maps_1(File, Options)
+ end.
+
+compile_maps_1(File, Options) ->
+ ok = asn1ct:compile(File, [maps,no_ok_wrapper,noobj|Options]),
+ OutDir = proplists:get_value(outdir, Options),
+ Base0 = filename:rootname(filename:basename(File)),
+ Base = case filename:extension(Base0) of
+ ".set" ->
+ filename:rootname(Base0);
+ _ ->
+ Base0
+ end,
+ ErlBase = Base ++ ".erl",
+ ErlFile = filename:join(OutDir, ErlBase),
+ {ok,Erl0} = file:read_file(ErlFile),
+ Erl = re:replace(Erl0, <<"-module\\('">>, "&maps_"),
+ MapsErlFile = filename:join(OutDir, "maps_" ++ ErlBase),
+ ok = file:write_file(MapsErlFile, Erl),
+ {ok,_} = compile:file(MapsErlFile, [report,{outdir,OutDir},{i,OutDir}]),
+ ok.
+
+unload_map_mod(File0) ->
+ File1 = filename:basename(File0),
+ File2 = filename:rootname(File1, ".asn"),
+ File3 = filename:rootname(File2, ".asn1"),
+ File4 = filename:rootname(File3, ".py"),
+ File = filename:rootname(File4, ".set"),
+ MapMod = list_to_atom("maps_"++File),
+ code:delete(MapMod),
+ code:purge(MapMod),
+ ok.
+
compile_erlang(Mod, Config, Options) ->
DataDir = proplists:get_value(data_dir, Config),
CaseDir = proplists:get_value(case_dir, Config),
@@ -147,24 +191,60 @@ roundtrip(Mod, Type, Value) ->
roundtrip(Mod, Type, Value, Value).
roundtrip(Mod, Type, Value, ExpectedValue) ->
- {ok,Encoded} = Mod:encode(Type, Value),
- {ok,ExpectedValue} = Mod:decode(Type, Encoded),
- test_ber_indefinite(Mod, Type, Encoded, ExpectedValue),
- ok.
+ roundtrip_enc(Mod, Type, Value, ExpectedValue).
roundtrip_enc(Mod, Type, Value) ->
roundtrip_enc(Mod, Type, Value, Value).
roundtrip_enc(Mod, Type, Value, ExpectedValue) ->
- {ok,Encoded} = Mod:encode(Type, Value),
- {ok,ExpectedValue} = Mod:decode(Type, Encoded),
+ case Mod:encode(Type, Value) of
+ {ok,Encoded} ->
+ {ok,ExpectedValue} = Mod:decode(Type, Encoded);
+ Encoded when is_binary(Encoded) ->
+ ExpectedValue = Mod:decode(Type, Encoded)
+ end,
+ map_roundtrip(Mod, Type, Encoded),
test_ber_indefinite(Mod, Type, Encoded, ExpectedValue),
Encoded.
+map_roundtrip(Mod, Type, Encoded) ->
+ MapMod = list_to_atom("maps_"++atom_to_list(Mod)),
+ try MapMod:maps() of
+ true ->
+ map_roundtrip_1(MapMod, Type, Encoded)
+ catch
+ error:undef ->
+ ok
+ end.
+
%%%
%%% Internal functions.
%%%
+map_roundtrip_1(Mod, Type, Encoded) ->
+ Decoded = Mod:decode(Type, Encoded),
+ case Mod:encode(Type, Decoded) of
+ Encoded ->
+ ok;
+ OtherEncoding ->
+ case is_named_bitstring(Decoded) of
+ true ->
+ %% In BER, named BIT STRINGs with different number of
+ %% trailing zeroes decode to the same value.
+ ok;
+ false ->
+ error({encode_mismatch,Decoded,Encoded,OtherEncoding})
+ end
+ end,
+ ok.
+
+is_named_bitstring([H|T]) ->
+ is_atom(H) andalso is_named_bitstring(T);
+is_named_bitstring([]) ->
+ true;
+is_named_bitstring(_) ->
+ false.
+
hex2num(C) when $0 =< C, C =< $9 -> C - $0;
hex2num(C) when $A =< C, C =< $F -> C - $A + 10;
hex2num(C) when $a =< C, C =< $f -> C - $a + 10.
@@ -179,7 +259,12 @@ test_ber_indefinite(Mod, Type, Encoded, ExpectedValue) ->
case Mod:encoding_rule() of
ber ->
Indefinite = iolist_to_binary(ber_indefinite(Encoded)),
- {ok,ExpectedValue} = Mod:decode(Type, Indefinite);
+ case Mod:decode(Type, Indefinite) of
+ {ok,ExpectedValue} ->
+ ok;
+ ExpectedValue ->
+ ok
+ end;
_ ->
ok
end.
diff --git a/lib/asn1/test/h323test.erl b/lib/asn1/test/h323test.erl
index 935af0ba09..41a9159335 100644
--- a/lib/asn1/test/h323test.erl
+++ b/lib/asn1/test/h323test.erl
@@ -27,6 +27,8 @@ run(per) -> run();
run(_Rules) -> ok.
run() ->
+ roundtrip('EndpointType', endpoint()),
+ roundtrip('Alerting-UUIE', alerting_uuie()),
roundtrip('H323-UserInformation', alerting_val(), alerting_enc()),
roundtrip('H323-UserInformation', connect_val(), connect_enc()),
general_string(),
@@ -36,18 +38,24 @@ alerting_val() ->
{'H323-UserInformation',
{'H323-UU-PDU',
{alerting,
- {'Alerting-UUIE',
- {0,0,8,2250,0,2},
- {'EndpointType',asn1_NOVALUE,asn1_NOVALUE,asn1_NOVALUE,
- asn1_NOVALUE,asn1_NOVALUE,
- {'TerminalInfo',asn1_NOVALUE},
- false,false},
- asn1_NOVALUE,
- {'CallIdentifier',<<0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0>>},
- asn1_NOVALUE,asn1_NOVALUE,asn1_NOVALUE,asn1_NOVALUE}},
+ alerting_uuie()},
asn1_NOVALUE,asn1_NOVALUE,asn1_NOVALUE,asn1_NOVALUE,asn1_NOVALUE},
asn1_NOVALUE}.
+endpoint() ->
+ {'EndpointType',asn1_NOVALUE,asn1_NOVALUE,asn1_NOVALUE,
+ asn1_NOVALUE,asn1_NOVALUE,
+ {'TerminalInfo',asn1_NOVALUE},
+ false,false}.
+
+alerting_uuie() ->
+ {'Alerting-UUIE',
+ {0,0,8,2250,0,2},
+ endpoint(),
+ asn1_NOVALUE,
+ {'CallIdentifier',<<0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0>>},
+ asn1_NOVALUE,asn1_NOVALUE,asn1_NOVALUE,asn1_NOVALUE}.
+
alerting_enc() ->
"0380060008914a0002020120110000000000000000000000000000000000".
@@ -82,6 +90,9 @@ general_string() ->
UI = <<109,64,1,57>>,
{ok, _V} = 'MULTIMEDIA-SYSTEM-CONTROL':decode(Type, UI).
+roundtrip(T, V) ->
+ asn1_test_lib:roundtrip('H323-MESSAGES', T, V).
+
roundtrip(T, V, HexString) ->
Enc = asn1_test_lib:hex_to_bin(HexString),
Enc = asn1_test_lib:roundtrip_enc('H323-MESSAGES', T, V),
diff --git a/lib/asn1/test/testContextSwitchingTypes.erl b/lib/asn1/test/testContextSwitchingTypes.erl
index 10012908a9..5688d8afd6 100644
--- a/lib/asn1/test/testContextSwitchingTypes.erl
+++ b/lib/asn1/test/testContextSwitchingTypes.erl
@@ -90,5 +90,6 @@ check_object_identifier(Tuple) when is_tuple(Tuple) ->
enc_dec(T, V0) ->
M = 'ContextSwitchingTypes',
{ok,Enc} = M:encode(T, V0),
+ asn1_test_lib:map_roundtrip(M, T, Enc),
{ok,V} = M:decode(T, Enc),
V.
diff --git a/lib/asn1/test/testInfObj.erl b/lib/asn1/test/testInfObj.erl
index 5a9f47d865..c519c70cdf 100644
--- a/lib/asn1/test/testInfObj.erl
+++ b/lib/asn1/test/testInfObj.erl
@@ -197,5 +197,6 @@ roundtrip(M, T, V) ->
enc_dec(M, T, V0) ->
{ok,Enc} = M:encode(T, V0),
+ asn1_test_lib:map_roundtrip(M, T, Enc),
{ok,V} = M:decode(T, Enc),
V.
diff --git a/lib/asn1/test/testMaps.erl b/lib/asn1/test/testMaps.erl
new file mode 100644
index 0000000000..45dd2255ba
--- /dev/null
+++ b/lib/asn1/test/testMaps.erl
@@ -0,0 +1,50 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2017. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%
+-module(testMaps).
+
+-export([main/1]).
+
+main(_) ->
+ M = 'Maps',
+ true = M:maps(),
+
+ true = M:xy1() =:= #{x=>42,y=>17},
+ true = M:xy2() =:= #{x=>0,y=>0},
+ true = M:xy3() =:= #{x=>0,y=>999},
+ true = M:s1() =:= #{xy=>#{x=>100,y=>100}},
+
+ roundtrip('XY', M:xy1()),
+ roundtrip('XY', M:xy2()),
+ roundtrip('XY', M:xy3()),
+ roundtrip('XY', #{}, #{x=>0,y=>0}),
+
+ roundtrip('S', M:s1()),
+ roundtrip('S', #{}, #{xy=>#{x=>100,y=>100}}),
+ roundtrip('S', #{os=><<1,2,3>>}, #{xy=>#{x=>100,y=>100},
+ os=><<1,2,3>>}),
+
+ ok.
+
+roundtrip(Type, Value) ->
+ roundtrip(Type, Value, Value).
+
+roundtrip(Type, Value, Expected) ->
+ asn1_test_lib:roundtrip('Maps', Type, Value, Expected).
diff --git a/lib/asn1/test/testMultipleLevels.erl b/lib/asn1/test/testMultipleLevels.erl
index c610e59f3d..e9d83665aa 100644
--- a/lib/asn1/test/testMultipleLevels.erl
+++ b/lib/asn1/test/testMultipleLevels.erl
@@ -24,5 +24,7 @@
main(_) ->
Data = {'Top',{short,"abc"},{long,"a long string follows here"}},
- {ok,B} = 'MultipleLevels':encode('Top', Data),
- {ok,Data} = 'MultipleLevels':decode('Top', iolist_to_binary(B)).
+ roundtrip('Top', Data).
+
+roundtrip(T, V) ->
+ asn1_test_lib:roundtrip('MultipleLevels', T, V).
diff --git a/lib/asn1/test/testNBAPsystem.erl b/lib/asn1/test/testNBAPsystem.erl
index 1af283af42..8d61ca18ce 100644
--- a/lib/asn1/test/testNBAPsystem.erl
+++ b/lib/asn1/test/testNBAPsystem.erl
@@ -84,7 +84,7 @@ compile(Config, Options) ->
M <- ["NBAP-CommonDataTypes.asn",
"NBAP-IEs.asn",
"NBAP-PDU-Contents.asn",
- "NBAP-PDU-Discriptions.asn",
+ "NBAP-PDU-Descriptions.asn",
"NBAP-Constants.asn",
"NBAP-Containers.asn"]],
asn1_test_lib:compile_all(Fs, Config, Options),
@@ -98,16 +98,16 @@ test(_Erule,Config) ->
ticket_5812(Config) ->
Msg = v_5812(),
- {ok,B2} = 'NBAP-PDU-Discriptions':encode('NBAP-PDU', Msg),
+ {ok,B2} = 'NBAP-PDU-Descriptions':encode('NBAP-PDU', Msg),
V = <<0,28,74,0,3,48,0,0,1,0,123,64,41,0,0,0,126,64,35,95,208,2,89,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,145,0,1,205,0,0,0,0,2,98,64,1,128>>,
ok = compare(V,B2),
- {ok,Msg2} = 'NBAP-PDU-Discriptions':decode('NBAP-PDU', B2),
+ {ok,Msg2} = 'NBAP-PDU-Descriptions':decode('NBAP-PDU', B2),
ok = check_record_names(Msg2,Config).
enc_audit_req_msg() ->
Msg = {initiatingMessage, audit_req_msg()},
- {ok,B} = 'NBAP-PDU-Discriptions':encode('NBAP-PDU', Msg),
- {ok,_Msg} = 'NBAP-PDU-Discriptions':decode('NBAP-PDU', B),
+ {ok,B} = 'NBAP-PDU-Descriptions':encode('NBAP-PDU', Msg),
+ {ok,_Msg} = 'NBAP-PDU-Descriptions':decode('NBAP-PDU', B),
{initiatingMessage,
#'InitiatingMessage'{value=#'AuditRequest'{protocolIEs=[{_,114,ignore,_}],
protocolExtensions = asn1_NOVALUE}}} = _Msg,
@@ -116,8 +116,8 @@ enc_audit_req_msg() ->
cell_setup_req_msg_test() ->
Msg = {initiatingMessage, cell_setup_req_msg()},
- {ok,B} = 'NBAP-PDU-Discriptions':encode('NBAP-PDU', Msg),
- {ok,_Msg} = 'NBAP-PDU-Discriptions':decode('NBAP-PDU', B),
+ {ok,B} = 'NBAP-PDU-Descriptions':encode('NBAP-PDU', Msg),
+ {ok,_Msg} = 'NBAP-PDU-Descriptions':decode('NBAP-PDU', B),
io:format("Msg: ~P~n~n_Msg: ~P~n",[Msg,15,_Msg,15]),
ok.
diff --git a/lib/asn1/test/testRfcs.erl b/lib/asn1/test/testRfcs.erl
index da7333ef98..20176e35eb 100644
--- a/lib/asn1/test/testRfcs.erl
+++ b/lib/asn1/test/testRfcs.erl
@@ -35,22 +35,27 @@ compile(Config, Erules, Options0) ->
asn1_test_lib:compile_all(Specs, Config, [Erules,{i,CaseDir}|Options]).
test() ->
- {1,3,6,1,5,5,7,48,1,2} =
- IdPkixOcspNonce =
- 'OCSP-2009':'id-pkix-ocsp-nonce'(),
- roundtrip('OCSP-2009', 'OCSPRequest',
- {'OCSPRequest',
- {'TBSRequest',
- 0,
- {rfc822Name,"name string"},
- [{'Request',
- {'CertID',{'_',{2,9,3,4,5},asn1_NOVALUE},
- <<"POTATOHASH">>,<<"HASHBROWN">>,42},
- [{'_',IdPkixOcspNonce,true,<<34,159,16,57,199>>}]}],
- asn1_NOVALUE},
- asn1_NOVALUE}),
- otp_7759(),
- ok.
+ M = 'OCSP-2009',
+ case M:maps() of
+ false ->
+ {1,3,6,1,5,5,7,48,1,2} =
+ IdPkixOcspNonce =
+ 'OCSP-2009':'id-pkix-ocsp-nonce'(),
+ roundtrip('OCSP-2009', 'OCSPRequest',
+ {'OCSPRequest',
+ {'TBSRequest',
+ 0,
+ {rfc822Name,"name string"},
+ [{'Request',
+ {'CertID',{'_',{2,9,3,4,5},asn1_NOVALUE},
+ <<"POTATOHASH">>,<<"HASHBROWN">>,42},
+ [{'_',IdPkixOcspNonce,true,<<34,159,16,57,199>>}]}],
+ asn1_NOVALUE},
+ asn1_NOVALUE}),
+ otp_7759(records);
+ true ->
+ otp_7759(maps)
+ end.
roundtrip(Module, Type, Value0) ->
Enc = Module:encode(Type, Value0),
@@ -58,7 +63,7 @@ roundtrip(Module, Type, Value0) ->
asn1_test_lib:match_value(Value0, Value1),
ok.
-otp_7759() ->
+otp_7759(Pack) ->
%% The release note for asn-1.6.6 says:
%% Decode of an open_type when the value was empty tagged
%% type encoded with indefinite length failed.
@@ -66,10 +71,15 @@ otp_7759() ->
Encoded = encoded_msg(),
ContentInfo = Mod:decode('ContentInfo', Encoded),
io:format("~p\n", [ContentInfo]),
- {'ContentInfo',_Id,PKCS7_content} = ContentInfo,
- X = Mod:decode('SignedData', PKCS7_content),
+ Content = case ContentInfo of
+ {'ContentInfo',_Id,Content0} when Pack =:= records ->
+ Content0;
+ #{'content-type':=_,'pkcs7-content':=Content0}
+ when Pack =:= maps ->
+ Content0
+ end,
+ X = Mod:decode('SignedData', Content),
io:format("~p\n", [X]),
- io:nl(),
ok.
encoded_msg() ->
diff --git a/lib/asn1/test/testSeqExtension.erl b/lib/asn1/test/testSeqExtension.erl
index f7885cb002..be1d1c2490 100644
--- a/lib/asn1/test/testSeqExtension.erl
+++ b/lib/asn1/test/testSeqExtension.erl
@@ -31,6 +31,7 @@
-record('SeqExt4',{bool, int}).
-record('SeqExt5',{name, shoesize}).
-record('SeqExt6',{i1,i2,i3,i4,i5,i6,i7}).
+-record('SeqExt7',{a=asn1_NOVALUE,b=asn1_NOVALUE,c}).
-record('SuperSeq',{s1,s2,s3,s4,s5,s6,i}).
main(Erule, DataDir, Opts) ->
@@ -45,8 +46,35 @@ main(Erule, DataDir, Opts) ->
roundtrip('SeqExt4', #'SeqExt4'{bool=true,int=12345}),
roundtrip('SeqExt4', #'SeqExt4'{bool=false,int=123456}),
+ case Erule of
+ ber ->
+ %% BER currently does not handle Extension Addition Groups
+ %% correctly.
+ ok;
+ _ ->
+ v_roundtrip3('SeqExt5', #'SeqExt5'{name=asn1_NOVALUE,
+ shoesize=asn1_NOVALUE},
+ Erule, #{per=>"00",
+ uper=>"00"}),
+ v_roundtrip3('SeqExt7', #'SeqExt7'{c=asn1_NOVALUE},
+ Erule, #{per=>"00",
+ uper=>"00"})
+ end,
roundtrip('SeqExt5', #'SeqExt5'{name = <<"Arne">>,shoesize=47}),
+ v_roundtrip3('SeqExt7', #'SeqExt7'{c=false},
+ Erule, #{per=>"80800100",
+ uper=>"80808000"}),
+ v_roundtrip3('SeqExt7', #'SeqExt7'{c=true},
+ Erule, #{per=>"80800120",
+ uper=>"80809000"}),
+ v_roundtrip3('SeqExt7', #'SeqExt7'{a=777,b = <<16#AA>>,c=false},
+ Erule, #{per=>"808006C0 030901AA 00",
+ uper=>"8082E061 20354000"}),
+ v_roundtrip3('SeqExt7', #'SeqExt7'{a=8888,c=false},
+ Erule, #{per=>"80800480 22B800",
+ uper=>"8081C457 0000"}),
+
%% Encode a value with this version of the specification.
BigInt = 128638468966,
SuperSeq = #'SuperSeq'{s1=#'SeqExt1'{},
@@ -106,6 +134,7 @@ main(Erule, DataDir, Opts) ->
v_roundtrip2(Erule, 'SeqExt130',
list_to_tuple(['SeqExt130'|
lists:duplicate(129, asn1_NOVALUE)++[199]])),
+
ok.
roundtrip(Type, Value) ->
@@ -118,6 +147,15 @@ v_roundtrip2(Erule, Type, Value) ->
roundtrip2(Type, Value) ->
asn1_test_lib:roundtrip_enc('SeqExtension2', Type, Value).
+v_roundtrip3(Type, Value, Erule, Map) ->
+ case maps:find(Erule, Map) of
+ {ok,Hex} ->
+ Encoded = asn1_test_lib:hex_to_bin(Hex),
+ Encoded = asn1_test_lib:roundtrip_enc('SeqExtension', Type, Value);
+ error ->
+ asn1_test_lib:roundtrip('SeqExtension', Type, Value)
+ end.
+
v(ber, 'SeqExt66') -> "30049F41 017D";
v(per, 'SeqExt66') -> "C0420000 00000000 00004001 FA";
v(uper, 'SeqExt66') -> "D0800000 00000000 00101FA0";
diff --git a/lib/asn1/test/testTCAP.erl b/lib/asn1/test/testTCAP.erl
index 422ae1f0fc..a6f0f9fad7 100644
--- a/lib/asn1/test/testTCAP.erl
+++ b/lib/asn1/test/testTCAP.erl
@@ -92,5 +92,6 @@ test_asn1config() ->
enc_dec(T, V0) ->
M = 'TCAPPackage',
{ok,Enc} = M:encode(T, V0),
+ asn1_test_lib:map_roundtrip(M, T, Enc),
{ok,V} = M:decode(T, Enc),
V.
diff --git a/lib/asn1/test/testTimer.erl b/lib/asn1/test/testTimer.erl
index bd8da85735..3edeb1b712 100644
--- a/lib/asn1/test/testTimer.erl
+++ b/lib/asn1/test/testTimer.erl
@@ -25,7 +25,42 @@
-define(times, 5000).
-val() ->
+go() ->
+ Module = 'H323-MESSAGES',
+ Type = 'H323-UserInformation',
+ Value = case Module:maps() of
+ false -> val_records();
+ true -> val_maps()
+ end,
+ Bytes = Module:encode(Type, Value),
+ Value = Module:decode(Type, Bytes),
+
+ {ValWr,done} = timer:tc(fun() -> encode(?times, Module, Type, Value) end),
+ io:format("ASN.1 encoding: ~p micro~n", [ValWr / ?times]),
+
+ done = decode(2, Module, Type, Bytes),
+
+ {ValRead,done} = timer:tc(fun() -> decode(?times, Module, Type, Bytes) end),
+ io:format("ASN.1 decoding: ~p micro~n", [ValRead /?times]),
+
+ Comment = "encode: "++integer_to_list(round(ValWr/?times)) ++
+ " micro, decode: "++integer_to_list(round(ValRead /?times)) ++
+ " micro. [" ++ atom_to_list(Module:encoding_rule()) ++ "]",
+ {comment,Comment}.
+
+encode(0, _Module,_Type,_Value) ->
+ done;
+encode(N, Module,Type,Value) ->
+ Module:encode(Type, Value),
+ encode(N-1, Module, Type, Value).
+
+decode(0, _Module, _Type, _Value) ->
+ done;
+decode(N, Module, Type, Value) ->
+ Module:decode(Type, Value),
+ decode(N-1, Module, Type, Value).
+
+val_records() ->
{'H323-UserInformation',{'H323-UU-PDU',
{callProceeding,
{'CallProceeding-UUIE',
@@ -126,34 +161,66 @@ val() ->
{'H323-UserInformation_user-data',24,<<"O">>}}.
-go() ->
- Module = 'H323-MESSAGES',
- Type = 'H323-UserInformation',
- Value = val(),
- Bytes = Module:encode(Type, Value),
- Value = Module:decode(Type, Bytes),
-
- {ValWr,done} = timer:tc(fun() -> encode(?times, Module, Type, Value) end),
- io:format("ASN.1 encoding: ~p micro~n", [ValWr / ?times]),
-
- done = decode(2, Module, Type, Bytes),
-
- {ValRead,done} = timer:tc(fun() -> decode(?times, Module, Type, Bytes) end),
- io:format("ASN.1 decoding: ~p micro~n", [ValRead /?times]),
-
- Comment = "encode: "++integer_to_list(round(ValWr/?times)) ++
- " micro, decode: "++integer_to_list(round(ValRead /?times)) ++
- " micro. [" ++ atom_to_list(Module:encoding_rule()) ++ "]",
- {comment,Comment}.
-
-encode(0, _Module,_Type,_Value) ->
- done;
-encode(N, Module,Type,Value) ->
- Module:encode(Type, Value),
- encode(N-1, Module, Type, Value).
-
-decode(0, _Module, _Type, _Value) ->
- done;
-decode(N, Module, Type, Value) ->
- Module:decode(Type, Value),
- decode(N-1, Module, Type, Value).
+val_maps() ->
+#{'h323-uu-pdu' => #{h245Control => [],
+ h245Tunneling => true,
+ 'h323-message-body' => {callProceeding,#{callIdentifier => #{guid => <<"OCTET STRINGOCTE">>},
+ cryptoTokens => [{cryptoGKPwdEncr,#{algorithmOID => {1,18,467,467},
+ encryptedData => <<"OC">>,
+ paramS => #{iv8 => <<"OCTET ST">>,
+ ranInt => -7477016}}},
+ {cryptoGKPwdEncr,#{algorithmOID => {1,19,486,486},
+ encryptedData => <<>>,
+ paramS => #{iv8 => <<"OCTET ST">>,
+ ranInt => -2404513}}}],
+ destinationInfo => #{gatekeeper => #{nonStandardData => #{data => <<"O">>,
+ nonStandardIdentifier => {object,{0,10,260}}}},
+ gateway => #{nonStandardData => #{data => <<"O">>,
+ nonStandardIdentifier => {object,{0,13,326}}},
+ protocol => [{h320,#{dataRatesSupported => [#{channelMultiplier => 78,
+ channelRate => 1290470518,
+ nonStandardData => #{data => <<"O">>,
+ nonStandardIdentifier => {object,{0,11,295}}}}],
+ nonStandardData => #{data => <<"O">>,
+ nonStandardIdentifier => {object,{0,11,282}}},
+ supportedPrefixes => [#{nonStandardData => #{data => <<"O">>,
+ nonStandardIdentifier => {object,{0,12,312}}},
+ prefix => {'h323-ID',"BM"}}]}}]},
+ mc => true,
+ mcu => #{nonStandardData => #{data => <<"OC">>,
+ nonStandardIdentifier => {object,{1,13,340,340}}}},
+ nonStandardData => #{data => <<"O">>,nonStandardIdentifier => {object,{0,9,237}}},
+ terminal => #{nonStandardData => #{data => <<"OC">>,
+ nonStandardIdentifier => {object,{1,14,353,354}}}},
+ undefinedNode => true,
+ vendor => #{productId => <<"OC">>,
+ vendor => #{manufacturerCode => 16282,
+ t35CountryCode => 62,
+ t35Extension => 63},
+ versionId => <<"OC">>}},
+ fastStart => [],
+ h245Address => {ipxAddress,#{netnum => <<"OCTE">>,
+ node => <<"OCTET ">>,
+ port => <<"OC">>}},
+ h245SecurityMode => {noSecurity,'NULL'},
+ protocolIdentifier => {0,8,222},
+ tokens => [#{certificate => #{certificate => <<"OC">>,type => {1,16,405,406}},
+ challenge => <<"OCTET STR">>,
+ dhkey => #{generator => <<1:1>>,halfkey => <<1:1>>,modSize => <<1:1>>},
+ generalID => "BMP",
+ nonStandard => #{data => <<"OC">>,nonStandardIdentifier => {1,16,414,415}},
+ password => "BM",
+ random => -26430296,
+ timeStamp => 1667517741},
+ #{certificate => #{certificate => <<"OC">>,type => {1,17,442,443}},
+ challenge => <<"OCTET STRI">>,
+ dhkey => #{generator => <<1:1>>,halfkey => <<1:1>>,modSize => <<1:1>>},
+ generalID => "BMP",
+ nonStandard => #{data => <<"OC">>,nonStandardIdentifier => {1,18,452,452}},
+ password => "BMP",
+ random => -16356110,
+ timeStamp => 1817656756}]}},
+ h4501SupplementaryService => [],
+ nonStandardControl => [],
+ nonStandardData => #{data => <<>>,nonStandardIdentifier => {object,{0,3,84}}}},
+ 'user-data' => #{'protocol-discriminator' => 24,'user-information' => <<"O">>}}.
diff --git a/lib/asn1/test/testUniqueObjectSets.erl b/lib/asn1/test/testUniqueObjectSets.erl
index 4d3ec94391..30cbceb577 100644
--- a/lib/asn1/test/testUniqueObjectSets.erl
+++ b/lib/asn1/test/testUniqueObjectSets.erl
@@ -27,6 +27,7 @@ seq_roundtrip(I, D0) ->
M = 'UniqueObjectSets',
try
{ok,Enc} = M:encode('Seq', {'Seq',I,D0}),
+ asn1_test_lib:map_roundtrip(M, 'Seq', Enc),
{ok,{'Seq',I,D}} = M:decode('Seq', Enc),
D
catch C:E ->
diff --git a/lib/asn1/test/test_compile_options.erl b/lib/asn1/test/test_compile_options.erl
index ac74470537..c15e61550c 100644
--- a/lib/asn1/test/test_compile_options.erl
+++ b/lib/asn1/test/test_compile_options.erl
@@ -24,8 +24,8 @@
-include_lib("common_test/include/ct.hrl").
--export([wrong_path/1,comp/2,path/1,ticket_6143/1,noobj/1,
- record_name_prefix/1,verbose/1]).
+-export([wrong_path/1,comp/2,path/1,noobj/1,
+ record_name_prefix/1,verbose/1,maps/1]).
%% OTP-5689
wrong_path(Config) ->
@@ -64,8 +64,6 @@ path(Config) ->
file:set_cwd(CWD),
ok.
-ticket_6143(Config) -> asn1_test_lib:compile("AA1", Config, []).
-
noobj(Config) ->
DataDir = proplists:get_value(data_dir,Config),
OutDir = proplists:get_value(priv_dir,Config),
@@ -130,6 +128,28 @@ verbose(Config) when is_list(Config) ->
[] = test_server:capture_get(),
ok.
+maps(Config) ->
+ DataDir = proplists:get_value(data_dir, Config),
+ OutDir = proplists:get_value(case_dir, Config),
+ InFile = filename:join(DataDir, "P-Record"),
+
+ do_maps(ber, InFile, OutDir),
+ do_maps(per, InFile, OutDir),
+ do_maps(uper, InFile, OutDir).
+
+do_maps(Erule, InFile, OutDir) ->
+ Opts = [Erule,maps,{outdir,OutDir}],
+ ok = asn1ct:compile(InFile, Opts),
+
+ %% Make sure that no .hrl files are generated.
+ [] = filelib:wildcard(filename:join(OutDir, "*.hrl")),
+
+ %% Remove all generated files.
+ All = filelib:wildcard(filename:join(OutDir, "*")),
+ _ = [file:delete(N) || N <- All],
+
+ ok.
+
outfiles_check(OutDir) ->
outfiles_check(OutDir,outfiles1()).
diff --git a/lib/compiler/doc/src/compile.xml b/lib/compiler/doc/src/compile.xml
index bd488a39a5..e6470b938f 100644
--- a/lib/compiler/doc/src/compile.xml
+++ b/lib/compiler/doc/src/compile.xml
@@ -418,7 +418,7 @@ module.beam: module.erl \
without module prefix to local or imported functions before
trying with auto-imported BIFs. If the BIF is to be
called, use the <c>erlang</c> module prefix in the call, not
- <c>{ no_auto_import,[{F,A}, ...]}</c>.</p>
+ <c>{no_auto_import,[{F,A}, ...]}</c>.</p>
</note>
<p>If this option is written in the source code, as a
<c>-compile</c> directive, the syntax <c>F/A</c> can be used instead
@@ -439,6 +439,15 @@ module.beam: module.erl \
</p>
</item>
+ <tag><c>{extra_chunks, [{binary(), binary()}]}</c></tag>
+ <item>
+ <p>Pass extra chunks to be stored in the <c>.beam</c> file.
+ The extra chunks must be given as a list of tuples, each with a four-byte
+ binary chunk name followed by a binary containing the chunk contents.
+ See <seealso marker="stdlib:beam_lib">beam_lib</seealso> for
+ more information.
+ </p>
+ </item>
</taglist>
<p>If warnings are turned on (option <c>report_warnings</c>
diff --git a/lib/compiler/src/beam_asm.erl b/lib/compiler/src/beam_asm.erl
index a2f5dc674c..1bda185acd 100644
--- a/lib/compiler/src/beam_asm.erl
+++ b/lib/compiler/src/beam_asm.erl
@@ -21,7 +21,7 @@
-module(beam_asm).
--export([module/4]).
+-export([module/5]).
-export([encode/2]).
-export_type([fail/0,label/0,reg/0,src/0,module_code/0,function_name/0]).
@@ -49,28 +49,26 @@
-type function_name() :: atom().
--type exports() :: [{function_name(),arity()}].
-
-type asm_function() ::
{'function',function_name(),arity(),label(),[asm_instruction()]}.
-type module_code() ::
{module(),[_],[_],[asm_function()],pos_integer()}.
--spec module(module_code(), exports(), [_], [compile:option()]) ->
+-spec module(module_code(), [{binary(), binary()}], [_], [compile:option()], [compile:option()]) ->
{'ok',binary()}.
-module(Code, Abst, SourceFile, Opts) ->
- {ok,assemble(Code, Abst, SourceFile, Opts)}.
+module(Code, ExtraChunks, SourceFile, Opts, CompilerOpts) ->
+ {ok,assemble(Code, ExtraChunks, SourceFile, Opts, CompilerOpts)}.
-assemble({Mod,Exp0,Attr0,Asm0,NumLabels}, Abst, SourceFile, Opts) ->
+assemble({Mod,Exp0,Attr0,Asm0,NumLabels}, ExtraChunks, SourceFile, Opts, CompilerOpts) ->
{1,Dict0} = beam_dict:atom(Mod, beam_dict:new()),
{0,Dict1} = beam_dict:fname(atom_to_list(Mod) ++ ".erl", Dict0),
NumFuncs = length(Asm0),
{Asm,Attr} = on_load(Asm0, Attr0),
Exp = cerl_sets:from_list(Exp0),
{Code,Dict2} = assemble_1(Asm, Exp, Dict1, []),
- build_file(Code, Attr, Dict2, NumLabels, NumFuncs, Abst, SourceFile, Opts).
+ build_file(Code, Attr, Dict2, NumLabels, NumFuncs, ExtraChunks, SourceFile, Opts, CompilerOpts).
on_load(Fs0, Attr0) ->
case proplists:get_value(on_load, Attr0) of
@@ -113,7 +111,7 @@ assemble_function([H|T], Acc, Dict0) ->
assemble_function([], Code, Dict) ->
{Code, Dict}.
-build_file(Code, Attr, Dict, NumLabels, NumFuncs, Abst, SourceFile, Opts) ->
+build_file(Code, Attr, Dict, NumLabels, NumFuncs, ExtraChunks, SourceFile, Opts, CompilerOpts) ->
%% Create the code chunk.
CodeChunk = chunk(<<"Code">>,
@@ -125,9 +123,9 @@ build_file(Code, Attr, Dict, NumLabels, NumFuncs, Abst, SourceFile, Opts) ->
Code),
%% Create the atom table chunk.
-
- {NumAtoms, AtomTab} = beam_dict:atom_table(Dict),
- AtomChunk = chunk(<<"Atom">>, <<NumAtoms:32>>, AtomTab),
+ AtomEncoding = atom_encoding(CompilerOpts),
+ {NumAtoms, AtomTab} = beam_dict:atom_table(Dict, AtomEncoding),
+ AtomChunk = chunk(atom_chunk_name(AtomEncoding), <<NumAtoms:32>>, AtomTab),
%% Create the import table chunk.
@@ -188,21 +186,30 @@ build_file(Code, Attr, Dict, NumLabels, NumFuncs, Abst, SourceFile, Opts) ->
AttrChunk = chunk(<<"Attr">>, Attributes),
CompileChunk = chunk(<<"CInf">>, Compile),
- %% Create the abstract code chunk.
+ %% Create all the extra chunks.
- AbstChunk = chunk(<<"Abst">>, Abst),
+ CheckedChunks = [chunk(Key, Value) || {Key, Value} <- ExtraChunks],
%% Create IFF chunk.
Chunks = case member(slim, Opts) of
true ->
- [Essentials,AttrChunk,AbstChunk];
+ [Essentials,AttrChunk,CheckedChunks];
false ->
[Essentials,LocChunk,AttrChunk,
- CompileChunk,AbstChunk,LineChunk]
+ CompileChunk,CheckedChunks,LineChunk]
end,
build_form(<<"BEAM">>, Chunks).
+atom_encoding(Opts) ->
+ case proplists:get_bool(no_utf8_atoms, Opts) of
+ false -> utf8;
+ true -> latin1
+ end.
+
+atom_chunk_name(utf8) -> <<"AtU8">>;
+atom_chunk_name(latin1) -> <<"Atom">>.
+
%% finalize_fun_table(Essentials, MD5) -> FinalizedEssentials
%% Update the 'old_uniq' field in the entry for each fun in the
%% 'FunT' chunk. We'll use part of the MD5 for the module as a
diff --git a/lib/compiler/src/beam_dict.erl b/lib/compiler/src/beam_dict.erl
index 719d799fd7..990e86062a 100644
--- a/lib/compiler/src/beam_dict.erl
+++ b/lib/compiler/src/beam_dict.erl
@@ -24,7 +24,7 @@
-export([new/0,opcode/2,highest_opcode/1,
atom/2,local/4,export/4,import/4,
string/2,lambda/3,literal/2,line/2,fname/2,
- atom_table/1,local_table/1,export_table/1,import_table/1,
+ atom_table/2,local_table/1,export_table/1,import_table/1,
string_table/1,lambda_table/1,literal_table/1,
line_table/1]).
@@ -197,15 +197,15 @@ fname(Name, #asm{fnames=Fnames}=Dict) ->
end.
%% Returns the atom table.
-%% atom_table(Dict) -> {LastIndex,[Length,AtomString...]}
--spec atom_table(bdict()) -> {non_neg_integer(), [[non_neg_integer(),...]]}.
+%% atom_table(Dict, Encoding) -> {LastIndex,[Length,AtomString...]}
+-spec atom_table(bdict(), latin1 | utf8) -> {non_neg_integer(), [[non_neg_integer(),...]]}.
-atom_table(#asm{atoms=Atoms}) ->
+atom_table(#asm{atoms=Atoms}, Encoding) ->
NumAtoms = maps:size(Atoms),
Sorted = lists:keysort(2, maps:to_list(Atoms)),
{NumAtoms,[begin
- L = atom_to_list(A),
- [length(L)|L]
+ L = atom_to_binary(A, Encoding),
+ [byte_size(L),L]
end || {A,_} <- Sorted]}.
%% Returns the table of local functions.
diff --git a/lib/compiler/src/compile.erl b/lib/compiler/src/compile.erl
index 069add7890..c849306c0d 100644
--- a/lib/compiler/src/compile.erl
+++ b/lib/compiler/src/compile.erl
@@ -214,11 +214,21 @@ expand_opt(report, Os) ->
expand_opt(return, Os) ->
[return_errors,return_warnings|Os];
expand_opt(r12, Os) ->
- [no_recv_opt,no_line_info|Os];
+ [no_recv_opt,no_line_info,no_utf8_atoms|Os];
expand_opt(r13, Os) ->
- [no_recv_opt,no_line_info|Os];
+ [no_recv_opt,no_line_info,no_utf8_atoms|Os];
expand_opt(r14, Os) ->
- [no_line_info|Os];
+ [no_line_info,no_utf8_atoms|Os];
+expand_opt(r15, Os) ->
+ [no_utf8_atoms|Os];
+expand_opt(r16, Os) ->
+ [no_utf8_atoms|Os];
+expand_opt(r17, Os) ->
+ [no_utf8_atoms|Os];
+expand_opt(r18, Os) ->
+ [no_utf8_atoms|Os];
+expand_opt(r19, Os) ->
+ [no_utf8_atoms|Os];
expand_opt({debug_info_key,_}=O, Os) ->
[encrypt_debug_info,O|Os];
expand_opt(no_float_opt, Os) ->
@@ -305,19 +315,25 @@ format_error_reason(Reason) ->
mod_options=[] :: [option()], %Options for module_info
encoding=none :: none | epp:source_encoding(),
errors=[] :: [err_warn_info()],
- warnings=[] :: [err_warn_info()]}).
+ warnings=[] :: [err_warn_info()],
+ extra_chunks=[] :: [{binary(), binary()}]}).
internal({forms,Forms}, Opts0) ->
{_,Ps} = passes(forms, Opts0),
Source = proplists:get_value(source, Opts0, ""),
Opts1 = proplists:delete(source, Opts0),
- Compile = #compile{options=Opts1,mod_options=Opts1},
+ Compile = build_compile(Opts1),
internal_comp(Ps, Forms, Source, "", Compile);
internal({file,File}, Opts) ->
{Ext,Ps} = passes(file, Opts),
- Compile = #compile{options=Opts,mod_options=Opts},
+ Compile = build_compile(Opts),
internal_comp(Ps, none, File, Ext, Compile).
+build_compile(Opts0) ->
+ ExtraChunks = proplists:get_value(extra_chunks, Opts0, []),
+ Opts1 = proplists:delete(extra_chunks, Opts0),
+ #compile{options=Opts1,mod_options=Opts1,extra_chunks=ExtraChunks}.
+
internal_comp(Passes, Code0, File, Suffix, St0) ->
Dir = filename:dirname(File),
Base = filename:basename(File, Suffix),
@@ -1376,13 +1392,15 @@ encrypt({des3_cbc=Type,Key,IVec,BlockSize}, Bin0) ->
save_core_code(Code, St) ->
{ok,Code,St#compile{core_code=cerl:from_records(Code)}}.
-beam_asm(Code0, #compile{ifile=File,abstract_code=Abst,mod_options=Opts0}=St) ->
+beam_asm(Code0, #compile{ifile=File,abstract_code=Abst,extra_chunks=ExtraChunks,
+ options=CompilerOpts,mod_options=Opts0}=St) ->
Source = paranoid_absname(File),
Opts1 = lists:map(fun({debug_info_key,_}) -> {debug_info_key,'********'};
(Other) -> Other
end, Opts0),
Opts2 = [O || O <- Opts1, effects_code_generation(O)],
- case beam_asm:module(Code0, Abst, Source, Opts2) of
+ Chunks = [{<<"Abst">>, Abst} | ExtraChunks],
+ case beam_asm:module(Code0, Chunks, Source, Opts2, CompilerOpts) of
{ok,Code} -> {ok,Code,St#compile{abstract_code=[]}}
end.
diff --git a/lib/compiler/test/compile_SUITE.erl b/lib/compiler/test/compile_SUITE.erl
index 8c09414a52..10740ac2b0 100644
--- a/lib/compiler/test/compile_SUITE.erl
+++ b/lib/compiler/test/compile_SUITE.erl
@@ -30,7 +30,7 @@
file_1/1, forms_2/1, module_mismatch/1, big_file/1, outdir/1,
binary/1, makedep/1, cond_and_ifdef/1, listings/1, listings_big/1,
other_output/1, kernel_listing/1, encrypted_abstr/1,
- strict_record/1,
+ strict_record/1, utf8_atoms/1, extra_chunks/1,
cover/1, env/1, core/1,
core_roundtrip/1, asm/1, optimized_guards/1,
sys_pre_attributes/1, dialyzer/1,
@@ -48,7 +48,7 @@ all() ->
[app_test, appup_test, file_1, forms_2, module_mismatch, big_file, outdir,
binary, makedep, cond_and_ifdef, listings, listings_big,
other_output, kernel_listing, encrypted_abstr,
- strict_record,
+ strict_record, utf8_atoms, extra_chunks,
cover, env, core, core_roundtrip, asm, optimized_guards,
sys_pre_attributes, dialyzer, warnings, pre_load_check,
env_compiler_options].
@@ -450,8 +450,10 @@ do_kernel_listing({M,A}) ->
try
{ok,M,Kern} = compile:forms(A, [to_kernel]),
IoList = v3_kernel_pp:format(Kern),
- _ = iolist_size(IoList),
- ok
+ case unicode:characters_to_binary(IoList) of
+ Bin when is_binary(Bin) ->
+ ok
+ end
catch
throw:{error,Error} ->
io:format("*** compilation failure '~p' for module ~s\n",
@@ -680,6 +682,32 @@ test_sloppy() ->
{1,2} = record_access:test(Turtle),
Turtle.
+utf8_atoms(Config) when is_list(Config) ->
+ Anno = erl_anno:new(1),
+ Atom = binary_to_atom(<<"こんにちは"/utf8>>, utf8),
+ Forms = [{attribute,Anno,compile,[export_all]},
+ {function,Anno,atom,0,[{clause,Anno,[],[],[{atom,Anno,Atom}]}]}],
+
+ Utf8AtomForms = [{attribute,Anno,module,utf8_atom}|Forms],
+ {ok,utf8_atom,Utf8AtomBin} =
+ compile:forms(Utf8AtomForms, [binary]),
+ {ok,{utf8_atom,[{atoms,_}]}} =
+ beam_lib:chunks(Utf8AtomBin, [atoms]),
+ code:load_binary(utf8_atom, "compile_SUITE", Utf8AtomBin),
+ Atom = utf8_atom:atom(),
+
+ NoUtf8AtomForms = [{attribute,Anno,module,no_utf8_atom}|Forms],
+ error = compile:forms(NoUtf8AtomForms, [binary, r19]).
+
+extra_chunks(Config) when is_list(Config) ->
+ Anno = erl_anno:new(1),
+ Forms = [{attribute,Anno,module,extra_chunks}],
+
+ {ok,extra_chunks,ExtraChunksBinary} =
+ compile:forms(Forms, [binary, {extra_chunks, [{<<"ExCh">>, <<"Contents">>}]}]),
+ {ok,{extra_chunks,[{"ExCh",<<"Contents">>}]}} =
+ beam_lib:chunks(ExtraChunksBinary, ["ExCh"]).
+
env(Config) when is_list(Config) ->
{Simple,Target} = get_files(Config, simple, env),
{ok,Cwd} = file:get_cwd(),
@@ -751,7 +779,7 @@ do_core_1(M, A, Outdir) ->
{ok,M,Core0} = compile:forms(A, [to_core]),
CoreFile = filename:join(Outdir, atom_to_list(M)++".core"),
CorePP = core_pp:format(Core0),
- ok = file:write_file(CoreFile, CorePP),
+ ok = file:write_file(CoreFile, unicode:characters_to_binary(CorePP)),
%% Parse the .core file and return the result as Core Erlang Terms.
Core = case compile:file(CoreFile, [report_errors,from_core,no_copt,to_core,binary]) of
@@ -823,7 +851,7 @@ do_core_roundtrip_1(Mod, Abstr, Outdir) ->
do_core_roundtrip_2(M, Core0, Outdir) ->
CoreFile = filename:join(Outdir, atom_to_list(M)++".core"),
CorePP = core_pp:format_all(Core0),
- ok = file:write_file(CoreFile, CorePP),
+ ok = file:write_file(CoreFile, unicode:characters_to_binary(CorePP)),
%% Parse the .core file and return the result as Core Erlang Terms.
Core2 = case compile:file(CoreFile, [report_errors,from_core,
diff --git a/lib/compiler/test/compile_SUITE_data/simple.erl b/lib/compiler/test/compile_SUITE_data/simple.erl
index d8324dafaf..9385d101e0 100644
--- a/lib/compiler/test/compile_SUITE_data/simple.erl
+++ b/lib/compiler/test/compile_SUITE_data/simple.erl
@@ -19,7 +19,7 @@
%%
-module(simple).
--export([test/0]).
+-export([test/0,unicode/0]).
-ifdef(need_foo).
-export([foo/0]).
@@ -28,6 +28,9 @@
test() ->
passed.
+unicode() ->
+ {"это",'спутник'}.
+
%% Conditional inclusion.
%% Compile with [{d, need_foo}, {d, foo_value, 42}].
diff --git a/lib/compiler/test/lc_SUITE.erl b/lib/compiler/test/lc_SUITE.erl
index adb96fb87d..76dfaee482 100644
--- a/lib/compiler/test/lc_SUITE.erl
+++ b/lib/compiler/test/lc_SUITE.erl
@@ -227,7 +227,7 @@ effect(Config) when is_list(Config) ->
lc_SUITE ->
_ = [{'EXIT',{badarg,_}} =
(catch binary_to_atom(<<C/utf8>>, utf8)) ||
- C <- lists:seq(16#10000, 16#FFFFF)];
+ C <- lists:seq(16#FF10000, 16#FFFFFFF)];
_ ->
ok
end,
diff --git a/lib/compiler/test/map_SUITE.erl b/lib/compiler/test/map_SUITE.erl
index 36e82c1459..5e90b79aa2 100644
--- a/lib/compiler/test/map_SUITE.erl
+++ b/lib/compiler/test/map_SUITE.erl
@@ -1559,7 +1559,6 @@ t_warn_pair_key_overloaded(Config) when is_list(Config) ->
"hi2" => lists:subtract([1,2],[1]),
"hi3" => +3,
"hi1" => erlang:min(1,2),
- "hi1" => erlang:hash({1,2},35),
"hi1" => erlang:phash({1,2},33),
"hi1" => erlang:phash2({1,2},34),
"hi1" => erlang:integer_to_binary(1337),
diff --git a/lib/crypto/c_src/crypto.c b/lib/crypto/c_src/crypto.c
index 38b49c7a76..44c3fc4f06 100644
--- a/lib/crypto/c_src/crypto.c
+++ b/lib/crypto/c_src/crypto.c
@@ -61,7 +61,6 @@
#include <openssl/evp.h>
#include <openssl/hmac.h>
-
/* Helper macro to construct a OPENSSL_VERSION_NUMBER.
* See openssl/opensslv.h
*/
@@ -285,6 +284,7 @@ static INLINE int DSA_set0_pqg(DSA *d, BIGNUM *p, BIGNUM *q, BIGNUM *g)
static INLINE int DH_set0_key(DH *dh, BIGNUM *pub_key, BIGNUM *priv_key);
static INLINE int DH_set0_pqg(DH *dh, BIGNUM *p, BIGNUM *q, BIGNUM *g);
+static INLINE int DH_set_length(DH *dh, long length);
static INLINE void DH_get0_pqg(const DH *dh,
const BIGNUM **p, const BIGNUM **q, const BIGNUM **g);
static INLINE void DH_get0_key(const DH *dh,
@@ -305,6 +305,12 @@ static INLINE int DH_set0_pqg(DH *dh, BIGNUM *p, BIGNUM *q, BIGNUM *g)
return 1;
}
+static INLINE int DH_set_length(DH *dh, long length)
+{
+ dh->length = length;
+ return 1;
+}
+
static INLINE void
DH_get0_pqg(const DH *dh, const BIGNUM **p, const BIGNUM **q, const BIGNUM **g)
{
@@ -430,7 +436,7 @@ static ErlNifFunc nif_funcs[] = {
{"rsa_private_crypt", 4, rsa_private_crypt},
{"dh_generate_parameters_nif", 2, dh_generate_parameters_nif},
{"dh_check", 1, dh_check},
- {"dh_generate_key_nif", 3, dh_generate_key_nif},
+ {"dh_generate_key_nif", 4, dh_generate_key_nif},
{"dh_compute_key_nif", 3, dh_compute_key_nif},
{"srp_value_B_nif", 5, srp_value_B_nif},
{"srp_user_secret_nif", 7, srp_user_secret_nif},
@@ -2867,7 +2873,7 @@ static ERL_NIF_TERM dh_check(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]
}
static ERL_NIF_TERM dh_generate_key_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{/* (PrivKey, DHParams=[P,G], Mpint) */
+{/* (PrivKey|undefined, DHParams=[P,G], Mpint, Len|0) */
DH* dh_params;
int pub_len, prv_len;
unsigned char *pub_ptr, *prv_ptr;
@@ -2875,6 +2881,7 @@ static ERL_NIF_TERM dh_generate_key_nif(ErlNifEnv* env, int argc, const ERL_NIF_
int mpint; /* 0 or 4 */
BIGNUM *priv_key = NULL;
BIGNUM *dh_p = NULL, *dh_g = NULL;
+ unsigned long len = 0;
if (!(get_bn_from_bin(env, argv[0], &priv_key)
|| argv[0] == atom_undefined)
@@ -2883,8 +2890,10 @@ static ERL_NIF_TERM dh_generate_key_nif(ErlNifEnv* env, int argc, const ERL_NIF_
|| !enif_get_list_cell(env, tail, &head, &tail)
|| !get_bn_from_bin(env, head, &dh_g)
|| !enif_is_empty_list(env, tail)
- || !enif_get_int(env, argv[2], &mpint) || (mpint & ~4)) {
- if (priv_key) BN_free(priv_key);
+ || !enif_get_int(env, argv[2], &mpint) || (mpint & ~4)
+ || !enif_get_ulong(env, argv[3], &len) ) {
+
+ if (priv_key) BN_free(priv_key);
if (dh_p) BN_free(dh_p);
if (dh_g) BN_free(dh_g);
return enif_make_badarg(env);
@@ -2894,6 +2903,15 @@ static ERL_NIF_TERM dh_generate_key_nif(ErlNifEnv* env, int argc, const ERL_NIF_
DH_set0_key(dh_params, NULL, priv_key);
DH_set0_pqg(dh_params, dh_p, NULL, dh_g);
+ if (len) {
+ if (len < BN_num_bits(dh_p))
+ DH_set_length(dh_params, len);
+ else {
+ DH_free(dh_params);
+ return enif_make_badarg(env);
+ }
+ }
+
if (DH_generate_key(dh_params)) {
const BIGNUM *pub_key, *priv_key;
DH_get0_key(dh_params, &pub_key, &priv_key);
diff --git a/lib/crypto/doc/src/crypto.xml b/lib/crypto/doc/src/crypto.xml
index cbf141b3b0..a4b34657ba 100644
--- a/lib/crypto/doc/src/crypto.xml
+++ b/lib/crypto/doc/src/crypto.xml
@@ -103,7 +103,7 @@
<code>dh_private() = key_value() </code>
- <code>dh_params() = [key_value()] = [P, G] </code>
+ <code>dh_params() = [key_value()] = [P, G] | [P, G, PrivateKeyBitLength]</code>
<code>ecdh_public() = key_value() </code>
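For illustration only (not part of the patch), a hedged sketch of the extended parameter list; P and G are assumed to be a valid prime and generator, and Len is the desired private-key length in bits:

    %% Generate a DH key pair with the private key limited to Len bits.
    dh_keypair(P, G, Len) ->
        {PubKey, PrivKey} = crypto:generate_key(dh, [P, G, Len]),
        true = byte_size(PrivKey) =< (Len + 7) div 8,
        {PubKey, PrivKey}.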
diff --git a/lib/crypto/src/crypto.erl b/lib/crypto/src/crypto.erl
index 9767a13dfc..5a915d4233 100644
--- a/lib/crypto/src/crypto.erl
+++ b/lib/crypto/src/crypto.erl
@@ -426,9 +426,15 @@ exor(Bin1, Bin2) ->
generate_key(Type, Params) ->
generate_key(Type, Params, undefined).
-generate_key(dh, DHParameters, PrivateKey) ->
+generate_key(dh, DHParameters0, PrivateKey) ->
+ {DHParameters, Len} =
+ case DHParameters0 of
+ [P,G,L] -> {[P,G], L};
+ [P,G] -> {[P,G], 0}
+ end,
dh_generate_key_nif(ensure_int_as_bin(PrivateKey),
- map_ensure_int_as_bin(DHParameters), 0);
+ map_ensure_int_as_bin(DHParameters),
+ 0, Len);
generate_key(srp, {host, [Verifier, Generator, Prime, Version]}, PrivArg)
when is_binary(Verifier), is_binary(Generator), is_binary(Prime), is_atom(Version) ->
@@ -807,7 +813,7 @@ dh_check([_Prime,_Gen]) -> ?nif_stub.
%% DHParameters = [P (Prime)= mpint(), G(Generator) = mpint()]
%% PrivKey = mpint()
-dh_generate_key_nif(_PrivateKey, _DHParameters, _Mpint) -> ?nif_stub.
+dh_generate_key_nif(_PrivateKey, _DHParameters, _Mpint, _Length) -> ?nif_stub.
%% DHParameters = [P (Prime)= mpint(), G(Generator) = mpint()]
%% MyPrivKey, OthersPublicKey = mpint()
diff --git a/lib/debugger/test/map_SUITE.erl b/lib/debugger/test/map_SUITE.erl
index 42484ff723..4d8a86f5a2 100644
--- a/lib/debugger/test/map_SUITE.erl
+++ b/lib/debugger/test/map_SUITE.erl
@@ -1714,10 +1714,8 @@ t_bif_map_values(Config) when is_list(Config) ->
t_erlang_hash(Config) when is_list(Config) ->
-
ok = t_bif_erlang_phash2(),
ok = t_bif_erlang_phash(),
- ok = t_bif_erlang_hash(),
ok.
t_bif_erlang_phash2() ->
@@ -1759,26 +1757,6 @@ t_bif_erlang_phash() ->
2620391445 = erlang:phash(M2,Sz), % 3590546636
ok.
-t_bif_erlang_hash() ->
- Sz = 1 bsl 27 - 1,
- 39684169 = erlang:hash(#{},Sz), % 5158
- 33673142 = erlang:hash(#{ a => 1, "a" => 2, <<"a">> => 3, {a,b} => 4 },Sz), % 71555838
- 95337869 = erlang:hash(#{ 1 => a, 2 => "a", 3 => <<"a">>, 4 => {a,b} },Sz), % 5497225
- 108959561 = erlang:hash(#{ 1 => a },Sz), % 126071654
- 59623150 = erlang:hash(#{ a => 1 },Sz), % 126426236
-
- 42775386 = erlang:hash(#{{} => <<>>},Sz), % 101655720
- 71692856 = erlang:hash(#{<<>> => {}},Sz), % 101655720
-
- M0 = #{ a => 1, "key" => <<"value">> },
- M1 = maps:remove("key",M0),
- M2 = M1#{ "key" => <<"value">> },
-
- 70254632 = erlang:hash(M0,Sz), % 38260486
- 59623150 = erlang:hash(M1,Sz), % 126426236
- 70254632 = erlang:hash(M2,Sz), % 38260486
- ok.
-
t_map_encode_decode(Config) when is_list(Config) ->
<<131,116,0,0,0,0>> = erlang:term_to_binary(#{}),
Pairs = [
diff --git a/lib/dialyzer/RELEASE_NOTES b/lib/dialyzer/RELEASE_NOTES
index 2457faa07a..299cc8642f 100644
--- a/lib/dialyzer/RELEASE_NOTES
+++ b/lib/dialyzer/RELEASE_NOTES
@@ -181,7 +181,7 @@ Version 1.8.0 (in Erlang/OTP R12B-2)
- Dialyzer has a new warning option -Wunmatched_returns which warns for
function calls that ignore the return value.
This catches many common programming errors (e.g. calling file:close/1
- and not checking for the absense of errors), interface discrepancies
+ and not checking for the absence of errors), interface discrepancies
(e.g. a function returning multiple values when in reality the function
is void and only called for its side-effects), calling the wrong function
(e.g. io_lib:format/1 instead of io:format/1), and even possible
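For illustration only (not part of the patch), the kind of code -Wunmatched_returns is meant to flag; the return value of file:close/1 (ok | {error,Reason}) is silently dropped:

    close_quietly(Fd) ->
        file:close(Fd),   %% result ignored; -Wunmatched_returns warns here
        ok.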
diff --git a/lib/dialyzer/src/dialyzer_analysis_callgraph.erl b/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
index ae1e4d8c38..aeeb895a0c 100644
--- a/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
+++ b/lib/dialyzer/src/dialyzer_analysis_callgraph.erl
@@ -114,7 +114,6 @@ loop(#server_state{parent = Parent} = State,
%% The Analysis
%%--------------------------------------------------------------------
-%% Calls to erlang:garbage_collect() help to reduce the heap size.
analysis_start(Parent, Analysis, LegalWarnings) ->
CServer = dialyzer_codeserver:new(),
Plt = Analysis#analysis.plt,
@@ -136,11 +135,9 @@ analysis_start(Parent, Analysis, LegalWarnings) ->
%% Remote type postprocessing
NewCServer =
try
- NewRecords = dialyzer_codeserver:get_temp_records(TmpCServer0),
+ TmpCServer1 = dialyzer_utils:merge_types(TmpCServer0, Plt),
NewExpTypes = dialyzer_codeserver:get_temp_exported_types(TmpCServer0),
- OldRecords = dialyzer_plt:get_types(Plt),
OldExpTypes0 = dialyzer_plt:get_exported_types(Plt),
- MergedRecords = dialyzer_utils:merge_records(NewRecords, OldRecords),
RemMods =
[case Analysis#analysis.start_from of
byte_code -> list_to_atom(filename:basename(F, ".beam"));
@@ -148,25 +145,20 @@ analysis_start(Parent, Analysis, LegalWarnings) ->
end || F <- Files],
OldExpTypes1 = dialyzer_utils:sets_filter(RemMods, OldExpTypes0),
MergedExpTypes = sets:union(NewExpTypes, OldExpTypes1),
- TmpCServer1 = dialyzer_codeserver:set_temp_records(MergedRecords, TmpCServer0),
TmpCServer2 =
dialyzer_codeserver:finalize_exported_types(MergedExpTypes, TmpCServer1),
- erlang:garbage_collect(),
+ erlang:garbage_collect(), % reduce heap size
?timing(State#analysis_state.timing_server, "remote",
contracts_and_records(TmpCServer2))
catch
throw:{error, _ErrorMsg} = Error -> exit(Error)
end,
- NewPlt0 = dialyzer_plt:insert_types(Plt, dialyzer_codeserver:get_records(NewCServer)),
- ExpTypes = dialyzer_codeserver:get_exported_types(NewCServer),
- NewPlt1 = dialyzer_plt:insert_exported_types(NewPlt0, ExpTypes),
- State0 = State#analysis_state{plt = NewPlt1},
- dump_callgraph(Callgraph, State0, Analysis),
+ dump_callgraph(Callgraph, State, Analysis),
%% Remove all old versions of the files being analyzed
AllNodes = dialyzer_callgraph:all_nodes(Callgraph),
- Plt1_a = dialyzer_plt:delete_list(NewPlt1, AllNodes),
+ Plt1_a = dialyzer_plt:delete_list(Plt, AllNodes),
Plt1 = dialyzer_plt:insert_callbacks(Plt1_a, NewCServer),
- State1 = State0#analysis_state{codeserver = NewCServer, plt = Plt1},
+ State1 = State#analysis_state{codeserver = NewCServer, plt = Plt1},
Exports = dialyzer_codeserver:get_exports(NewCServer),
NonExports = sets:subtract(sets:from_list(AllNodes), Exports),
NonExportsList = sets:to_list(NonExports),
@@ -176,14 +168,17 @@ analysis_start(Parent, Analysis, LegalWarnings) ->
false -> Callgraph
end,
State2 = analyze_callgraph(NewCallgraph, State1),
- #analysis_state{plt = MiniPlt2, doc_plt = DocPlt} = State2,
+ #analysis_state{plt = MiniPlt2,
+ doc_plt = DocPlt,
+ codeserver = Codeserver0} = State2,
+ {Codeserver, MiniPlt3} = move_data(Codeserver0, MiniPlt2),
dialyzer_callgraph:dispose_race_server(NewCallgraph),
rcv_and_send_ext_types(Parent),
%% Since the PLT is never used, a dummy is sent:
DummyPlt = dialyzer_plt:new(),
- send_codeserver_plt(Parent, CServer, DummyPlt),
- MiniPlt3 = dialyzer_plt:delete_list(MiniPlt2, NonExportsList),
- send_analysis_done(Parent, MiniPlt3, DocPlt).
+ send_codeserver_plt(Parent, Codeserver, DummyPlt),
+ MiniPlt4 = dialyzer_plt:delete_list(MiniPlt3, NonExportsList),
+ send_analysis_done(Parent, MiniPlt4, DocPlt).
contracts_and_records(CodeServer) ->
Fun = contrs_and_recs(CodeServer),
@@ -200,15 +195,20 @@ contracts_and_records(CodeServer) ->
contrs_and_recs(TmpCServer2) ->
fun() ->
Parent = receive {Pid, go} -> Pid end,
- {TmpCServer3, RecordDict} =
- dialyzer_utils:process_record_remote_types(TmpCServer2),
+ TmpCServer3 = dialyzer_utils:process_record_remote_types(TmpCServer2),
TmpServer4 =
- dialyzer_contracts:process_contract_remote_types(TmpCServer3,
- RecordDict),
+ dialyzer_contracts:process_contract_remote_types(TmpCServer3),
dialyzer_codeserver:give_away(TmpServer4, Parent),
exit(TmpServer4)
end.
+move_data(CServer, MiniPlt) ->
+ {CServer1, Records} = dialyzer_codeserver:extract_records(CServer),
+ MiniPlt1 = dialyzer_plt:insert_types(MiniPlt, Records),
+ {NewCServer, ExpTypes} = dialyzer_codeserver:extract_exported_types(CServer1),
+ NewMiniPlt = dialyzer_plt:insert_exported_types(MiniPlt1, ExpTypes),
+ {NewCServer, NewMiniPlt}.
+
analyze_callgraph(Callgraph, #analysis_state{codeserver = Codeserver,
doc_plt = DocPlt,
plt = Plt,
@@ -603,6 +603,7 @@ send_ext_types(Parent, ExtTypes) ->
ok.
send_codeserver_plt(Parent, CServer, Plt) ->
+ ok = dialyzer_codeserver:give_away(CServer, Parent),
Parent ! {self(), cserver, CServer, Plt},
ok.
diff --git a/lib/dialyzer/src/dialyzer_callgraph.erl b/lib/dialyzer/src/dialyzer_callgraph.erl
index 68f3d7a240..6387f3d1e4 100644
--- a/lib/dialyzer/src/dialyzer_callgraph.erl
+++ b/lib/dialyzer/src/dialyzer_callgraph.erl
@@ -40,7 +40,7 @@
module_postorder_from_funs/2,
new/0,
get_depends_on/2,
- get_required_by/2,
+ %% get_required_by/2,
in_neighbours/2,
renew_race_info/4,
renew_race_code/2,
@@ -250,12 +250,12 @@ get_depends_on(SCC, #callgraph{active_digraph = {'e', Out, _In, Maps}}) ->
get_depends_on(SCC, #callgraph{active_digraph = {'d', DG}}) ->
digraph:out_neighbours(DG, SCC).
--spec get_required_by(scc() | module(), callgraph()) -> [scc()].
+%% -spec get_required_by(scc() | module(), callgraph()) -> [scc()].
-get_required_by(SCC, #callgraph{active_digraph = {'e', _Out, In, Maps}}) ->
- lookup_scc(SCC, In, Maps);
-get_required_by(SCC, #callgraph{active_digraph = {'d', DG}}) ->
- digraph:in_neighbours(DG, SCC).
+%% get_required_by(SCC, #callgraph{active_digraph = {'e', _Out, In, Maps}}) ->
+%% lookup_scc(SCC, In, Maps);
+%% get_required_by(SCC, #callgraph{active_digraph = {'d', DG}}) ->
+%% digraph:in_neighbours(DG, SCC).
lookup_scc(SCC, Table, Maps) ->
case ets_lookup_dict({'scc', SCC}, Maps) of
@@ -285,9 +285,11 @@ module_postorder(#callgraph{digraph = DG}) ->
Nodes = sets:from_list([M || {M,_F,_A} <- digraph_vertices(DG)]),
MDG = digraph:new([acyclic]),
digraph_confirm_vertices(sets:to_list(Nodes), MDG),
- Foreach = fun({M1,M2}) -> digraph:add_edge(MDG, M1, M2) end,
+ Foreach = fun({M1,M2}) -> _ = digraph:add_edge(MDG, M1, M2) end,
lists:foreach(Foreach, sets:to_list(Edges)),
- {digraph_utils:topsort(MDG), {'d', MDG}}.
+ %% The out-neighbors of a vertex are the vertices called directly.
+ %% The used vertices are to occur *before* the calling vertex:
+ {lists:reverse(digraph_utils:topsort(MDG)), {'d', MDG}}.
edge_fold({{M1,_,_},{M2,_,_}}, Set) ->
case M1 =/= M2 of
@@ -305,7 +307,7 @@ module_deps(#callgraph{digraph = DG}) ->
Nodes = sets:from_list([M || {M,_F,_A} <- digraph_vertices(DG)]),
MDG = digraph:new(),
digraph_confirm_vertices(sets:to_list(Nodes), MDG),
- Foreach = fun({M1,M2}) -> digraph:add_edge(MDG, M1, M2) end,
+ Foreach = fun({M1,M2}) -> check_add_edge(MDG, M1, M2) end,
lists:foreach(Foreach, sets:to_list(Edges)),
Deps = [{N, ordsets:from_list(digraph:in_neighbours(MDG, N))}
|| N <- sets:to_list(Nodes)],
@@ -363,7 +365,7 @@ ets_lookup_set(Key, Table) ->
%% The core tree must be labeled as by cerl_trees:label/1 (or /2).
%% The set of labels in the tree must be disjoint from the set of
-%% labels already occuring in the callgraph.
+%% labels already occurring in the callgraph.
-spec scan_core_tree(cerl:c_module(), callgraph()) ->
{[mfa_or_funlbl()], [callgraph_edge()]}.
@@ -552,9 +554,21 @@ digraph_add_edge(From, To, DG) ->
false -> digraph:add_vertex(DG, To);
{To, _} -> ok
end,
- digraph:add_edge(DG, {From, To}, From, To, []),
+ check_add_edge(DG, {From, To}, From, To, []),
ok.
+check_add_edge(G, V1, V2) ->
+ case digraph:add_edge(G, V1, V2) of
+ {error, Error} -> exit({add_edge, V1, V2, Error});
+ _Edge -> ok
+ end.
+
+check_add_edge(G, E, V1, V2, L) ->
+ case digraph:add_edge(G, E, V1, V2, L) of
+ {error, Error} -> exit({add_edge, E, V1, V2, L, Error});
+ _Edge -> ok
+ end.
+
digraph_confirm_vertices([MFA|Left], DG) ->
digraph:add_vertex(DG, MFA, confirmed),
digraph_confirm_vertices(Left, DG);
@@ -762,28 +776,53 @@ to_ps(#callgraph{} = CG, File, Args) ->
ok.
condensation(G) ->
- SCCs = digraph_utils:strong_components(G),
- %% Assign unique numbers to SCCs:
- Ints = lists:seq(1, length(SCCs)),
- IntToSCC = lists:zip(Ints, SCCs),
- IntScc = sofs:relation(IntToSCC, [{int, scc}]),
- %% Subsitute strong components for vertices in edges using the
- %% unique numbers:
- C2V = sofs:relation([{SC, V} || SC <- SCCs, V <- SC], [{scc, v}]),
- I2V = sofs:relative_product(IntScc, C2V), % [{v, int}]
- Es = sofs:relation(digraph:edges(G), [{v, v}]),
- R1 = sofs:relative_product(I2V, Es),
- R2 = sofs:relative_product(I2V, sofs:converse(R1)),
- %% Create in- and out-neighbours:
- In = sofs:relation_to_family(sofs:strict_relation(R2)),
- R3 = sofs:converse(R2),
- Out = sofs:relation_to_family(sofs:strict_relation(R3)),
- [OutETS, InETS, MapsETS] =
- [ets:new(Name,[{read_concurrency, true}]) ||
- Name <- [callgraph_deps_out, callgraph_deps_in, callgraph_scc_map]],
- ets:insert(OutETS, sofs:to_external(Out)),
- ets:insert(InETS, sofs:to_external(In)),
- %% Create mappings from SCCs to unique integers, and the inverse:
- ets:insert(MapsETS, lists:zip([{'scc', SCC} || SCC<- SCCs], Ints)),
- ets:insert(MapsETS, IntToSCC),
- {{'e', OutETS, InETS, MapsETS}, SCCs}.
+ erlang:garbage_collect(), % reduce heap size
+ {Pid, Ref} = erlang:spawn_monitor(do_condensation(G, self())),
+ receive {'DOWN', Ref, process, Pid, Result} ->
+ {SCCInts, OutETS, InETS, MapsETS} = Result,
+ NewSCCs = [ets:lookup_element(MapsETS, SCCInt, 2) || SCCInt <- SCCInts],
+ {{'e', OutETS, InETS, MapsETS}, NewSCCs}
+ end.
+
+-spec do_condensation(digraph:graph(), pid()) -> fun(() -> no_return()).
+
+do_condensation(G, Parent) ->
+ fun() ->
+ [OutETS, InETS, MapsETS] =
+ [ets:new(Name,[{read_concurrency, true}]) ||
+ Name <- [callgraph_deps_out, callgraph_deps_in, callgraph_scc_map]],
+ SCCs = digraph_utils:strong_components(G),
+ %% Assign unique numbers to SCCs:
+ Ints = lists:seq(1, length(SCCs)),
+ IntToSCC = lists:zip(Ints, SCCs),
+ IntScc = sofs:relation(IntToSCC, [{int, scc}]),
+ %% Create mapping from unique integers to SCCs:
+ ets:insert(MapsETS, IntToSCC),
+ %% Substitute strong components for vertices in edges using the
+ %% unique numbers:
+ C2V = sofs:relation([{SC, V} || SC <- SCCs, V <- SC], [{scc, v}]),
+ I2V = sofs:relative_product(IntScc, C2V), % [{v, int}]
+ Es = sofs:relation(digraph:edges(G), [{v, v}]),
+ R1 = sofs:relative_product(I2V, Es),
+ R2 = sofs:relative_product(I2V, sofs:converse(R1)),
+ R2Strict = sofs:strict_relation(R2),
+ %% Create out-neighbours:
+ Out = sofs:relation_to_family(sofs:converse(R2Strict)),
+ ets:insert(OutETS, sofs:to_external(Out)),
+ %% Sort the SCCs topologically:
+ DG = sofs:family_to_digraph(Out),
+ lists:foreach(fun(I) -> digraph:add_vertex(DG, I) end, Ints),
+ SCCInts0 = digraph_utils:topsort(DG),
+ digraph:delete(DG),
+ %% The out-neighbors of a vertex are the vertices called directly.
+ %% The used vertices are to occur *before* the calling vertex:
+ SCCInts = lists:reverse(SCCInts0),
+ %% Create in-neighbours:
+ In = sofs:relation_to_family(R2Strict),
+ ets:insert(InETS, sofs:to_external(In)),
+ %% Create mapping from SCCs to unique integers:
+ ets:insert(MapsETS, lists:zip([{'scc', SCC} || SCC<- SCCs], Ints)),
+ lists:foreach(fun(E) -> true = ets:give_away(E, Parent, any)
+ end, [OutETS, InETS, MapsETS]),
+ exit({SCCInts, OutETS, InETS, MapsETS})
+ end.
diff --git a/lib/dialyzer/src/dialyzer_cl.erl b/lib/dialyzer/src/dialyzer_cl.erl
index 158ee761af..8500c59ebe 100644
--- a/lib/dialyzer/src/dialyzer_cl.erl
+++ b/lib/dialyzer/src/dialyzer_cl.erl
@@ -30,6 +30,8 @@
-record(cl_state,
{backend_pid :: pid() | 'undefined',
+ code_server = none :: 'none'
+ | dialyzer_codeserver:codeserver(),
erlang_mode = false :: boolean(),
external_calls = [] :: [mfa()],
external_types = [] :: [mfa()],
@@ -630,6 +632,9 @@ cl_loop(State, LogCache) ->
{BackendPid, warnings, Warnings} ->
NewState = store_warnings(State, Warnings),
cl_loop(NewState, LogCache);
+ {BackendPid, cserver, CodeServer, _Plt} -> % Plt is ignored
+ NewState = State#cl_state{code_server = CodeServer},
+ cl_loop(NewState, LogCache);
{BackendPid, done, NewMiniPlt, _NewDocPlt} ->
return_value(State, NewMiniPlt);
{BackendPid, ext_calls, ExtCalls} ->
@@ -647,7 +652,6 @@ cl_loop(State, LogCache) ->
cl_error(State, Msg);
_Other ->
%% io:format("Received ~p\n", [_Other]),
- %% Note: {BackendPid, cserver, CodeServer, Plt} is ignored.
cl_loop(State, LogCache)
end.
@@ -688,18 +692,34 @@ cl_error(State, Msg) ->
maybe_close_output_file(State),
throw({dialyzer_error, lists:flatten(Msg)}).
-return_value(State = #cl_state{erlang_mode = ErlangMode,
+return_value(State = #cl_state{code_server = CodeServer,
+ erlang_mode = ErlangMode,
mod_deps = ModDeps,
output_plt = OutputPlt,
plt_info = PltInfo,
stored_warnings = StoredWarnings},
MiniPlt) ->
+ %% Just for now:
+ case CodeServer =:= none of
+ true ->
+ ok;
+ false ->
+ dialyzer_codeserver:delete(CodeServer)
+ end,
case OutputPlt =:= none of
true ->
dialyzer_plt:delete(MiniPlt);
false ->
- Plt = dialyzer_plt:restore_full_plt(MiniPlt),
- dialyzer_plt:to_file(OutputPlt, Plt, ModDeps, PltInfo)
+ Fun = to_file_fun(OutputPlt, MiniPlt, ModDeps, PltInfo),
+ {Pid, Ref} = erlang:spawn_monitor(Fun),
+ dialyzer_plt:give_away(MiniPlt, Pid),
+ Pid ! go,
+ receive {'DOWN', Ref, process, Pid, Result} ->
+ case Result of
+ ok -> ok;
+ Thrown -> throw(Thrown)
+ end
+ end
end,
UnknownWarnings = unknown_warnings(State),
RetValue =
@@ -720,6 +740,16 @@ return_value(State = #cl_state{erlang_mode = ErlangMode,
{RetValue, set_warning_id(AllWarnings)}
end.
+-spec to_file_fun(_, _, _, _) -> fun(() -> no_return()).
+
+to_file_fun(Filename, MiniPlt, ModDeps, PltInfo) ->
+ fun() ->
+ receive go -> ok end,
+ Plt = dialyzer_plt:restore_full_plt(MiniPlt),
+ dialyzer_plt:to_file(Filename, Plt, ModDeps, PltInfo),
+ exit(ok)
+ end.
+
unknown_warnings(State = #cl_state{legal_warnings = LegalWarnings}) ->
Unknown = case ordsets:is_element(?WARN_UNKNOWN, LegalWarnings) of
true ->
diff --git a/lib/dialyzer/src/dialyzer_codeserver.erl b/lib/dialyzer/src/dialyzer_codeserver.erl
index f53c713bfe..a1a7370eff 100644
--- a/lib/dialyzer/src/dialyzer_codeserver.erl
+++ b/lib/dialyzer/src/dialyzer_codeserver.erl
@@ -26,18 +26,21 @@
give_away/2,
finalize_contracts/1,
finalize_exported_types/2,
- finalize_records/2,
+ finalize_records/1,
get_contracts/1,
get_callbacks/1,
get_exported_types/1,
+ extract_exported_types/1,
get_exports/1,
- get_records/1,
+ get_records_table/1,
+ extract_records/1,
get_next_core_label/1,
get_temp_contracts/2,
- contracts_modules/1,
+ all_temp_modules/1,
store_contracts/4,
get_temp_exported_types/1,
- get_temp_records/1,
+ get_temp_records_table/1,
+ lookup_temp_mod_records/2,
insert/3,
insert_exports/2,
insert_temp_exported_types/2,
@@ -52,7 +55,6 @@
lookup_meta_info/2,
new/0,
set_next_core_label/2,
- set_temp_records/2,
store_temp_records/3,
translate_fake_file/3]).
@@ -67,10 +69,8 @@
-type set_ets() :: ets:tid().
-type types() :: erl_types:type_table().
--type mod_records() :: erl_types:mod_records().
-type contracts() :: #{mfa() => dialyzer_contracts:file_contract()}.
--type mod_contracts() :: dict:dict(module(), contracts()).
%% A property-list of data compiled from -compile and -dialyzer attributes.
-type meta_info() :: [{{'nowarn_function' | dial_warn_tag()},
@@ -80,8 +80,8 @@
-record(codeserver, {next_core_label = 0 :: label(),
code :: dict_ets(),
- exported_types :: set_ets(), % set(mfa())
- records :: map_ets(),
+ exported_types :: 'clean' | set_ets(), % set(mfa())
+ records :: 'clean' | map_ets(),
contracts :: map_ets(),
callbacks :: map_ets(),
fun_meta_info :: dict_ets(), % {mfa(), meta_info()}
@@ -107,9 +107,6 @@ ets_map_store(Key, Element, Table) ->
true = ets:insert(Table, {Key, Element}),
Table.
-ets_dict_store_dict(Dict, Table) ->
- true = ets:insert(Table, dict:to_list(Dict)).
-
ets_dict_to_dict(Table) ->
Fold = fun({Key,Value}, Dict) -> dict:store(Key, Value, Dict) end,
ets:foldl(Fold, dict:new(), Table).
@@ -164,11 +161,8 @@ new() ->
-spec delete(codeserver()) -> 'ok'.
-delete(#codeserver{code = Code, exported_types = ExportedTypes,
- records = Records, contracts = Contracts,
- callbacks = Callbacks}) ->
- lists:foreach(fun ets:delete/1,
- [Code, ExportedTypes, Records, Contracts, Callbacks]).
+delete(CServer) ->
+ lists:foreach(fun(Table) -> true = ets:delete(Table) end, tables(CServer)).
-spec insert(atom(), cerl:c_module(), codeserver()) -> codeserver().
@@ -222,6 +216,11 @@ is_exported(MFA, #codeserver{exports = Exports}) ->
get_exported_types(#codeserver{exported_types = ExpTypes}) ->
ets_set_to_set(ExpTypes).
+-spec extract_exported_types(codeserver()) -> {codeserver(), set_ets()}.
+
+extract_exported_types(#codeserver{exported_types = ExpTypes} = CS) ->
+ {CS#codeserver{exported_types = 'clean'}, ExpTypes}.
+
-spec get_exports(codeserver()) -> sets:set(mfa()).
get_exports(#codeserver{exports = Exports}) ->
@@ -269,10 +268,15 @@ lookup_mod_records(Mod, #codeserver{records = RecDict}) when is_atom(Mod) ->
{ok, Map} -> Map
end.
--spec get_records(codeserver()) -> mod_records().
+-spec get_records_table(codeserver()) -> map_ets().
+
+get_records_table(#codeserver{records = RecDict}) ->
+ RecDict.
-get_records(#codeserver{records = RecDict}) ->
- ets_dict_to_dict(RecDict).
+-spec extract_records(codeserver()) -> {codeserver(), map_ets()}.
+
+extract_records(#codeserver{records = RecDict} = CS) ->
+ {CS#codeserver{records = clean}, RecDict}.
-spec store_temp_records(module(), types(), codeserver()) -> codeserver().
@@ -283,26 +287,26 @@ store_temp_records(Mod, Map, #codeserver{temp_records = TempRecDict} = CS)
false -> CS#codeserver{temp_records = ets_map_store(Mod, Map, TempRecDict)}
end.
--spec get_temp_records(codeserver()) -> mod_records().
+-spec get_temp_records_table(codeserver()) -> map_ets().
-get_temp_records(#codeserver{temp_records = TempRecDict}) ->
- ets_dict_to_dict(TempRecDict).
+get_temp_records_table(#codeserver{temp_records = TempRecDict}) ->
+ TempRecDict.
--spec set_temp_records(mod_records(), codeserver()) -> codeserver().
+-spec lookup_temp_mod_records(module(), codeserver()) -> types().
-set_temp_records(Dict, CS) ->
- true = ets:delete(CS#codeserver.temp_records),
- TempRecords = ets:new(dialyzer_codeserver_temp_records,[]),
- true = ets_dict_store_dict(Dict, TempRecords),
- CS#codeserver{temp_records = TempRecords}.
+lookup_temp_mod_records(Mod, #codeserver{temp_records = TempRecDict}) ->
+ case ets_dict_find(Mod, TempRecDict) of
+ error -> maps:new();
+ {ok, Map} -> Map
+ end.
--spec finalize_records(mod_records(), codeserver()) -> codeserver().
+-spec finalize_records(codeserver()) -> codeserver().
-finalize_records(Dict, #codeserver{temp_records = TmpRecords,
- records = Records} = CS) ->
- true = ets:delete(TmpRecords),
- true = ets_dict_store_dict(Dict, Records),
- CS#codeserver{temp_records = clean}.
+finalize_records(#codeserver{temp_records = TmpRecords,
+ records = Records} = CS) ->
+ true = ets:delete(Records),
+ ets:rename(TmpRecords, dialyzer_codeserver_records),
+ CS#codeserver{temp_records = clean, records = TmpRecords}.
-spec lookup_mod_contracts(atom(), codeserver()) -> contracts().
@@ -331,10 +335,13 @@ lookup_meta_info(MorMFA, #codeserver{fun_meta_info = FunMetaInfo}) ->
{ok, PropList} -> PropList
end.
--spec get_contracts(codeserver()) -> mod_contracts().
+-spec get_contracts(codeserver()) ->
+ dict:dict(mfa(), dialyzer_contracts:file_contract()).
get_contracts(#codeserver{contracts = ContDict}) ->
- ets_dict_to_dict(ContDict).
+ dict:filter(fun({_M, _F, _A}, _) -> true;
+ (_, _) -> false
+ end, ets_dict_to_dict(ContDict)).
-spec get_callbacks(codeserver()) -> list().
@@ -348,12 +355,14 @@ store_temp_contracts(Mod, SpecMap, CallbackMap,
#codeserver{temp_contracts = Cn,
temp_callbacks = Cb} = CS)
when is_atom(Mod) ->
+ %% Make sure Mod is stored even if there are no callbacks or
+ %% contracts.
CS1 = CS#codeserver{temp_contracts = ets_map_store(Mod, SpecMap, Cn)},
CS1#codeserver{temp_callbacks = ets_map_store(Mod, CallbackMap, Cb)}.
--spec contracts_modules(codeserver()) -> [module()].
+-spec all_temp_modules(codeserver()) -> [module()].
-contracts_modules(#codeserver{temp_contracts = TempContTable}) ->
+all_temp_modules(#codeserver{temp_contracts = TempContTable}) ->
ets:select(TempContTable, [{{'$1', '$2'}, [], ['$1']}]).
-spec store_contracts(module(), contracts(), contracts(), codeserver()) ->
@@ -380,17 +389,25 @@ get_temp_contracts(Mod, #codeserver{temp_contracts = TempContDict,
-spec give_away(codeserver(), pid()) -> 'ok'.
-give_away(#codeserver{temp_records = TempRecords,
- temp_contracts = TempContracts,
- temp_callbacks = TempCallbacks,
- records = Records,
- contracts = Contracts,
- callbacks = Callbacks}, Pid) ->
- _ = [true = ets:give_away(Table, Pid, any) ||
- Table <- [TempRecords, TempContracts, TempCallbacks,
- Records, Contracts, Callbacks],
- Table =/= clean],
- ok.
+give_away(CServer, Pid) ->
+ lists:foreach(fun(Table) -> true = ets:give_away(Table, Pid, any)
+ end, tables(CServer)).
+
+tables(#codeserver{code = Code,
+ fun_meta_info = FunMetaInfo,
+ exports = Exports,
+ temp_exported_types = TempExpTypes,
+ temp_records = TempRecords,
+ temp_contracts = TempContracts,
+ temp_callbacks = TempCallbacks,
+ exported_types = ExportedTypes,
+ records = Records,
+ contracts = Contracts,
+ callbacks = Callbacks}) ->
+ [Table || Table <- [Code, FunMetaInfo, Exports, TempExpTypes,
+ TempRecords, TempContracts, TempCallbacks,
+ ExportedTypes, Records, Contracts, Callbacks],
+ Table =/= clean].
-spec finalize_contracts(codeserver()) -> codeserver().
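With these changes the codeserver keeps records in ETS from start to finish: finalize_records/1 promotes the temporary table by renaming it rather than copying its contents into a dict, and give_away/2 and delete/1 simply walk the single tables/1 list, skipping entries that are still 'clean'. A minimal sketch of the rename-and-hand-over idiom, with illustrative table names:

    -module(table_handover_sketch).
    -export([demo/1]).

    %% Build a temporary table, promote it by renaming (no copying),
    %% then transfer ownership to another process.
    demo(NewOwner) when is_pid(NewOwner) ->
        Tmp = ets:new(sketch_tmp, [public]),
        true = ets:insert(Tmp, {key, value}),
        sketch_final = ets:rename(Tmp, sketch_final),
        %% NewOwner receives {'ETS-TRANSFER', Tab, FromPid, handover}.
        true = ets:give_away(Tmp, NewOwner, handover),
        Tmp.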
diff --git a/lib/dialyzer/src/dialyzer_contracts.erl b/lib/dialyzer/src/dialyzer_contracts.erl
index 2078e58ce8..5f24b5a668 100644
--- a/lib/dialyzer/src/dialyzer_contracts.erl
+++ b/lib/dialyzer/src/dialyzer_contracts.erl
@@ -24,7 +24,7 @@
get_contract_return/2,
%% get_contract_signature/1,
is_overloaded/1,
- process_contract_remote_types/2,
+ process_contract_remote_types/1,
store_tmp_contract/5]).
-export_type([file_contract/0, plt_contracts/0]).
@@ -139,18 +139,18 @@ sequence([], _Delimiter) -> "";
sequence([H], _Delimiter) -> H;
sequence([H|T], Delimiter) -> H ++ Delimiter ++ sequence(T, Delimiter).
--spec process_contract_remote_types(dialyzer_codeserver:codeserver(),
- erl_types:mod_records()) ->
+-spec process_contract_remote_types(dialyzer_codeserver:codeserver()) ->
dialyzer_codeserver:codeserver().
-process_contract_remote_types(CodeServer, RecordDict) ->
- Mods = dialyzer_codeserver:contracts_modules(CodeServer),
+process_contract_remote_types(CodeServer) ->
+ Mods = dialyzer_codeserver:all_temp_modules(CodeServer),
+ RecordTable = dialyzer_codeserver:get_records_table(CodeServer),
ExpTypes = dialyzer_codeserver:get_exported_types(CodeServer),
ContractFun =
fun({{_M, _F, _A}=MFA, {File, TmpContract, Xtra}}, C0) ->
#tmp_contract{contract_funs = CFuns, forms = Forms} = TmpContract,
{NewCs, C2} = lists:mapfoldl(fun(CFun, C1) ->
- CFun(ExpTypes, RecordDict, C1)
+ CFun(ExpTypes, RecordTable, C1)
end, C0, CFuns),
Args = general_domain(NewCs),
Contract = #contract{contracts = NewCs, args = Args, forms = Forms},
@@ -177,7 +177,7 @@ process_contract_remote_types(CodeServer, RecordDict) ->
-type fun_types() :: dict:dict(label(), erl_types:type_table()).
--spec check_contracts([{mfa(), file_contract()}],
+-spec check_contracts(orddict:orddict(mfa(), file_contract()),
dialyzer_callgraph:callgraph(), fun_types(),
opaques_fun()) -> plt_contracts().
@@ -206,7 +206,7 @@ check_contracts(Contracts, Callgraph, FunTypes, FindOpaques) ->
error -> NewContracts
end
end,
- dict:fold(FoldFun, [], FunTypes).
+ orddict:from_list(dict:fold(FoldFun, [], FunTypes)).
%% Checks all components of a contract
-spec check_contract(#contract{}, erl_types:erl_type()) -> 'ok' | {'error', term()}.
@@ -451,10 +451,10 @@ contract_from_form(Forms, MFA, RecDict, FileLine) ->
contract_from_form([{type, _, 'fun', [_, _]} = Form | Left], MFA, RecDict,
FileLine, TypeAcc, FormAcc) ->
TypeFun =
- fun(ExpTypes, AllRecords, Cache) ->
+ fun(ExpTypes, RecordTable, Cache) ->
{NewType, NewCache} =
try
- from_form_with_check(Form, ExpTypes, MFA, AllRecords, Cache)
+ from_form_with_check(Form, ExpTypes, MFA, RecordTable, Cache)
catch
throw:{error, Msg} ->
{File, Line} = FileLine,
@@ -472,12 +472,12 @@ contract_from_form([{type, _L1, bounded_fun,
[{type, _L2, 'fun', [_, _]} = Form, Constr]}| Left],
MFA, RecDict, FileLine, TypeAcc, FormAcc) ->
TypeFun =
- fun(ExpTypes, AllRecords, Cache) ->
+ fun(ExpTypes, RecordTable, Cache) ->
{Constr1, VarTable, Cache1} =
- process_constraints(Constr, MFA, RecDict, ExpTypes, AllRecords,
+ process_constraints(Constr, MFA, RecDict, ExpTypes, RecordTable,
Cache),
{NewType, NewCache} =
- from_form_with_check(Form, ExpTypes, MFA, AllRecords,
+ from_form_with_check(Form, ExpTypes, MFA, RecordTable,
VarTable, Cache1),
NewTypeNoVars = erl_types:subst_all_vars_to_any(NewType),
{{NewTypeNoVars, Constr1}, NewCache}
@@ -488,28 +488,28 @@ contract_from_form([{type, _L1, bounded_fun,
contract_from_form([], _MFA, _RecDict, _FileLine, TypeAcc, FormAcc) ->
{lists:reverse(TypeAcc), lists:reverse(FormAcc)}.
-process_constraints(Constrs, MFA, RecDict, ExpTypes, AllRecords, Cache) ->
+process_constraints(Constrs, MFA, RecDict, ExpTypes, RecordTable, Cache) ->
{Init0, NewCache} = initialize_constraints(Constrs, MFA, RecDict, ExpTypes,
- AllRecords, Cache),
+ RecordTable, Cache),
Init = remove_cycles(Init0),
- constraints_fixpoint(Init, MFA, RecDict, ExpTypes, AllRecords, NewCache).
+ constraints_fixpoint(Init, MFA, RecDict, ExpTypes, RecordTable, NewCache).
-initialize_constraints(Constrs, MFA, RecDict, ExpTypes, AllRecords, Cache) ->
- initialize_constraints(Constrs, MFA, RecDict, ExpTypes, AllRecords,
+initialize_constraints(Constrs, MFA, RecDict, ExpTypes, RecordTable, Cache) ->
+ initialize_constraints(Constrs, MFA, RecDict, ExpTypes, RecordTable,
Cache, []).
-initialize_constraints([], _MFA, _RecDict, _ExpTypes, _AllRecords,
+initialize_constraints([], _MFA, _RecDict, _ExpTypes, _RecordTable,
Cache, Acc) ->
{Acc, Cache};
-initialize_constraints([Constr|Rest], MFA, RecDict, ExpTypes, AllRecords,
+initialize_constraints([Constr|Rest], MFA, RecDict, ExpTypes, RecordTable,
Cache, Acc) ->
case Constr of
{type, _, constraint, [{atom, _, is_subtype}, [Type1, Type2]]} ->
VarTable = erl_types:var_table__new(),
{T1, NewCache} =
- final_form(Type1, ExpTypes, MFA, AllRecords, VarTable, Cache),
+ final_form(Type1, ExpTypes, MFA, RecordTable, VarTable, Cache),
Entry = {T1, Type2},
- initialize_constraints(Rest, MFA, RecDict, ExpTypes, AllRecords,
+ initialize_constraints(Rest, MFA, RecDict, ExpTypes, RecordTable,
NewCache, [Entry|Acc]);
{type, _, constraint, [{atom,_,Name}, List]} ->
N = length(List),
@@ -517,18 +517,18 @@ initialize_constraints([Constr|Rest], MFA, RecDict, ExpTypes, AllRecords,
io_lib:format("Unsupported type guard ~w/~w\n", [Name, N])})
end.
-constraints_fixpoint(Constrs, MFA, RecDict, ExpTypes, AllRecords, Cache) ->
+constraints_fixpoint(Constrs, MFA, RecDict, ExpTypes, RecordTable, Cache) ->
VarTable = erl_types:var_table__new(),
{VarTab, NewCache} =
- constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, AllRecords,
+ constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, RecordTable,
VarTable, Cache),
constraints_fixpoint(VarTab, MFA, Constrs, RecDict, ExpTypes,
- AllRecords, NewCache).
+ RecordTable, NewCache).
constraints_fixpoint(OldVarTab, MFA, Constrs, RecDict, ExpTypes,
- AllRecords, Cache) ->
+ RecordTable, Cache) ->
{NewVarTab, NewCache} =
- constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, AllRecords,
+ constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, RecordTable,
OldVarTab, Cache),
case NewVarTab of
OldVarTab ->
@@ -540,38 +540,38 @@ constraints_fixpoint(OldVarTab, MFA, Constrs, RecDict, ExpTypes,
{FinalConstrs, NewVarTab, NewCache};
_Other ->
constraints_fixpoint(NewVarTab, MFA, Constrs, RecDict, ExpTypes,
- AllRecords, NewCache)
+ RecordTable, NewCache)
end.
-final_form(Form, ExpTypes, MFA, AllRecords, VarTable, Cache) ->
- from_form_with_check(Form, ExpTypes, MFA, AllRecords, VarTable, Cache).
+final_form(Form, ExpTypes, MFA, RecordTable, VarTable, Cache) ->
+ from_form_with_check(Form, ExpTypes, MFA, RecordTable, VarTable, Cache).
-from_form_with_check(Form, ExpTypes, MFA, AllRecords, Cache) ->
+from_form_with_check(Form, ExpTypes, MFA, RecordTable, Cache) ->
VarTable = erl_types:var_table__new(),
- from_form_with_check(Form, ExpTypes, MFA, AllRecords, VarTable, Cache).
+ from_form_with_check(Form, ExpTypes, MFA, RecordTable, VarTable, Cache).
-from_form_with_check(Form, ExpTypes, MFA, AllRecords, VarTable, Cache) ->
+from_form_with_check(Form, ExpTypes, MFA, RecordTable, VarTable, Cache) ->
Site = {spec, MFA},
- C1 = erl_types:t_check_record_fields(Form, ExpTypes, Site, AllRecords,
+ C1 = erl_types:t_check_record_fields(Form, ExpTypes, Site, RecordTable,
VarTable, Cache),
- erl_types:t_from_form(Form, ExpTypes, Site, AllRecords, VarTable, C1).
+ erl_types:t_from_form(Form, ExpTypes, Site, RecordTable, VarTable, C1).
-constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, AllRecords,
+constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, RecordTable,
VarTab, Cache) ->
{Subtypes, NewCache} =
- constraints_to_subs(Constrs, MFA, RecDict, ExpTypes, AllRecords,
+ constraints_to_subs(Constrs, MFA, RecDict, ExpTypes, RecordTable,
VarTab, Cache, []),
{insert_constraints(Subtypes), NewCache}.
-constraints_to_subs([], _MFA, _RecDict, _ExpTypes, _AllRecords,
+constraints_to_subs([], _MFA, _RecDict, _ExpTypes, _RecordTable,
_VarTab, Cache, Acc) ->
{Acc, Cache};
-constraints_to_subs([{T1, Form2}|Rest], MFA, RecDict, ExpTypes, AllRecords,
+constraints_to_subs([{T1, Form2}|Rest], MFA, RecDict, ExpTypes, RecordTable,
VarTab, Cache, Acc) ->
{T2, NewCache} =
- final_form(Form2, ExpTypes, MFA, AllRecords, VarTab, Cache),
+ final_form(Form2, ExpTypes, MFA, RecordTable, VarTab, Cache),
NewAcc = [{subtype, T1, T2}|Acc],
- constraints_to_subs(Rest, MFA, RecDict, ExpTypes, AllRecords,
+ constraints_to_subs(Rest, MFA, RecDict, ExpTypes, RecordTable,
VarTab, NewCache, NewAcc).
%% Replaces variables with '_' when necessary to break up cycles among
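process_contract_remote_types/1 now takes only the codeserver, fetching the exported types and the records ETS table itself, and each contract closure receives that table handle (RecordTable) instead of a pre-built mod_records dict. A minimal sketch of threading one shared ETS handle through a list of closures with lists:mapfoldl/3, using illustrative names:

    -module(closure_table_sketch).
    -export([demo/0]).

    %% Each closure gets the shared table handle plus an accumulator
    %% (a lookup cache in dialyzer's case) and returns {Result, NewAcc}.
    demo() ->
        Table = ets:new(sketch_records, [public]),
        true = ets:insert(Table, [{a, 1}, {b, 2}]),
        Funs = [fun(Tab, Acc) -> {ets:lookup(Tab, a), Acc + 1} end,
                fun(Tab, Acc) -> {ets:lookup(Tab, b), Acc + 1} end],
        {Results, Calls} =
            lists:mapfoldl(fun(F, Acc) -> F(Table, Acc) end, 0, Funs),
        true = ets:delete(Table),
        {Results, Calls}.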
diff --git a/lib/dialyzer/src/dialyzer_coordinator.erl b/lib/dialyzer/src/dialyzer_coordinator.erl
index 99f95a4dca..7c1bc1de5a 100644
--- a/lib/dialyzer/src/dialyzer_coordinator.erl
+++ b/lib/dialyzer/src/dialyzer_coordinator.erl
@@ -76,6 +76,8 @@
active = 0 :: integer(),
result :: result(),
next_label = 0 :: integer(),
+ jobs :: [job()],
+ job_fun :: fun(),
init_data :: init_data(),
regulator :: regulator(),
scc_to_pid :: scc_to_pid()
@@ -108,16 +110,18 @@ spawn_jobs(Mode, Jobs, InitData, Timing) ->
false -> unused
end,
Coordinator = {Collector, Regulator, SCCtoPID},
- Fold =
- fun(Job, Count) ->
- Pid = dialyzer_worker:launch(Mode, Job, InitData, Coordinator),
- case TypesigOrDataflow of
- true -> true = ets:insert(SCCtoPID, {Job, Pid}), ok;
- false -> ok
- end,
- Count + 1
+ JobFun =
+ fun(Job) ->
+ Pid = dialyzer_worker:launch(Mode, Job, InitData, Coordinator),
+ case TypesigOrDataflow of
+ true -> true = ets:insert(SCCtoPID, {Job, Pid});
+ false -> true
+ end
end,
- JobCount = lists:foldl(Fold, 0, Jobs),
+ JobCount = length(Jobs),
+ NumberOfInitJobs = min(JobCount, 20 * dialyzer_utils:parallelism()),
+ {InitJobs, RestJobs} = lists:split(NumberOfInitJobs, Jobs),
+ lists:foreach(JobFun, InitJobs),
Unit =
case Mode of
'typesig' -> "SCCs";
@@ -129,11 +133,13 @@ spawn_jobs(Mode, Jobs, InitData, Timing) ->
'compile' -> dialyzer_analysis_callgraph:compile_init_result();
_ -> []
end,
- #state{mode = Mode, active = JobCount, result = InitResult, next_label = 0,
- init_data = InitData, regulator = Regulator, scc_to_pid = SCCtoPID}.
+ #state{mode = Mode, active = JobCount, result = InitResult,
+ next_label = 0, job_fun = JobFun, jobs = RestJobs,
+ init_data = InitData, regulator = Regulator, scc_to_pid = SCCtoPID}.
collect_result(#state{mode = Mode, active = Active, result = Result,
next_label = NextLabel, init_data = InitData,
+ jobs = JobsLeft, job_fun = JobFun,
regulator = Regulator, scc_to_pid = SCCtoPID} = State) ->
receive
{next_label_request, Estimation, Pid} ->
@@ -141,20 +147,35 @@ collect_result(#state{mode = Mode, active = Active, result = Result,
collect_result(State#state{next_label = NextLabel + Estimation});
{done, Job, Data} ->
NewResult = update_result(Mode, InitData, Job, Data, Result),
+ TypesigOrDataflow = (Mode =:= 'typesig') orelse (Mode =:= 'dataflow'),
case Active of
1 ->
kill_regulator(Regulator),
case Mode of
'compile' ->
{NewResult, NextLabel};
- X when X =:= 'typesig'; X =:= 'dataflow' ->
+ _ when TypesigOrDataflow ->
ets:delete(SCCtoPID),
NewResult;
'warnings' ->
NewResult
end;
N ->
- collect_result(State#state{result = NewResult, active = N - 1})
+ case TypesigOrDataflow of
+ true -> true = ets:delete(SCCtoPID, Job);
+ false -> true
+ end,
+ NewJobsLeft =
+ case JobsLeft of
+ [] -> [];
+ [NewJob|JobsLeft1] ->
+ JobFun(NewJob),
+ JobsLeft1
+ end,
+ NewState = State#state{result = NewResult,
+ jobs = NewJobsLeft,
+ active = N - 1},
+ collect_result(NewState)
end
end.
@@ -170,18 +191,20 @@ update_result(Mode, InitData, Job, Data, Result) ->
end.
-spec sccs_to_pids([scc() | module()], coordinator()) ->
- {[dialyzer_worker:worker()], [scc() | module()]}.
+ [dialyzer_worker:worker()].
sccs_to_pids(SCCs, {_Collector, _Regulator, SCCtoPID}) ->
Fold =
- fun(SCC, {Pids, Unknown}) ->
- try ets:lookup_element(SCCtoPID, SCC, 2) of
- Result -> {[Result|Pids], Unknown}
- catch
- _:_ -> {Pids, [SCC|Unknown]}
- end
+ fun(SCC, Pids) ->
+ %% The SCCs that SCC depends on have always been started.
+ try ets:lookup_element(SCCtoPID, SCC, 2) of
+ Pid when is_pid(Pid) ->
+ [Pid|Pids]
+ catch
+ _:_ -> Pids
+ end
end,
- lists:foldl(Fold, {[], []}, SCCs).
+ lists:foldl(Fold, [], SCCs).
-spec job_done(job(), job_result(), coordinator()) -> ok.
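The coordinator no longer spawns every job up front: it launches at most 20 * dialyzer_utils:parallelism() workers, keeps the remaining jobs and the launch fun in its state, and starts one queued job each time a done message arrives. A minimal sketch of that throttling loop, with an illustrative spawn_job/1 standing in for dialyzer_worker:launch/4:

    -module(throttle_sketch).
    -export([run/2]).

    %% Run all Jobs, keeping at most Limit of them in flight at a time.
    run(Jobs, Limit) ->
        {Initial, Queued} = lists:split(min(Limit, length(Jobs)), Jobs),
        lists:foreach(fun spawn_job/1, Initial),
        wait(length(Jobs), Queued).

    wait(0, []) ->
        ok;
    wait(Remaining, Queued) ->
        receive
            {done, _Job} ->
                NewQueued = case Queued of
                                [] -> [];
                                [Next | Rest] -> spawn_job(Next), Rest
                            end,
                wait(Remaining - 1, NewQueued)
        end.

    spawn_job(Job) ->
        Parent = self(),
        spawn_link(fun() -> Parent ! {done, Job} end).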
diff --git a/lib/dialyzer/src/dialyzer_dataflow.erl b/lib/dialyzer/src/dialyzer_dataflow.erl
index f706ebfb02..dc2238e63a 100644
--- a/lib/dialyzer/src/dialyzer_dataflow.erl
+++ b/lib/dialyzer/src/dialyzer_dataflow.erl
@@ -1363,7 +1363,7 @@ do_clause(C, Arg, ArgType0, OrigArgType, Map, State, Warns) ->
{{Tag, PatTypes}, false};
false ->
%% Try to find out if this is a default clause in a list
- %% comprehension and supress this. A real Hack(tm)
+ %% comprehension and suppress this. A real Hack(tm)
Force0 =
case is_compiler_generated(cerl:get_ann(C)) of
true ->
diff --git a/lib/dialyzer/src/dialyzer_plt.erl b/lib/dialyzer/src/dialyzer_plt.erl
index 37c22fef48..eb63e9e695 100644
--- a/lib/dialyzer/src/dialyzer_plt.erl
+++ b/lib/dialyzer/src/dialyzer_plt.erl
@@ -31,9 +31,8 @@
included_files/1,
from_file/1,
get_default_plt/0,
- get_types/1,
+ get_module_types/2,
get_exported_types/1,
- %% insert/3,
insert_list/2,
insert_contract_list/2,
insert_callbacks/2,
@@ -143,6 +142,10 @@ delete_list(#plt{info = Info, types = Types,
-spec insert_contract_list(plt(), dialyzer_contracts:plt_contracts()) -> plt().
+insert_contract_list(#plt{contracts = Contracts} = PLT, List) ->
+ NewContracts = dict:merge(fun(_MFA, _Old, New) -> New end,
+ Contracts, dict:from_list(List)),
+ PLT#plt{contracts = NewContracts};
insert_contract_list(#mini_plt{contracts = Contracts} = PLT, List) ->
true = ets:insert(Contracts, List),
PLT.
@@ -184,20 +187,23 @@ lookup(Plt, Label) when is_integer(Label) ->
lookup_1(#mini_plt{info = Info}, MFAorLabel) ->
ets_table_lookup(Info, MFAorLabel).
--spec insert_types(plt(), erl_types:mod_records()) -> plt().
+-spec insert_types(plt(), ets:tid()) -> plt().
-insert_types(PLT, Rec) ->
- PLT#plt{types = Rec}.
+insert_types(MiniPLT, Records) ->
+ ets:rename(Records, plt_types),
+ MiniPLT#mini_plt{types = Records}.
--spec insert_exported_types(plt(), sets:set()) -> plt().
+-spec insert_exported_types(plt(), ets:tid()) -> plt().
-insert_exported_types(PLT, Set) ->
- PLT#plt{exported_types = Set}.
+insert_exported_types(MiniPLT, ExpTypes) ->
+ ets:rename(ExpTypes, plt_exported_types),
+ MiniPLT#mini_plt{exported_types = ExpTypes}.
--spec get_types(plt()) -> erl_types:mod_records().
+-spec get_module_types(plt(), atom()) ->
+ 'none' | {'value', erl_types:type_table()}.
-get_types(#plt{types = Types}) ->
- Types.
+get_module_types(#plt{types = Types}, M) when is_atom(M) ->
+ table_lookup(Types, M).
-spec get_exported_types(plt()) -> sets:set().
@@ -520,10 +526,12 @@ get_mini_plt(#plt{info = Info,
contracts = Contracts,
callbacks = Callbacks,
exported_types = ExpTypes}) ->
- [ETSInfo, ETSTypes, ETSContracts, ETSCallbacks, ETSExpTypes] =
+ [ETSInfo, ETSContracts] =
[ets:new(Name, [public]) ||
- Name <- [plt_info, plt_types, plt_contracts, plt_callbacks,
- plt_exported_types]],
+ Name <- [plt_info, plt_contracts]],
+ [ETSTypes, ETSCallbacks, ETSExpTypes] =
+ [ets:new(Name, [compressed, public]) ||
+ Name <- [plt_types, plt_callbacks, plt_exported_types]],
CallbackList = dict:to_list(Callbacks),
CallbacksByModule =
[{M, [Cb || {{M1,_,_},_} = Cb <- CallbackList, M1 =:= M]} ||
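get_mini_plt/1 now creates the types, callbacks, and exported-types tables with the compressed option, trading slightly slower reads for a much smaller footprint on these large, mostly read-only tables. A minimal sketch comparing footprints, with illustrative table names:

    -module(compressed_ets_sketch).
    -export([memory_words/0]).

    %% Store the same object in a plain and a compressed table and
    %% return the number of words each table uses.
    memory_words() ->
        Plain = ets:new(sketch_plain, [public]),
        Compressed = ets:new(sketch_compressed, [compressed, public]),
        Obj = {key, lists:duplicate(1000, value)},
        true = ets:insert(Plain, Obj),
        true = ets:insert(Compressed, Obj),
        {ets:info(Plain, memory), ets:info(Compressed, memory)}.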
diff --git a/lib/dialyzer/src/dialyzer_succ_typings.erl b/lib/dialyzer/src/dialyzer_succ_typings.erl
index 3c90f46e95..be685baf22 100644
--- a/lib/dialyzer/src/dialyzer_succ_typings.erl
+++ b/lib/dialyzer/src/dialyzer_succ_typings.erl
@@ -29,7 +29,7 @@
-export([
find_succ_types_for_scc/2,
refine_one_module/2,
- find_required_by/2,
+ %% find_required_by/2,
find_depends_on/2,
collect_warnings/2,
lookup_names/2
@@ -236,10 +236,10 @@ refine_succ_typings(Modules, #st{codeserver = Codeserver,
find_depends_on(SCC, {_Codeserver, Callgraph, _Plt, _Solvers}) ->
dialyzer_callgraph:get_depends_on(SCC, Callgraph).
--spec find_required_by(scc() | module(), fixpoint_init_data()) -> [scc()].
+%% -spec find_required_by(scc() | module(), fixpoint_init_data()) -> [scc()].
-find_required_by(SCC, {_Codeserver, Callgraph, _Plt, _Solvers}) ->
- dialyzer_callgraph:get_required_by(SCC, Callgraph).
+%% find_required_by(SCC, {_Codeserver, Callgraph, _Plt, _Solvers}) ->
+%% dialyzer_callgraph:get_required_by(SCC, Callgraph).
-spec lookup_names([label()], fixpoint_init_data()) -> [mfa_or_funlbl()].
diff --git a/lib/dialyzer/src/dialyzer_typesig.erl b/lib/dialyzer/src/dialyzer_typesig.erl
index b33484bda4..c4f8adf7ee 100644
--- a/lib/dialyzer/src/dialyzer_typesig.erl
+++ b/lib/dialyzer/src/dialyzer_typesig.erl
@@ -81,7 +81,7 @@
-record(constraint_list, {type :: 'conj' | 'disj',
list :: [constr()],
deps :: deps(),
- masks = maps:new() :: #{dep() => mask()},
+ masks :: #{dep() => mask()} | 'undefined',
id :: {'list', dep()} | 'undefined'}).
-type constraint_list() :: #constraint_list{}.
@@ -181,7 +181,6 @@ analyze_scc(SCC, NextLabel, CallGraph, CServer, Plt, PropTypes, Solvers0) ->
M <- lists:usort([M || {M, _, _} <- SCC])],
State2 = traverse_scc(SCC, CServer, DefSet, ModRecs, State1),
State3 = state__finalize(State2),
- erlang:garbage_collect(),
Funs = state__scc(State3),
pp_constrs_scc(Funs, State3),
constraints_to_dot_scc(Funs, State3),
@@ -202,7 +201,8 @@ traverse_scc([{M,_,_}=MFA|Left], Codeserver, DefSet, ModRecs, AccState) ->
{M, Rec} = lists:keyfind(M, 1, ModRecs),
TmpState1 = state__set_rec_dict(AccState, Rec),
DummyLetrec = cerl:c_letrec([Def], cerl:c_atom(foo)),
- {NewAccState, _} = traverse(DummyLetrec, DefSet, TmpState1),
+ TmpState2 = state__new_constraint_context(TmpState1),
+ {NewAccState, _} = traverse(DummyLetrec, DefSet, TmpState2),
traverse_scc(Left, Codeserver, DefSet, ModRecs, NewAccState);
traverse_scc([], _Codeserver, _DefSet, _ModRecs, AccState) ->
AccState.
@@ -2080,6 +2080,8 @@ v2_solve_disjunct(Disj, Map, V2State0) ->
var_occurs_everywhere(V, Masks, NotFailed) ->
ordsets:is_subset(NotFailed, get_mask(V, Masks)).
+-dialyzer({no_improper_lists, [v2_solve_disj/10, v2_solve_conj/12]}).
+
v2_solve_disj([I|Is], [C|Cs], I, Map0, V2State0, UL, MapL, Eval, Uneval,
Failed0) ->
Id = C#constraint_list.id,
@@ -2098,6 +2100,12 @@ v2_solve_disj([I|Is], [C|Cs], I, Map0, V2State0, UL, MapL, Eval, Uneval,
end;
v2_solve_disj([], [], _I, _Map, V2State, UL, MapL, Eval, Uneval, Failed) ->
{ok, V2State, lists:reverse(Eval), UL, MapL, lists:reverse(Uneval), Failed};
+v2_solve_disj(every_i, Cs, I, Map, V2State, UL, MapL, Eval, Uneval, Failed) ->
+ NewIs = case Cs of
+ [] -> [];
+ _ -> [I|every_i]
+ end,
+ v2_solve_disj(NewIs, Cs, I, Map, V2State, UL, MapL, Eval, Uneval, Failed);
v2_solve_disj(Is, [C|Cs], I, Map, V2State, UL, MapL, Eval, Uneval0, Failed) ->
Uneval = [{I,C#constraint_list.id} ||
not is_failed_list(C, V2State)] ++ Uneval0,
@@ -2169,7 +2177,7 @@ v2_solve_conj([I|Is], [Cs|Tail], I, Map0, Conj, IsFlat, V2State0,
M = lists:keydelete(I, 1, vars_per_child(U, Masks)),
{V2State2, NewF0} = save_updated_vars_list(AllCs, M, V2State1),
{NewF, F} = lists:splitwith(fun(J) -> J < I end, NewF0),
- Is1 = lists:umerge(Is, F),
+ Is1 = umerge_mask(Is, F),
NewFs = [NewF|NewFs0],
v2_solve_conj(Is1, Tail, I+1, Map, Conj, IsFlat, V2State2,
[U|UL], NewFs, VarsUp, LastMap, LastFlags)
@@ -2191,6 +2199,14 @@ v2_solve_conj([], _Cs, _I, Map, Conj, IsFlat, V2State, UL, NewFs, VarsUp,
v2_solve_conj(NewFlags, Cs, 1, Map, Conj, IsFlat, V2State,
[], [], [U|VarsUp], Map, NewFlags)
end;
+v2_solve_conj(every_i, Cs, I, Map, Conj, IsFlat, V2State, UL, NewFs, VarsUp,
+ LastMap, LastFlags) ->
+ NewIs = case Cs of
+ [] -> [];
+ _ -> [I|every_i]
+ end,
+ v2_solve_conj(NewIs, Cs, I, Map, Conj, IsFlat, V2State, UL, NewFs, VarsUp,
+ LastMap, LastFlags);
v2_solve_conj(Is, [_|Tail], I, Map, Conj, IsFlat, V2State, UL, NewFs, VarsUp,
LastMap, LastFlags) ->
v2_solve_conj(Is, Tail, I+1, Map, Conj, IsFlat, V2State, UL, NewFs, VarsUp,
@@ -2207,7 +2223,12 @@ report_detected_loop(_) ->
add_mask_to_flags(Flags, [Im|M], I, L) when I > Im ->
add_mask_to_flags(Flags, M, I, [Im|L]);
add_mask_to_flags(Flags, [_|M], _I, L) ->
- {lists:umerge(M, Flags), lists:reverse(L)}.
+ {umerge_mask(Flags, M), lists:reverse(L)}.
+
+umerge_mask(every_i, _F) ->
+ every_i;
+umerge_mask(Is, F) ->
+ lists:umerge(Is, F).
get_mask(V, Masks) ->
case maps:find(V, Masks) of
@@ -2221,7 +2242,7 @@ get_flags(#v2_state{constr_data = ConData}=V2State0, C) ->
error ->
?debug("get_flags Id=~w Flags=all ~w\n", [Id, length(Cs)]),
V2State = V2State0#v2_state{constr_data = maps:put(Id, {[],[]}, ConData)},
- {V2State, lists:seq(1, length(Cs))};
+ {V2State, every_i};
{ok, failed} ->
{V2State0, failed_list};
{ok, {Part,U}} when U =/= [] ->
@@ -2901,8 +2922,9 @@ state__get_rec_var(Fun, #state{fun_map = Map}) ->
maps:find(Fun, Map).
state__finalize(State) ->
- State1 = enumerate_constraints(State),
- order_fun_constraints(State1).
+ State1 = state__new_constraint_context(State),
+ State2 = enumerate_constraints(State1),
+ order_fun_constraints(State2).
%% ============================================================================
%%
@@ -2982,7 +3004,7 @@ find_constraint_deps([Type|Tail], Acc) ->
NewAcc = [[t_var_name(D) || D <- t_collect_vars(Type)]|Acc],
find_constraint_deps(Tail, NewAcc);
find_constraint_deps([], Acc) ->
- lists:flatten(Acc).
+ lists:append(Acc).
mk_constraint_1(Lhs, eq, Rhs, Deps) when Lhs < Rhs ->
#constraint{lhs = Lhs, op = eq, rhs = Rhs, deps = Deps};
@@ -3090,8 +3112,8 @@ expand_to_conjunctions(#constraint_list{type = disj, list = List}) ->
List1 = [C || C <- List, is_simple_constraint(C)],
%% Just an assert.
[] = [C || #constraint{} = C <- List1],
- Expanded = lists:flatten([expand_to_conjunctions(C)
- || #constraint_list{} = C <- List]),
+ Expanded = lists:append([expand_to_conjunctions(C)
+ || #constraint_list{} = C <- List]),
ReturnList = Expanded ++ List1,
if length(ReturnList) > ?DISJ_NORM_FORM_LIMIT -> throw(too_many_disj);
true -> ReturnList
@@ -3116,8 +3138,10 @@ calculate_deps(List) ->
calculate_deps([H|Tail], Acc) ->
Deps = get_deps(H),
calculate_deps(Tail, [Deps|Acc]);
+calculate_deps([], []) -> [];
+calculate_deps([], [L]) -> L;
calculate_deps([], Acc) ->
- ordsets:from_list(lists:flatten(Acc)).
+ lists:umerge(Acc).
mk_conj_constraint_list(List) ->
mk_constraint_list(conj, List).
@@ -3185,7 +3209,8 @@ order_fun_constraints(State) ->
order_fun_constraints([#constraint_ref{id = Id}|Tail], State) ->
Cs = state__get_cs(Id, State),
- {[NewCs], State1} = order_fun_constraints([Cs], [], [], State),
+ {[Cs1], State1} = order_fun_constraints([Cs], [], [], State),
+ NewCs = Cs1#constraint_list{deps = Cs#constraint_list.deps},
NewState = state__store_constrs(Id, NewCs, State1),
order_fun_constraints(Tail, NewState);
order_fun_constraints([], State) ->
@@ -3193,23 +3218,31 @@ order_fun_constraints([], State) ->
order_fun_constraints([#constraint_ref{} = C|Tail], Funs, Acc, State) ->
order_fun_constraints(Tail, [C|Funs], Acc, State);
-order_fun_constraints([#constraint_list{list = List, type = Type} = C|Tail],
+order_fun_constraints([#constraint_list{list = List,
+ type = Type,
+ masks = OldMasks} = C|Tail],
Funs, Acc, State) ->
- {NewList, NewState} =
- case Type of
- conj -> order_fun_constraints(List, [], [], State);
- disj ->
- FoldFun = fun(X, AccState) ->
- {[NewX], NewAccState} =
- order_fun_constraints([X], [], [], AccState),
- {NewX, NewAccState}
- end,
- lists:mapfoldl(FoldFun, State, List)
- end,
- C1 = update_constraint_list(C, NewList),
- Masks = calculate_masks(NewList, 1, []),
- NewAcc = [update_masks(C1, Masks)|Acc],
- order_fun_constraints(Tail, Funs, NewAcc, NewState);
+ case OldMasks of
+ undefined ->
+ {NewList, NewState} =
+ case Type of
+ conj -> order_fun_constraints(List, [], [], State);
+ disj ->
+ FoldFun = fun(X, AccState) ->
+ {[NewX], NewAccState} =
+ order_fun_constraints([X], [], [], AccState),
+ {NewX, NewAccState}
+ end,
+ lists:mapfoldl(FoldFun, State, List)
+ end,
+ NewList2 = reset_deps(NewList, State),
+ C1 = update_constraint_list(C, NewList2),
+ Masks = calculate_masks(NewList, 1, []),
+ NewAcc = [update_masks(C1, Masks)|Acc],
+ order_fun_constraints(Tail, Funs, NewAcc, NewState);
+ M when is_map(M) ->
+ order_fun_constraints(Tail, Funs, [C|Acc], State)
+ end;
order_fun_constraints([#constraint{} = C|Tail], Funs, Acc, State) ->
order_fun_constraints(Tail, Funs, [C|Acc], State);
order_fun_constraints([], Funs, Acc, State) ->
@@ -3219,6 +3252,18 @@ order_fun_constraints([], Funs, Acc, State) ->
update_masks(C, Masks) ->
C#constraint_list{masks = Masks}.
+reset_deps(ConstrList, #state{solvers = Solvers}) ->
+ case lists:member(v1, Solvers) of
+ true ->
+ ConstrList;
+ false ->
+ [reset_deps(Constr) || Constr <- ConstrList]
+ end.
+
+reset_deps(#constraint{}=C) -> C#constraint{deps = []};
+reset_deps(#constraint_list{}=C) -> C#constraint_list{deps = []};
+reset_deps(#constraint_ref{}=C) -> C#constraint_ref{deps = []}.
+
calculate_masks([C|Cs], I, L0) ->
calculate_masks(Cs, I+1, [{V, I} || V <- get_deps(C)] ++ L0);
calculate_masks([], _I, L) ->
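get_flags/2 now returns the atom every_i instead of materializing lists:seq(1, length(Cs)), and v2_solve_disj/10 and v2_solve_conj/12 unfold it lazily, one improper cons [I|every_i] at a time, which is what the added no_improper_lists suppression covers. A minimal sketch of the same lazy all-indices representation, with an illustrative sum_indexed/1:

    -module(every_i_sketch).
    -export([sum_indexed/1]).

    %% The atom every_i stands for "all remaining indices"; it is
    %% unfolded one improper cons [I | every_i] at a time instead of
    %% building lists:seq(1, length(Items)) up front.
    -dialyzer({no_improper_lists, [walk/4]}).

    sum_indexed(Items) ->
        walk(every_i, Items, 1, 0).

    walk(every_i, [], _I, Acc) ->
        Acc;
    walk(every_i, Items, I, Acc) ->
        walk([I | every_i], Items, I, Acc);      %% lazy unfold
    walk([I | Is], [X | Xs], _I, Acc) ->
        walk(Is, Xs, I + 1, Acc + I * X).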
diff --git a/lib/dialyzer/src/dialyzer_utils.erl b/lib/dialyzer/src/dialyzer_utils.erl
index 432d27571b..9eaf95c1a2 100644
--- a/lib/dialyzer/src/dialyzer_utils.erl
+++ b/lib/dialyzer/src/dialyzer_utils.erl
@@ -37,9 +37,9 @@
get_fun_meta_info/3,
is_suppressed_fun/2,
is_suppressed_tag/3,
- merge_records/2,
pp_hook/0,
process_record_remote_types/1,
+ merge_types/2,
sets_filter/2,
src_compiler_opts/0,
refold_pattern/1,
@@ -188,7 +188,6 @@ get_core_from_abstract_code(AbstrCode, Opts) ->
%% ============================================================================
-type type_table() :: erl_types:type_table().
--type mod_records() :: dict:dict(module(), type_table()).
-spec get_record_and_type_info(abstract_code()) ->
{'ok', type_table()} | {'error', string()}.
@@ -289,18 +288,18 @@ get_record_fields([{record_field, _Line, Name, _Init}|Left], RecDict, Acc) ->
get_record_fields([], _RecDict, Acc) ->
lists:reverse(Acc).
--spec process_record_remote_types(codeserver()) ->
- {codeserver(), mod_records()}.
+-spec process_record_remote_types(codeserver()) -> codeserver().
%% The field types are cached. Used during analysis when handling records.
process_record_remote_types(CServer) ->
- TempRecords = dialyzer_codeserver:get_temp_records(CServer),
ExpTypes = dialyzer_codeserver:get_exported_types(CServer),
- TempRecords1 = process_opaque_types0(TempRecords, ExpTypes),
- %% A cache (not the field type cache) is used for speeding things up a bit.
+ Mods = dialyzer_codeserver:all_temp_modules(CServer),
+ process_opaque_types0(Mods, CServer, ExpTypes),
VarTable = erl_types:var_table__new(),
+ RecordTable = dialyzer_codeserver:get_temp_records_table(CServer),
ModuleFun =
- fun({Module, Record}) ->
+ fun(Module) ->
+ RecordMap = dialyzer_codeserver:lookup_temp_mod_records(Module, CServer),
RecordFun =
fun({Key, Value}, C2) ->
case Key of
@@ -313,7 +312,7 @@ process_record_remote_types(CServer) ->
{FieldT, C6} =
erl_types:t_from_form
(Field, ExpTypes, Site,
- TempRecords1, VarTable,
+ RecordTable, VarTable,
C5),
{{FieldName, Field, FieldT}, C6}
end, C4, Fields),
@@ -328,30 +327,29 @@ process_record_remote_types(CServer) ->
end,
Cache = erl_types:cache__new(),
{RecordList, _NewCache} =
- lists:mapfoldl(RecordFun, Cache, maps:to_list(Record)),
- {Module, maps:from_list(RecordList)}
+ lists:mapfoldl(RecordFun, Cache, maps:to_list(RecordMap)),
+ dialyzer_codeserver:store_temp_records(Module,
+ maps:from_list(RecordList),
+ CServer)
end,
- NewRecordsList = lists:map(ModuleFun, dict:to_list(TempRecords1)),
- NewRecords = dict:from_list(NewRecordsList),
- check_record_fields(NewRecords, ExpTypes),
- {dialyzer_codeserver:finalize_records(NewRecords, CServer), NewRecords}.
+ lists:foreach(ModuleFun, Mods),
+ check_record_fields(Mods, CServer, ExpTypes),
+ dialyzer_codeserver:finalize_records(CServer).
%% erl_types:t_from_form() substitutes the declaration of opaque types
%% for the expanded type in some cases. To make sure the initial type,
%% any(), is not used, the expansion is done twice.
%% XXX: Recursive opaque types are not handled well.
-process_opaque_types0(TempRecords0, TempExpTypes) ->
- Cache = erl_types:cache__new(),
- {TempRecords1, Cache1} =
- process_opaque_types(TempRecords0, TempExpTypes, Cache),
- {TempRecords, _NewCache} =
- process_opaque_types(TempRecords1, TempExpTypes, Cache1),
- TempRecords.
-
-process_opaque_types(TempRecords, TempExpTypes, Cache) ->
+process_opaque_types0(AllModules, CServer, TempExpTypes) ->
+ process_opaque_types(AllModules, CServer, TempExpTypes),
+ process_opaque_types(AllModules, CServer, TempExpTypes).
+
+process_opaque_types(AllModules, CServer, TempExpTypes) ->
VarTable = erl_types:var_table__new(),
+ RecordTable = dialyzer_codeserver:get_temp_records_table(CServer),
ModuleFun =
- fun({Module, Record}, C0) ->
+ fun(Module) ->
+ RecordMap = dialyzer_codeserver:lookup_temp_mod_records(Module, CServer),
RecordFun =
fun({Key, Value}, C2) ->
case Key of
@@ -360,32 +358,32 @@ process_opaque_types(TempRecords, TempExpTypes, Cache) ->
Site = {type, {Module, Name, NArgs}},
{Type, C3} =
erl_types:t_from_form(Form, TempExpTypes, Site,
- TempRecords, VarTable, C2),
+ RecordTable, VarTable, C2),
{{Key, {F, Type}}, C3};
_Other -> {{Key, Value}, C2}
end
end,
- {RecordList, C1} =
- lists:mapfoldl(RecordFun, C0, maps:to_list(Record)),
- {{Module, maps:from_list(RecordList)}, C1}
- %% dict:map(RecordFun, Record)
+ C0 = erl_types:cache__new(),
+ {RecordList, _NewCache} =
+ lists:mapfoldl(RecordFun, C0, maps:to_list(RecordMap)),
+ dialyzer_codeserver:store_temp_records(Module,
+ maps:from_list(RecordList),
+ CServer)
end,
- {TempRecordList, NewCache} =
- lists:mapfoldl(ModuleFun, Cache, dict:to_list(TempRecords)),
- {dict:from_list(TempRecordList), NewCache}.
- %% dict:map(ModuleFun, TempRecords).
+ lists:foreach(ModuleFun, AllModules).
-check_record_fields(Records, TempExpTypes) ->
- Cache = erl_types:cache__new(),
+check_record_fields(AllModules, CServer, TempExpTypes) ->
VarTable = erl_types:var_table__new(),
+ RecordTable = dialyzer_codeserver:get_temp_records_table(CServer),
CheckFun =
- fun({Module, Element}, C0) ->
+ fun(Module) ->
CheckForm = fun(Form, Site, C1) ->
erl_types:t_check_record_fields(Form, TempExpTypes,
- Site, Records,
+ Site, RecordTable,
VarTable, C1)
end,
- ElemFun =
+ RecordMap = dialyzer_codeserver:lookup_temp_mod_records(Module, CServer),
+ RecordFun =
fun({Key, Value}, C2) ->
case Key of
{record, Name} ->
@@ -406,10 +404,10 @@ check_record_fields(Records, TempExpTypes) ->
msg_with_position(Fun, FileLine)
end
end,
- lists:foldl(ElemFun, C0, maps:to_list(Element))
+ C0 = erl_types:cache__new(),
+ _ = lists:foldl(RecordFun, C0, maps:to_list(RecordMap))
end,
- _NewCache = lists:foldl(CheckFun, Cache, dict:to_list(Records)),
- ok.
+ lists:foreach(CheckFun, AllModules).
msg_with_position(Fun, FileLine) ->
try Fun()
@@ -421,10 +419,37 @@ msg_with_position(Fun, FileLine) ->
throw({error, NewMsg})
end.
--spec merge_records(mod_records(), mod_records()) -> mod_records().
+-spec merge_types(codeserver(), dialyzer_plt:plt()) -> codeserver().
-merge_records(NewRecords, OldRecords) ->
- dict:merge(fun(_Key, NewVal, _OldVal) -> NewVal end, NewRecords, OldRecords).
+merge_types(CServer, Plt) ->
+ AllNewModules = dialyzer_codeserver:all_temp_modules(CServer),
+ AllNewModulesSet = sets:from_list(AllNewModules),
+ AllOldModulesSet = dialyzer_plt:all_modules(Plt),
+ AllModulesSet = sets:union(AllNewModulesSet, AllOldModulesSet),
+ ModuleFun =
+ fun(Module) ->
+ KeepOldFun =
+ fun() ->
+ case dialyzer_plt:get_module_types(Plt, Module) of
+ none -> no;
+ {value, OldRecords} ->
+ case sets:is_element(Module, AllNewModulesSet) of
+ true -> no;
+ false -> {yes, OldRecords}
+ end
+ end
+ end,
+ Records =
+ case KeepOldFun() of
+ no ->
+ dialyzer_codeserver:lookup_temp_mod_records(Module, CServer);
+ {yes, OldRecords} ->
+ OldRecords
+ end,
+ dialyzer_codeserver:store_temp_records(Module, Records, CServer)
+ end,
+ lists:foreach(ModuleFun, sets:to_list(AllModulesSet)),
+ CServer.
%% ============================================================================
%%
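merge_types/2 replaces the old merge_records/2 dict merge: for every module known to either the new analysis or the PLT it stores one record map in the codeserver, preferring the freshly analysed entry and falling back to the PLT's only when the module is not being re-analysed. A minimal sketch of that per-module decision, with LookupNew and LookupOld as illustrative stand-ins for the codeserver and PLT lookups:

    -module(merge_types_sketch).
    -export([pick_records/4]).

    %% Prefer freshly analysed records; fall back to the old PLT entry
    %% only when the module is not part of the new analysis.
    pick_records(Module, NewModulesSet, LookupNew, LookupOld) ->
        case sets:is_element(Module, NewModulesSet) of
            true ->
                LookupNew(Module);
            false ->
                case LookupOld(Module) of
                    none -> LookupNew(Module);
                    {value, OldRecords} -> OldRecords
                end
        end.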
diff --git a/lib/dialyzer/src/dialyzer_worker.erl b/lib/dialyzer/src/dialyzer_worker.erl
index 418c9798b3..af0f2e9e08 100644
--- a/lib/dialyzer/src/dialyzer_worker.erl
+++ b/lib/dialyzer/src/dialyzer_worker.erl
@@ -56,10 +56,14 @@ launch(Mode, Job, InitData, Coordinator) ->
%%--------------------------------------------------------------------
-init(#state{job = SCC, mode = Mode, init_data = InitData} = State) when
+init(#state{job = SCC, mode = Mode, init_data = InitData,
+ coordinator = Coordinator} = State) when
Mode =:= 'typesig'; Mode =:= 'dataflow' ->
- DependsOn = dialyzer_succ_typings:find_depends_on(SCC, InitData),
- ?debug("Deps ~p: ~p\n",[SCC, DependsOn]),
+ DependsOnSCCs = dialyzer_succ_typings:find_depends_on(SCC, InitData),
+ ?debug("~w: Deps ~p: ~p\n", [self(), SCC, DependsOnSCCs]),
+ Pids = dialyzer_coordinator:sccs_to_pids(DependsOnSCCs, Coordinator),
+ ?debug("~w: PidsDeps ~p\n", [self(), Pids]),
+ DependsOn = [{Pid, erlang:monitor(process, Pid)} || Pid <- Pids],
loop(updating, State#state{depends_on = DependsOn});
init(#state{mode = Mode} = State) when
Mode =:= 'compile'; Mode =:= 'warnings' ->
@@ -67,7 +71,7 @@ init(#state{mode = Mode} = State) when
loop(updating, #state{mode = Mode} = State) when
Mode =:= 'typesig'; Mode =:= 'dataflow' ->
- ?debug("Update: ~p\n",[State#state.job]),
+ ?debug("~w: Update: ~p\n", [self(), State#state.job]),
NextStatus =
case waits_more_success_typings(State) of
true -> waiting;
@@ -76,11 +80,11 @@ loop(updating, #state{mode = Mode} = State) when
loop(NextStatus, State);
loop(waiting, #state{mode = Mode} = State) when
Mode =:= 'typesig'; Mode =:= 'dataflow' ->
- ?debug("Wait: ~p\n",[State#state.job]),
+ ?debug("~w: Wait: ~p\n", [self(), State#state.job]),
NewState = wait_for_success_typings(State),
loop(updating, NewState);
loop(running, #state{mode = 'compile'} = State) ->
- dialyzer_coordinator:request_activation(State#state.coordinator),
+ request_activation(State),
?debug("Compile: ~s\n",[State#state.job]),
Result =
case start_compilation(State) of
@@ -92,51 +96,28 @@ loop(running, #state{mode = 'compile'} = State) ->
end,
report_to_coordinator(Result, State);
loop(running, #state{mode = 'warnings'} = State) ->
- dialyzer_coordinator:request_activation(State#state.coordinator),
+ request_activation(State),
?debug("Warning: ~s\n",[State#state.job]),
Result = collect_warnings(State),
report_to_coordinator(Result, State);
loop(running, #state{mode = Mode} = State) when
Mode =:= 'typesig'; Mode =:= 'dataflow' ->
request_activation(State),
- ?debug("Run: ~p\n",[State#state.job]),
+ ?debug("~w: Run: ~p\n", [self(), State#state.job]),
NotFixpoint = do_work(State),
- ok = broadcast_done(State),
report_to_coordinator(NotFixpoint, State).
waits_more_success_typings(#state{depends_on = Depends}) ->
Depends =/= [].
-broadcast_done(#state{job = SCC, init_data = InitData,
- coordinator = Coordinator}) ->
- RequiredBy = dialyzer_succ_typings:find_required_by(SCC, InitData),
- {Callers, Unknown} =
- dialyzer_coordinator:sccs_to_pids(RequiredBy, Coordinator),
- send_done(Callers, SCC),
- continue_broadcast_done(Unknown, SCC, Coordinator).
-
-send_done(Callers, SCC) ->
- ?debug("Sending ~p: ~p\n",[SCC, Callers]),
- SendSTFun = fun(PID) -> PID ! {done, SCC} end,
- lists:foreach(SendSTFun, Callers).
-
-continue_broadcast_done([], _SCC, _Coordinator) -> ok;
-continue_broadcast_done(Rest, SCC, Coordinator) ->
- %% This time limit should be greater than the time required
- %% by the coordinator to spawn all processes.
- timer:sleep(500),
- {Callers, Unknown} = dialyzer_coordinator:sccs_to_pids(Rest, Coordinator),
- send_done(Callers, SCC),
- continue_broadcast_done(Unknown, SCC, Coordinator).
-
wait_for_success_typings(#state{depends_on = DependsOn} = State) ->
receive
- {done, SCC} ->
- ?debug("GOT ~p: ~p\n",[State#state.job, SCC]),
- State#state{depends_on = DependsOn -- [SCC]}
+ {'DOWN', Ref, process, Pid, _Info} ->
+ ?debug("~w: ~p got DOWN: ~p\n", [self(), State#state.job, Pid]),
+ State#state{depends_on = DependsOn -- [{Pid, Ref}]}
after
5000 ->
- ?debug("Still Waiting ~p: ~p\n",[State#state.job, DependsOn]),
+ ?debug("~w: Still Waiting ~p:\n ~p\n", [self(), State#state.job, DependsOn]),
State
end.
@@ -150,7 +131,7 @@ do_work(#state{mode = Mode, job = Job, init_data = InitData}) ->
end.
report_to_coordinator(Result, #state{job = Job, coordinator = Coordinator}) ->
- ?debug("Done: ~p\n",[Job]),
+ ?debug("~w: Done: ~p\n",[self(), Job]),
dialyzer_coordinator:job_done(Job, Result, Coordinator).
start_compilation(#state{job = Job, init_data = InitData}) ->
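Instead of broadcasting done messages, a typesig or dataflow worker now resolves its dependencies to pids via sccs_to_pids/2, monitors each of them, and treats every 'DOWN' message as one dependency finished. A minimal sketch of waiting on a set of monitored processes, with illustrative names:

    -module(monitor_deps_sketch).
    -export([wait_for/1]).

    %% Monitor every dependency pid and block until all of them have
    %% terminated, consuming one 'DOWN' message per dependency.
    wait_for(Pids) ->
        Monitors = [{Pid, erlang:monitor(process, Pid)} || Pid <- Pids],
        wait_loop(Monitors).

    wait_loop([]) ->
        ok;
    wait_loop(Monitors) ->
        receive
            {'DOWN', Ref, process, Pid, _Info} ->
                wait_loop(Monitors -- [{Pid, Ref}])
        end.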
diff --git a/lib/dialyzer/test/behaviour_SUITE_data/dialyzer_options b/lib/dialyzer/test/behaviour_SUITE_data/dialyzer_options
index cb6a88786e..365b4798c5 100644
--- a/lib/dialyzer/test/behaviour_SUITE_data/dialyzer_options
+++ b/lib/dialyzer/test/behaviour_SUITE_data/dialyzer_options
@@ -1,2 +1,2 @@
{dialyzer_options, []}.
-{time_limit, 2}.
+{time_limit, 5}.
diff --git a/lib/dialyzer/test/map_SUITE_data/dialyzer_options b/lib/dialyzer/test/map_SUITE_data/dialyzer_options
index 50991c9bc5..02425c33f2 100644
--- a/lib/dialyzer/test/map_SUITE_data/dialyzer_options
+++ b/lib/dialyzer/test/map_SUITE_data/dialyzer_options
@@ -1 +1,2 @@
{dialyzer_options, []}.
+{time_limit, 30}.
diff --git a/lib/dialyzer/test/map_SUITE_data/results/map_galore b/lib/dialyzer/test/map_SUITE_data/results/map_galore
index 6ea88f01f8..c34ba5cf30 100644
--- a/lib/dialyzer/test/map_SUITE_data/results/map_galore
+++ b/lib/dialyzer/test/map_SUITE_data/results/map_galore
@@ -20,9 +20,9 @@ map_galore.erl:186: The pattern #{'x':=2} can never match the type #{'x':=3}
map_galore.erl:187: The pattern #{'x':=3} can never match the type {'a','b','c'}
map_galore.erl:188: The pattern #{'x':=3} can never match the type #{'y':=3}
map_galore.erl:189: The pattern #{'x':=3} can never match the type #{'x':=[101 | 104 | 114 | 116,...]}
-map_galore.erl:2304: Cons will produce an improper list since its 2nd argument is {'b','a'}
-map_galore.erl:2304: The call maps:from_list(nonempty_improper_list({'a','b'},{'b','a'})) will never return since it differs in the 1st argument from the success typing arguments: ([{_,_}])
-map_galore.erl:2305: The call maps:from_list('a') will never return since it differs in the 1st argument from the success typing arguments: ([{_,_}])
-map_galore.erl:2306: The call maps:from_list(42) will never return since it differs in the 1st argument from the success typing arguments: ([{_,_}])
+map_galore.erl:2280: Cons will produce an improper list since its 2nd argument is {'b','a'}
+map_galore.erl:2280: The call maps:from_list(nonempty_improper_list({'a','b'},{'b','a'})) will never return since it differs in the 1st argument from the success typing arguments: ([{_,_}])
+map_galore.erl:2281: The call maps:from_list('a') will never return since it differs in the 1st argument from the success typing arguments: ([{_,_}])
+map_galore.erl:2282: The call maps:from_list(42) will never return since it differs in the 1st argument from the success typing arguments: ([{_,_}])
map_galore.erl:997: A key of type 'nonexisting' cannot exist in a map of type #{}
map_galore.erl:998: A key of type 'nonexisting' cannot exist in a map of type #{1:='a', 2:='b', 4:='d', 5:='e', float()=>'c'}
diff --git a/lib/dialyzer/test/map_SUITE_data/src/map_galore.erl b/lib/dialyzer/test/map_SUITE_data/src/map_galore.erl
index 2611241379..99eb73a5f6 100644
--- a/lib/dialyzer/test/map_SUITE_data/src/map_galore.erl
+++ b/lib/dialyzer/test/map_SUITE_data/src/map_galore.erl
@@ -2070,11 +2070,8 @@ t_bif_map_values(Config) when is_list(Config) ->
ok.
t_erlang_hash(Config) when is_list(Config) ->
-
ok = t_bif_erlang_phash2(),
ok = t_bif_erlang_phash(),
- ok = t_bif_erlang_hash(),
-
ok.
t_bif_erlang_phash2() ->
@@ -2117,27 +2114,6 @@ t_bif_erlang_phash() ->
2620391445 = erlang:phash(M2,Sz), % 3590546636
ok.
-t_bif_erlang_hash() ->
- Sz = 1 bsl 27 - 1,
- 39684169 = erlang:hash(#{},Sz), % 5158
- 33673142 = erlang:hash(#{ a => 1, "a" => 2, <<"a">> => 3, {a,b} => 4 },Sz), % 71555838
- 95337869 = erlang:hash(#{ 1 => a, 2 => "a", 3 => <<"a">>, 4 => {a,b} },Sz), % 5497225
- 108959561 = erlang:hash(#{ 1 => a },Sz), % 126071654
- 59623150 = erlang:hash(#{ a => 1 },Sz), % 126426236
-
- 42775386 = erlang:hash(#{{} => <<>>},Sz), % 101655720
- 71692856 = erlang:hash(#{<<>> => {}},Sz), % 101655720
-
- M0 = #{ a => 1, "key" => <<"value">> },
- M1 = maps:remove("key",M0),
- M2 = M1#{ "key" => <<"value">> },
-
- 70254632 = erlang:hash(M0,Sz), % 38260486
- 59623150 = erlang:hash(M1,Sz), % 126426236
- 70254632 = erlang:hash(M2,Sz), % 38260486
- ok.
-
-
t_map_encode_decode(Config) when is_list(Config) ->
<<131,116,0,0,0,0>> = erlang:term_to_binary(#{}),
Pairs = [
diff --git a/lib/dialyzer/test/opaque_SUITE_data/dialyzer_options b/lib/dialyzer/test/opaque_SUITE_data/dialyzer_options
index 06ed52043a..cb301ff6a1 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/dialyzer_options
+++ b/lib/dialyzer/test/opaque_SUITE_data/dialyzer_options
@@ -1,2 +1,2 @@
{dialyzer_options, [{warnings, [no_unused, no_return]}]}.
-{time_limit, 20}.
+{time_limit, 40}.
diff --git a/lib/dialyzer/test/opaque_SUITE_data/src/recrec/dialyzer_dataflow.erl b/lib/dialyzer/test/opaque_SUITE_data/src/recrec/dialyzer_dataflow.erl
index 6a5b593db0..53b08cc5c9 100644
--- a/lib/dialyzer/test/opaque_SUITE_data/src/recrec/dialyzer_dataflow.erl
+++ b/lib/dialyzer/test/opaque_SUITE_data/src/recrec/dialyzer_dataflow.erl
@@ -1340,7 +1340,7 @@ do_clause(C, Arg, ArgType0, OrigArgType, Map, State) ->
{{Tag, PatTypes}, false};
false ->
%% Try to find out if this is a default clause in a list
- %% comprehension and supress this. A real Hack(tm)
+ %% comprehension and suppress this. A real Hack(tm)
Force0 =
case is_compiler_generated(cerl:get_ann(C)) of
true ->
diff --git a/lib/dialyzer/test/options1_SUITE_data/src/compiler/beam_disasm.erl b/lib/dialyzer/test/options1_SUITE_data/src/compiler/beam_disasm.erl
index 0108f91b7f..cf2cbe8e2b 100644
--- a/lib/dialyzer/test/options1_SUITE_data/src/compiler/beam_disasm.erl
+++ b/lib/dialyzer/test/options1_SUITE_data/src/compiler/beam_disasm.erl
@@ -565,7 +565,7 @@ resolve_inst({make_fun2,Args},_,_,Lbls,Lambdas) ->
[OldIndex] = resolve_args(Args),
{value,{OldIndex,{F,A,_Lbl,_Index,NumFree,OldUniq}}} =
lists:keysearch(OldIndex, 1, Lambdas),
- [{_,{M,_,_}}|_] = Lbls, % Slighly kludgy.
+ [{_,{M,_,_}}|_] = Lbls, % Slightly kludgy.
{make_fun2,{M,F,A},OldIndex,OldUniq,NumFree};
resolve_inst(Instr, Imports, Str, Lbls, _Lambdas) ->
resolve_inst(Instr, Imports, Str, Lbls).
diff --git a/lib/dialyzer/test/options1_SUITE_data/src/compiler/cerl_inline.erl b/lib/dialyzer/test/options1_SUITE_data/src/compiler/cerl_inline.erl
index 95d2076ccf..8fca202b8c 100644
--- a/lib/dialyzer/test/options1_SUITE_data/src/compiler/cerl_inline.erl
+++ b/lib/dialyzer/test/options1_SUITE_data/src/compiler/cerl_inline.erl
@@ -951,7 +951,7 @@ i_letrec(Es, B, Xs, Ctxt, Ren, Env, S) ->
%% Finally, we create new letrec-bindings for any and all
%% residualised definitions. All referenced functions should have
- %% been visited; the call to `visit' below is expected to retreive a
+ %% been visited; the call to `visit' below is expected to retrieve a
%% cached expression.
Rs1 = keep_referenced(Rs, S4),
{Es1, S5} = mapfoldl(fun (R, S) ->
@@ -997,7 +997,7 @@ i_apply(E, Ctxt, Ren, Env, S) ->
%% location could be recycled after the flag has been tested, but
%% there is no real advantage to that, because in practice, only
%% 4-5% of all created store locations will ever be reused, while
- %% there will be a noticable overhead for managing the free list.)
+ %% there will be a noticeable overhead for managing the free list.)
case st__get_app_inlined(L, S3) of
true ->
%% The application was inlined, so we have the final
@@ -2007,7 +2007,7 @@ residualize_operand(Opnd, E, S) ->
case st__get_opnd_effect(Opnd#opnd.loc, S) of
true ->
%% The operand has not been visited, so we do that now, but
- %% in `effect' context. (Waddell's algoritm does some stuff
+ %% in `effect' context. (Waddell's algorithm does some stuff
%% here to account specially for the operand size, which
%% appears unnecessary.)
{E1, S1} = i(Opnd#opnd.expr, effect, Opnd#opnd.ren,
diff --git a/lib/dialyzer/test/options1_SUITE_data/src/compiler/rec_env.erl b/lib/dialyzer/test/options1_SUITE_data/src/compiler/rec_env.erl
index 01c2512397..76ae871aee 100644
--- a/lib/dialyzer/test/options1_SUITE_data/src/compiler/rec_env.erl
+++ b/lib/dialyzer/test/options1_SUITE_data/src/compiler/rec_env.erl
@@ -469,7 +469,7 @@ get(Key, Env) ->
-define(MINIMUM_RANGE, 1000).
-define(START_RANGE_FACTOR, 50).
-define(MAX_RETRIES, 2). % retries before enlarging range
--define(ENLARGE_FACTOR, 10). % range enlargment factor
+-define(ENLARGE_FACTOR, 10). % range enlargement factor
-ifdef(DEBUG).
%% If you want to use these process dictionary counters, make sure to
diff --git a/lib/dialyzer/test/options1_SUITE_data/src/compiler/sys_pre_expand.erl b/lib/dialyzer/test/options1_SUITE_data/src/compiler/sys_pre_expand.erl
index 49a95a95e5..69139cd568 100644
--- a/lib/dialyzer/test/options1_SUITE_data/src/compiler/sys_pre_expand.erl
+++ b/lib/dialyzer/test/options1_SUITE_data/src/compiler/sys_pre_expand.erl
@@ -316,7 +316,7 @@ record_test_in_guard(Line, Term, Name, Vs, St) ->
%% code bloat.)
%% (4) Xref may be run on the abstract code, so the name in the
%% abstract code must be erlang:is_record/3.
- %% (5) To achive both (3) and (4) at the same time, set the name
+ %% (5) To achieve both (3) and (4) at the same time, set the name
%% here to erlang:is_record/3, but mark it as compiler-generated.
%% The v3_core pass will change the name to erlang:internal_is_record/3.
Fs = record_fields(Name, St),
diff --git a/lib/dialyzer/test/options1_SUITE_data/src/compiler/v3_codegen.erl b/lib/dialyzer/test/options1_SUITE_data/src/compiler/v3_codegen.erl
index 33a322b466..acb49b5faf 100644
--- a/lib/dialyzer/test/options1_SUITE_data/src/compiler/v3_codegen.erl
+++ b/lib/dialyzer/test/options1_SUITE_data/src/compiler/v3_codegen.erl
@@ -1667,7 +1667,7 @@ bs_function({function,Name,Arity,CLabel,Asm0}=Func) ->
%%%
%%% Pass 1: Found out which bs_restore's that are needed. For now we assume
-%%% that a bs_restore is needed unless it is directly preceeded by a bs_save.
+%%% that a bs_restore is needed unless it is directly preceded by a bs_save.
%%%
bs_needed([{bs_save,Name},{bs_restore,Name}|T], N, _BsUsed, Dict) ->
diff --git a/lib/dialyzer/test/plt_SUITE.erl b/lib/dialyzer/test/plt_SUITE.erl
index 460d4e2240..fbfa979e1b 100644
--- a/lib/dialyzer/test/plt_SUITE.erl
+++ b/lib/dialyzer/test/plt_SUITE.erl
@@ -26,6 +26,8 @@ build_plt(Config) ->
end.
beam_tests(Config) when is_list(Config) ->
+ PrivDir = ?config(priv_dir, Config),
+ Plt = filename:join(PrivDir, "beam_tests.plt"),
Prog = <<"
-module(no_auto_import).
@@ -42,10 +44,12 @@ beam_tests(Config) when is_list(Config) ->
">>,
Opts = [no_auto_import],
{ok, BeamFile} = compile(Config, Prog, no_auto_import, Opts),
- [] = run_dialyzer(plt_build, [BeamFile], []),
+ [] = run_dialyzer(plt_build, [BeamFile], [{output_plt, Plt}]),
ok.
run_plt_check(Config) when is_list(Config) ->
+ PrivDir = ?config(priv_dir, Config),
+ Plt = filename:join(PrivDir, "run_plt_check.plt"),
Mod1 = <<"
-module(run_plt_check1).
">>,
@@ -56,7 +60,7 @@ run_plt_check(Config) when is_list(Config) ->
{ok, BeamFile1} = compile(Config, Mod1, run_plt_check1, []),
{ok, BeamFile2} = compile(Config, Mod2A, run_plt_check2, []),
- [] = run_dialyzer(plt_build, [BeamFile1, BeamFile2], []),
+ [] = run_dialyzer(plt_build, [BeamFile1, BeamFile2], [{output_plt, Plt}]),
Mod2B = <<"
-module(run_plt_check2).
@@ -70,11 +74,13 @@ run_plt_check(Config) when is_list(Config) ->
% callgraph warning as run_plt_check2:call/1 makes a call to unexported
% function run_plt_check1:call/1.
- [_] = run_dialyzer(plt_check, [], []),
+ [_] = run_dialyzer(plt_check, [], [{init_plt, Plt}]),
ok.
run_succ_typings(Config) when is_list(Config) ->
+ PrivDir = ?config(priv_dir, Config),
+ Plt = filename:join(PrivDir, "run_succ_typings.plt"),
Mod1A = <<"
-module(run_succ_typings1).
@@ -84,7 +90,7 @@ run_succ_typings(Config) when is_list(Config) ->
">>,
{ok, BeamFile1} = compile(Config, Mod1A, run_succ_typings1, []),
- [] = run_dialyzer(plt_build, [BeamFile1], []),
+ [] = run_dialyzer(plt_build, [BeamFile1], [{output_plt, Plt}]),
Mod1B = <<"
-module(run_succ_typings1).
@@ -107,9 +113,11 @@ run_succ_typings(Config) when is_list(Config) ->
{ok, BeamFile2} = compile(Config, Mod2, run_succ_typings2, []),
% contract types warning as run_succ_typings2:call/0 makes a call to
% run_succ_typings1:call/0, which returns a (not b) in the PLT.
- [_] = run_dialyzer(succ_typings, [BeamFile2], [{check_plt, false}]),
+ [_] = run_dialyzer(succ_typings, [BeamFile2],
+ [{check_plt, false}, {init_plt, Plt}]),
% warning not returned as run_succ_typings1 is updated in the PLT.
- [] = run_dialyzer(succ_typings, [BeamFile2], [{check_plt, true}]),
+ [] = run_dialyzer(succ_typings, [BeamFile2],
+ [{check_plt, true}, {init_plt, Plt}]),
ok.
@@ -252,12 +260,9 @@ remove_plt(Config) ->
ok.
bad_dialyzer_attr(Config) ->
- PrivDir = ?config(priv_dir, Config),
-
Prog1 = <<"-module(dial).
-dialyzer({no_return, [undef/0]}).">>,
{ok, Beam1} = compile(Config, Prog1, dial, []),
- Plt = filename:join(PrivDir, "bad_attr.plt"),
{dialyzer_error,
"Analysis failed with error:\n"
"Could not scan the following file(s):\n"
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct.erl
index ed38b2f915..3829479a94 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct.erl
@@ -520,7 +520,7 @@ save_automatic_tagged_types([_M|Ms]) ->
%% remove_in_set_imports/3 :
%% input: list with tuples of each module's imports and module name
%% respectively.
-%% output: one list with same format but each occured import from a
+%% output: one list with same format but each occurred import from a
%% module in the input set (IMNameL) is removed.
remove_in_set_imports([{{imports,ImpL},_ModName}|Rest],InputMNameL,Acc) ->
NewImpL = remove_in_set_imports1(ImpL,InputMNameL,[]),
@@ -1628,7 +1628,7 @@ tlv_tag1(<<1:1,PartialTag:7,Buffer/binary>>,Acc) ->
tlv_tag1(Buffer,(Acc bsl 7) bor PartialTag).
%% reads the content from the configuration file and returns the
-%% selected part choosen by InfoType. Assumes that the config file
+%% selected part chosen by InfoType. Assumes that the config file
%% content is an Erlang term.
read_config_file(ModuleName,InfoType) when atom(InfoType) ->
CfgList = read_config_file(ModuleName),
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_check.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_check.erl
index c26b8f851b..a4f39bde74 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_check.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_check.erl
@@ -4028,7 +4028,7 @@ check_sequence(S,Type,Comps) ->
{CRelInf,NewComps2} = componentrelation_leadingattr(S,NewComps),
% io:format("CRelInf: ~p~n",[CRelInf]),
% io:format("NewComps2: ~p~n",[NewComps2]),
- %% CompListWithTblInf has got a lot unecessary info about
+ %% CompListWithTblInf has got a lot unnecessary info about
%% the involved class removed, as the class of the object
%% set.
CompListWithTblInf = get_tableconstraint_info(S,Type,NewComps2),
@@ -4686,7 +4686,7 @@ any_component_relation(_,[],_,_,Acc) ->
%% evaluate_atpath/4 finds out whether the at notation refers to the
%% search level. The list of referenced names in the AtNot list shall
%% begin with a name that exists on the level it refers to. If the
-%% found AtPath is refering to the same sub-branch as the simple table
+%% found AtPath is referring to the same sub-branch as the simple table
%% has, then there shall not be any leading attribute info on this
%% level.
evaluate_atpath(_,[],Cnames,{innermost,AtPath=[Ref|_Refs]}) ->
@@ -4857,7 +4857,7 @@ innertype_comprel1(S,T = #type{def=Def,constraint=Cons,tablecinf=TCI},Path) ->
case Cons of
[{componentrelation,{_,_,ObjectSet},AtList}|_Rest] ->
%% This AtList must have an "outermost" at sign to be
- %% relevent here.
+ %% relevant here.
[{_,AL=[#'Externalvaluereference'{value=_Attr}|_R1]}|_R2]
= AtList,
%% #'ObjectClassFieldType'{class=ClassDef} = Def,
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber.erl
index 392896932a..0b5ea85681 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber.erl
@@ -1259,7 +1259,7 @@ gen_dec_line(Erules,TopType,Cname,CTags,Type,OptOrMand,DecObjInf) ->
end,
case DecObjInf of
{Cname,ObjSet} -> % this must be the component were an object is
- %% choosen from the object set according to the table
+ %% chosen from the object set according to the table
%% constraint.
{[{ObjSet,Cname,asn1ct_gen:mk_var(asn1ct_name:curr(term))}],
PostpDec};
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl
index 9725da4d11..fb9ffb13db 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_constructed_ber_bin_v2.erl
@@ -1096,7 +1096,7 @@ gen_dec_line(Erules,TopType,Cname,CTags,Type,OptOrMand,DecObjInf) ->
end,
case DecObjInf of
{Cname,ObjSet} -> % this must be the component were an object is
- %% choosen from the object set according to the table
+ %% chosen from the object set according to the table
%% constraint.
{[{ObjSet,Cname,asn1ct_gen:mk_var(asn1ct_name:curr(term))}],
PostpDec};
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser2.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser2.erl
index 5f8c7a0de8..32676b3448 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser2.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1ct_parser2.erl
@@ -2721,7 +2721,7 @@ prioritize_error(ErrList) ->
end,
NewErrList),
case SplitErrs of
- {[],UndefPosErrs} -> % if no error with Positon exists
+ {[],UndefPosErrs} -> % if no error with Position exists
lists:last(UndefPosErrs);
{IntPosErrs,_} ->
IntPosReasons = lists:map(fun(X)->element(2,X) end,IntPosErrs),
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin.erl
index 5854f8edbd..8f4d189b5a 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin.erl
@@ -1036,7 +1036,7 @@ decode_real2(Buffer0, Form, Len, RemBytes1) ->
%%
%% bitstring NamedBitList
%% Val can be of:
-%% - [identifiers] where only named identifers are set to one,
+%% - [identifiers] where only named identifiers are set to one,
%% the Constraint must then have some information of the
%% bitlength.
%% - [list of ones and zeroes] all bits
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl
index 0457425445..6e12d36579 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_ber_bin_v2.erl
@@ -1034,7 +1034,7 @@ decode_real_notag(_Buffer, _Form) ->
%%
%% bitstring NamedBitList
%% Val can be of:
-%% - [identifiers] where only named identifers are set to one,
+%% - [identifiers] where only named identifiers are set to one,
%% the Constraint must then have some information of the
%% bitlength.
%% - [list of ones and zeroes] all bits
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per.erl
index b163aa24ac..97c92a2dd1 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per.erl
@@ -823,7 +823,7 @@ decode_enumerated(Buffer,C,NamedNumberTup) when tuple(NamedNumberTup) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% bitstring NamedBitList
%% Val can be of:
-%% - [identifiers] where only named identifers are set to one,
+%% - [identifiers] where only named identifiers are set to one,
%% the Constraint must then have some information of the
%% bitlength.
%% - [list of ones and zeroes] all bits
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin.erl
index 15986cc217..aa2cf5ba88 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin.erl
@@ -1000,7 +1000,7 @@ decode_enumerated(Buffer,C,NamedNumberTup) when tuple(NamedNumberTup) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% bitstring NamedBitList
%% Val can be of:
-%% - [identifiers] where only named identifers are set to one,
+%% - [identifiers] where only named identifiers are set to one,
%% the Constraint must then have some information of the
%% bitlength.
%% - [list of ones and zeroes] all bits
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl
index 43d9bef54e..24f7949c21 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/asn1/asn1rt_per_bin_rt2ct.erl
@@ -1059,7 +1059,7 @@ decode_enumerated(Buffer,C,NamedNumberTup) when tuple(NamedNumberTup) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% bitstring NamedBitList
%% Val can be of:
-%% - [identifiers] where only named identifers are set to one,
+%% - [identifiers] where only named identifiers are set to one,
%% the Constraint must then have some information of the
%% bitlength.
%% - [list of ones and zeroes] all bits
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/ftp.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/ftp.erl
index 4f0ca99cce..8be5b0cd6e 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/ftp.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/ftp.erl
@@ -108,7 +108,7 @@ user(Pid, User, Pass) ->
gen_server:call(Pid, {user, User, Pass}, infinity).
%% user(Pid, User, Pass,Acc)
-%% Purpose: Login whith a supplied account name
+%% Purpose: Login with a supplied account name
%% Args: Pid = pid(), User = Pass = Acc = string()
%% Returns: ok | {error, euser} | {error, econn} | {error, eacct}
user(Pid, User, Pass,Acc) ->
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/http.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/http.erl
index cf05431f5a..039960dac7 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/http.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/http.erl
@@ -24,7 +24,7 @@
%%% - RFC 3310 Authentication and Key Agreement (AKA) (not yet!)
%%% - HTTP/1.1 Specification Errata found at
%%% http://world.std.com/~lawrence/http_errata.html
-%%% Additionaly follows the following recommendations:
+%%% Additionally follows the following recommendations:
%%% - RFC 3143 Known HTTP Proxy/Caching Problems (not yet!)
%%% - draft-nottingham-hdrreg-http-00.txt (not yet!)
%%%
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/http_lib.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/http_lib.erl
index ebefcd7ad7..28ea42c685 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/http_lib.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/http_lib.erl
@@ -697,7 +697,7 @@ lookup(Key,Val) ->
%%% This code is for parsing trailer headers in chunked messages.
%%% Will be deprecated whenever I have found an alternative working solution!
%%% Note:
-%%% - The header names are returned slighly different from what the what
+%%% - The header names are returned slightly differently from what the
%%% inet_drv returns
read_headers_old(Scheme,Socket,Timeout) ->
read_headers_old(<<>>,Scheme,Socket,Timeout,[],[]).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_manager.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_manager.erl
index 45beaa84f7..d2653184aa 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_manager.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpc_manager.erl
@@ -95,7 +95,7 @@ abort_session(Addr,Sid,Msg) ->
next_request(Addr,Sid) ->
gen_server:call(?HMACALL,{next_request,Addr,Sid},infinity).
-%%% Session handler has succeded to set up a new session, now register
+%%% Session handler has succeeded in setting up a new session, now register
%%% the socket
register_socket(Addr,Sid,Socket) ->
gen_server:cast(?HMACALL,{register_socket,Addr,Sid,Socket}).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_manager.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_manager.erl
index 85e06f43b6..3058ac3556 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_manager.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_manager.erl
@@ -224,7 +224,7 @@ is_blocked(ServerRef) ->
%%
-%% Module API. Theese functions are intended for use from modules only.
+%% Module API. These functions are intended for use from modules only.
%%
config_lookup(Port, Query) ->
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_parse.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_parse.erl
index d7a698d65a..07f951d057 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_parse.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/httpd_parse.erl
@@ -109,7 +109,7 @@ get_persistens(HTTPVersion,ParsedHeader,ConfigDB)->
%%If it is version prio to 1.1 kill the conneciton
[$H, $T, $T, $P, $\/, $1, $.,N] ->
case httpd_util:key1search(ParsedHeader,"connection","keep-alive")of
- %%if the connection isnt ordered to go down let it live
+ %%if the connection isn't ordered to go down let it live
%%The keep-alive value is the older http/1.1 might be older
%%Clients that use it.
"keep-alive" when N >= 49 ->
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/jnets_httpd.hrl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/jnets_httpd.hrl
index 6b872d7c95..73edcf6b92 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/jnets_httpd.hrl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/jnets_httpd.hrl
@@ -60,7 +60,7 @@
% request_line, % string() Request Line
headers, % #req_headers{} Parsed request headers
entity_body= <<>>, % binary() Body of request
- connection, % boolean() true if persistant connection
+ connection, % boolean() true if persistent connection
status_code, % int() Status code
logging % int() 0=No logging
% 1=Only mod_log present
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_mnesia.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_mnesia.erl
index e42494ff76..847d6e97c1 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_mnesia.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_auth_mnesia.erl
@@ -53,7 +53,7 @@ store_directory_data(Directory, DirData) ->
%% API
%%
-%% Compability API
+%% Compatibility API
store_user(UserName, Password, Port, Dir, AccessPassword) ->
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl
index 1203aeaa4c..a48f73274b 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_esi.erl
@@ -440,7 +440,7 @@ try_new_erl_scheme_method(Info,Env,Input,Mod,Func)->
%%----------------------------------------------------------------------
-%%The function recieves the data from the process that generates the page
+%%The function receives the data from the process that generates the page
%%and send the data to the client through the mod_cgi:send function
%%----------------------------------------------------------------------
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_htaccess.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_htaccess.erl
index f600c65e92..d95c745b07 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_htaccess.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_htaccess.erl
@@ -272,10 +272,10 @@ controlIfAllowed(AllowedNetworks,UserNetwork,IfAllowed,IfDenied)->
end.
-%---------------------------------------------------------------------%
-%The Denycontrol isn't neccessary to preform since the allow control %
-%override the deny control %
-%---------------------------------------------------------------------%
+%--------------------------------------------------------------------%
+%The Denycontrol isn't necessary to perform since the allow control %
+%override the deny control %
+%--------------------------------------------------------------------%
controlDenyAllow(DeniedNetworks,AllowedNetworks,UserNetwork)->
case AllowedNetworks of
[{allow,all}]->
@@ -657,7 +657,7 @@ getData2(HtAccessFileNames,SplittedPath,Info)->
%----------------------------------------------------------------------
%HtAccessFilenames is a list the names the accesssfiles can have
-%Path is the shortest match agains all alias and documentroot
+%Path is the shortest match against all alias and documentroot
%rest of splitted path is a list of the parts of the path
%Info is the mod recod from the server
%----------------------------------------------------------------------
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_range.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_range.erl
index 4e6030d5e2..f2c45c4a3f 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_range.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_range.erl
@@ -80,7 +80,7 @@ send_range_response(Path,Info,Ranges,FileInfo,LastModified)->
send_range_response(Path,Info,Start,Stop,FileInfo,LastModified)
end.
%%More than one range specified
-%%Send a multipart reponse to the user
+%%Send a multipart response to the user
%
%%An example of an multipart range response
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_responsecontrol.erl b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_responsecontrol.erl
index 76168f3890..a997db6880 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_responsecontrol.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/inets/mod_responsecontrol.erl
@@ -48,8 +48,8 @@ do(Info) ->
%%----------------------------------------------------------------------
-%%Control that the request header did not contians any limitations
-%%wheather a response shall be createed or not
+%%Control that the request header did not contain any limitations
+%%whether a response shall be created or not
%%----------------------------------------------------------------------
do_responsecontrol(Info) ->
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.erl
index 19b571ac47..cc72a9b6fe 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia.erl
@@ -431,7 +431,7 @@ wrap_trans(State, Fun, Args, Retries, Mod, Kind) ->
%% read lock is only set on the first node
%% Nodes may either be a list of nodes or one node as an atom
%% Mnesia on all Nodes must be connected to each other, but
-%% it is not neccessary that they are up and running.
+%% it is not necessary that they are up and running.
lock(LockItem, LockKind) ->
case get(mnesia_activity_state) of
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_bup.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_bup.erl
index fdbf3e4481..a85a08e4f8 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_bup.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_bup.erl
@@ -775,7 +775,7 @@ restore_tables([Rec | Recs], Header, Schema, State = {local, LocalTabs, L}) ->
restore_tables([], _Header, _Schema, State) ->
State.
-%% Creates all neccessary dat files and inserts
+%% Creates all necessary dat files and inserts
%% the table definitions in the schema table
%%
%% Returns a list of local_tab tuples for all local tables
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint.erl
index 2b5c77b3ba..0403c7e978 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_checkpoint.erl
@@ -332,7 +332,7 @@ really_retain(Name, Tab) ->
%%
%% {min, MinTabs}
%% Minimize redundancy and only keep checkpoint info together with
-%% one replica, preferrably at the local node. If any node involved
+%% one replica, preferably at the local node. If any node involved
%% the checkpoint goes down, the checkpoint is deactivated.
%%
%% {max, MaxTabs}
@@ -345,7 +345,7 @@ really_retain(Name, Tab) ->
%% {ram_overrides_dump, Tabs}
%% Only applicable for ram_copies. Bool controls which versions of
%% the records that should be included in the checkpoint state.
-%% true means that the latest comitted records in ram (i.e. the
+%% true means that the latest committed records in ram (i.e. the
%% records that the application accesses) should be included
%% in the checkpoint. false means that the records dumped to
%% dat-files (the records that will be loaded at startup) should
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_loader.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_loader.erl
index 70fee1741e..07667d73f5 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_loader.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_loader.erl
@@ -61,7 +61,7 @@ do_get_disc_copy2(Tab, Reason, Storage, Type) when Storage == disc_copies ->
Repair = mnesia_monitor:get_env(auto_repair),
Args = [{keypos, 2}, public, named_table, Type],
case Reason of
- {dumper, _} -> %% Resources allready allocated
+ {dumper, _} -> %% Resources already allocated
ignore;
_ ->
mnesia_monitor:mktab(Tab, Args),
@@ -82,7 +82,7 @@ do_get_disc_copy2(Tab, Reason, Storage, Type) when Storage == disc_copies ->
do_get_disc_copy2(Tab, Reason, Storage, Type) when Storage == ram_copies ->
Args = [{keypos, 2}, public, named_table, Type],
case Reason of
- {dumper, _} -> %% Resources allready allocated
+ {dumper, _} -> %% Resources already allocated
ignore;
_ ->
mnesia_monitor:mktab(Tab, Args),
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_locker.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_locker.erl
index 701aa8f598..accb631f2a 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_locker.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_locker.erl
@@ -170,14 +170,14 @@ loop(State) ->
end;
%% If test_set_sticky fails, we send this to all nodes
- %% after aquiring a real write lock on Oid
+ %% after acquiring a real write lock on Oid
{stick, {Tab, _}, N} ->
?ets_insert(mnesia_sticky_locks, {Tab, N}),
loop(State);
%% The caller which sends this message, must have first
- %% aquired a write lock on the entire table
+ %% acquired a write lock on the entire table
{unstick, Tab} ->
?ets_delete(mnesia_sticky_locks, Tab),
loop(State);
@@ -738,11 +738,11 @@ dirty_sticky_lock(Tab, Key, Nodes, Lock) ->
sticky_wlock_table(Tid, Store, Tab) ->
sticky_lock(Tid, Store, {Tab, ?ALL}, write).
-%% aquire a wlock on Oid
+%% acquire a wlock on Oid
%% We store a {Tabname, write, Tid} in all locktables
%% on all nodes containing a copy of Tabname
%% We also store an item {{locks, Tab, Key}, write} in the
-%% local store when we have aquired the lock.
+%% local store when we have acquired the lock.
%%
wlock(Tid, Store, Oid) ->
{Tab, Key} = Oid,
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_monitor.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_monitor.erl
index d1ff09ce29..7fd5f70e23 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_monitor.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_monitor.erl
@@ -144,7 +144,7 @@ check_protocol([{Node, {accept, Mon, _Version, Protocol}} | Tail], Protocols) ->
end,
[node(Mon) | check_protocol(Tail, Protocols)];
false ->
- unlink(Mon), % Get rid of unneccessary link
+ unlink(Mon), % Get rid of unnecessary link
check_protocol(Tail, Protocols)
end;
check_protocol([{Node, {reject, _Mon, Version, Protocol}} | Tail], Protocols) ->
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_schema.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_schema.erl
index ec07e1c1ab..fbd1356a7f 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_schema.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_schema.erl
@@ -1265,7 +1265,7 @@ make_change_table_copy_type(Tab, Node, ToS) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% change index functions ....
-%% Pos is allready added by 1 in both of these functions
+%% Pos is already added by 1 in both of these functions
add_table_index(Tab, Pos) ->
schema_transaction(fun() -> do_add_table_index(Tab, Pos) end).
diff --git a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_tm.erl b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_tm.erl
index 3e08354b5a..09e310530d 100644
--- a/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_tm.erl
+++ b/lib/dialyzer/test/r9c_SUITE_data/src/mnesia/mnesia_tm.erl
@@ -1615,7 +1615,7 @@ commit_participant(Coord, Tid, Bin, C0, DiscNs, _RamNs) ->
do_abort(Tid, Bin) when binary(Bin) ->
%% Possible optimization:
- %% If we want we could pass arround a flag
+ %% If we want we could pass around a flag
%% that tells us whether the binary contains
%% schema ops or not. Only if the binary
%% contains schema ops there are meningful
diff --git a/lib/dialyzer/test/small_SUITE_data/src/tuple1.erl b/lib/dialyzer/test/small_SUITE_data/src/tuple1.erl
index d608275efe..88ac486044 100644
--- a/lib/dialyzer/test/small_SUITE_data/src/tuple1.erl
+++ b/lib/dialyzer/test/small_SUITE_data/src/tuple1.erl
@@ -2,7 +2,7 @@
%%% File : tuple1.erl
%%% Author : Tobias Lindahl <[email protected]>
%%% Description : Exposed two bugs in the analysis;
-%%% one supressed warning and one crash.
+%%% one suppressed warning and one crash.
%%%
%%% Created : 13 Nov 2006 by Tobias Lindahl <[email protected]>
%%%-------------------------------------------------------------------
diff --git a/lib/diameter/include/diameter_gen.hrl b/lib/diameter/include/diameter_gen.hrl
index 611ad796a9..5361510d69 100644
--- a/lib/diameter/include/diameter_gen.hrl
+++ b/lib/diameter/include/diameter_gen.hrl
@@ -424,7 +424,7 @@ d(true, _, Name, Avp, Acc) ->
%% ... or not. Failures here won't be visible since they're a "normal"
%% occurrence if the peer sends a faulty AVP that we need to respond
-%% sensibly to. Log the occurence for traceability, but the peer will
+%% sensibly to. Log the occurrence for traceability, but the peer will
%% also receive info in the resulting answer message.
d(false, Reason, Name, Avp, {Avps, Acc}) ->
Stack = diameter_lib:get_stacktrace(),
diff --git a/lib/diameter/src/base/diameter_callback.erl b/lib/diameter/src/base/diameter_callback.erl
index f479cb6612..0e445492b8 100644
--- a/lib/diameter/src/base/diameter_callback.erl
+++ b/lib/diameter/src/base/diameter_callback.erl
@@ -35,7 +35,7 @@
%% in a callback applied to the atom-valued callback name and argument
%% list. For all callbacks not to this module, the 'extra' field is a
%% list of additional arguments, following arguments supplied by
-%% diameter but preceeding those of the diameter:evaluable() being
+%% diameter but preceding those of the diameter:evaluable() being
%% applied.
%%
%% For example, the following config to diameter:start_service/2, in
diff --git a/lib/diameter/src/base/diameter_config.erl b/lib/diameter/src/base/diameter_config.erl
index fdbbd412a1..1b48c0431f 100644
--- a/lib/diameter/src/base/diameter_config.erl
+++ b/lib/diameter/src/base/diameter_config.erl
@@ -865,7 +865,7 @@ init_cb(List) ->
V <- [proplists:get_value(F, List, D)]],
#diameter_callback{} = list_to_tuple([diameter_callback | Values]).
-%% Retreive and validate.
+%% Retrieve and validate.
get_opt(Key, List, Def, Other) ->
init_opt(Key, get_opt(Key, List, Def), [Def|Other]).
diff --git a/lib/diameter/src/base/diameter_peer_fsm.erl b/lib/diameter/src/base/diameter_peer_fsm.erl
index 996e75a8d3..4b394a2dbe 100644
--- a/lib/diameter/src/base/diameter_peer_fsm.erl
+++ b/lib/diameter/src/base/diameter_peer_fsm.erl
@@ -356,7 +356,7 @@ handle_info(T, #state{} = State) ->
%% Note that there's no guarantee that the service and transport
%% capabilities are good enough to build a CER/CEA that can be
-%% succesfully encoded. It's not checked at diameter:add_transport/2
+%% successfully encoded. It's not checked at diameter:add_transport/2
%% since this can be called before creating the service.
%% terminate/2
diff --git a/lib/diameter/src/info/diameter_info.erl b/lib/diameter/src/info/diameter_info.erl
index 59a3b94ee4..2a27600346 100644
--- a/lib/diameter/src/info/diameter_info.erl
+++ b/lib/diameter/src/info/diameter_info.erl
@@ -195,7 +195,7 @@ format(Tables, SFun, CFun)
%%%
%%% Description: Pretty-print records in a named tables as collected
%%% from local and remote nodes. Each table listing is
-%%% preceeded by a banner.
+%%% preceded by a banner.
%%% ----------------------------------------------------------
format(Local, Remote, SFun) ->
diff --git a/lib/diameter/src/transport/diameter_sctp.erl b/lib/diameter/src/transport/diameter_sctp.erl
index f48e4347ee..ad9f4b0d80 100644
--- a/lib/diameter/src/transport/diameter_sctp.erl
+++ b/lib/diameter/src/transport/diameter_sctp.erl
@@ -402,7 +402,7 @@ handle_info(T, #transport{} = S) ->
handle_info(T, #listener{} = S) ->
{noreply, #listener{} = l(T,S)}.
-%% Prior to the possiblity of setting pool_size on in transport
+%% Prior to the possibility of setting pool_size on in transport
%% configuration, a new accepting transport was only started following
%% the death of a predecessor, so that there was only at most one
%% previously started transport process waiting for an association.
diff --git a/lib/diameter/test/diameter_pool_SUITE.erl b/lib/diameter/test/diameter_pool_SUITE.erl
index eadb354a1d..383fa0a031 100644
--- a/lib/diameter/test/diameter_pool_SUITE.erl
+++ b/lib/diameter/test/diameter_pool_SUITE.erl
@@ -115,7 +115,7 @@ connect(ClientProt, ServerProt) ->
%% 'up' events. (Although it's likely.)
sleep(),
{9,5} = count("server", LRef, accept), %% 5 connections + 4 accepting
- %% Ensure ther are still the expected number of accepting transports
+ %% Ensure there are still the expected number of accepting transports
%% after stopping the client service.
ok = diameter:stop_service("client"),
sleep(),
diff --git a/lib/edoc/src/edoc_tags.erl b/lib/edoc/src/edoc_tags.erl
index 7e59f373b2..da078de0b9 100644
--- a/lib/edoc/src/edoc_tags.erl
+++ b/lib/edoc/src/edoc_tags.erl
@@ -227,7 +227,7 @@ filter_tags([#tag{name = N, line = L} = T | Ts], Tags, Where, Ts1) ->
filter_tags([], _, _, Ts) ->
lists:reverse(Ts).
-%% Check occurrances of tags.
+%% Check occurrences of tags.
check_tags(Ts, Allow, Single, Where) ->
check_tags(Ts, Allow, Single, Where, false, sets:new()).
diff --git a/lib/eldap/test/README b/lib/eldap/test/README
index ec774c1ae3..af1bf6a082 100644
--- a/lib/eldap/test/README
+++ b/lib/eldap/test/README
@@ -16,7 +16,7 @@ To start slapd:
This will however not work, since slapd is guarded by apparmor that checks that slapd does not access other than allowed files...
-To make a local extension of alowed operations:
+To make a local extension of allowed operations:
sudo emacs /etc/apparmor.d/local/usr.sbin.slapd
and, after the change (yes, at least on Ubuntu it is right to edit ../local/.. but run with another file):
diff --git a/lib/erl_interface/src/README b/lib/erl_interface/src/README
index feee2e48e8..7591615f78 100644
--- a/lib/erl_interface/src/README
+++ b/lib/erl_interface/src/README
@@ -11,7 +11,7 @@ Also, assertions are enabled, meaning that the code will be a
little bit slower. In the final release, there will be two
alternative libraries shipped, with and without assertions.
-If an assertion triggers, there will be a printout similiar to this
+If an assertion triggers, there will be a printout similar to this
one:
Assertion failed: ep != NULL in erl_eterm.c, line 694
diff --git a/lib/erl_interface/src/legacy/erl_marshal.c b/lib/erl_interface/src/legacy/erl_marshal.c
index 2bdf5f2134..527ae0ef8f 100644
--- a/lib/erl_interface/src/legacy/erl_marshal.c
+++ b/lib/erl_interface/src/legacy/erl_marshal.c
@@ -1626,7 +1626,7 @@ static int cmp_refs(unsigned char **e1, unsigned char **e2)
if (cre1 != cre2)
return cre1 < cre2 ? -1 : 1;
- /* ... and then finaly ids. */
+ /* ... and then finally ids. */
if (n1 != n2) {
unsigned char zero[] = {0, 0, 0, 0};
if (n1 > n2)
@@ -1791,7 +1791,7 @@ static int cmp_exe2(unsigned char **e1, unsigned char **e2)
if (port1.creation < port2.creation) return -1;
else if (port1.creation > port2.creation) return 1;
- /* ... and then finaly ids. */
+ /* ... and then finally ids. */
if (port1.id < port2.id) return -1;
else if (port1.id > port2.id) return 1;
diff --git a/lib/erl_interface/src/misc/ei_locking.c b/lib/erl_interface/src/misc/ei_locking.c
index 85b2a5fd8b..a0e00b7871 100644
--- a/lib/erl_interface/src/misc/ei_locking.c
+++ b/lib/erl_interface/src/misc/ei_locking.c
@@ -76,8 +76,8 @@ ei_mutex_t *ei_mutex_create(void)
return l;
}
-/*
- * Free a mutex and the structure asociated with it.
+/*
+ * Free a mutex and the structure associated with it.
*
* This function attempts to obtain the mutex before releasing it;
* If nblock == 1 and the mutex was unavailable, the function will
diff --git a/lib/erl_interface/test/ei_decode_SUITE.erl b/lib/erl_interface/test/ei_decode_SUITE.erl
index 1495a0d5d9..10e90685c8 100644
--- a/lib/erl_interface/test/ei_decode_SUITE.erl
+++ b/lib/erl_interface/test/ei_decode_SUITE.erl
@@ -99,7 +99,7 @@ test_ei_decode_ulonglong(Config) when is_list(Config) ->
%% ######################################################################## %%
-%% A "character" for us is an 8 bit integer, alwasy positive, i.e.
+%% A "character" for us is an 8 bit integer, always positive, i.e.
%% it is unsigned.
%% FIXME maybe the API should change to use "unsigned char" to be clear?!
diff --git a/lib/erl_interface/test/erl_eterm_SUITE.erl b/lib/erl_interface/test/erl_eterm_SUITE.erl
index 0e51a50c19..7fd46694b8 100644
--- a/lib/erl_interface/test/erl_eterm_SUITE.erl
+++ b/lib/erl_interface/test/erl_eterm_SUITE.erl
@@ -31,7 +31,7 @@
%%% 2. Constructing terms (the erl_mk_xxx() functions and erl_copy_term()).
%%% 3. Extracting & info functions (erl_hd(), erl_length() etc).
%%% 4. I/O list functions.
-%%% 5. Miscellanous functions.
+%%% 5. Miscellaneous functions.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-export([all/0, suite/0,
diff --git a/lib/eunit/doc/src/notes.xml b/lib/eunit/doc/src/notes.xml
index 8509f44ffc..d7ec2108e9 100644
--- a/lib/eunit/doc/src/notes.xml
+++ b/lib/eunit/doc/src/notes.xml
@@ -498,7 +498,7 @@
<list>
<item>
<p>
- Miscellanous updates.</p>
+ Miscellaneous updates.</p>
<p>
Own Id: OTP-8038</p>
</item>
diff --git a/lib/hipe/cerl/cerl_to_icode.erl b/lib/hipe/cerl/cerl_to_icode.erl
index acad8a9da4..e37eae8a03 100644
--- a/lib/hipe/cerl/cerl_to_icode.erl
+++ b/lib/hipe/cerl/cerl_to_icode.erl
@@ -2621,7 +2621,7 @@ icode_switch_val(Arg, Fail, Length, Cases) ->
hipe_icode:mk_switch_val(Arg, Fail, Length, Cases).
icode_switch_tuple_arity(Arg, Fail, Length, Cases) ->
- SortedCases = lists:keysort(1, Cases), %% immitate BEAM compiler - Kostis
+ SortedCases = lists:keysort(1, Cases), %% imitate BEAM compiler - Kostis
hipe_icode:mk_switch_tuple_arity(Arg, Fail, Length, SortedCases).
diff --git a/lib/hipe/cerl/erl_types.erl b/lib/hipe/cerl/erl_types.erl
index 91ee104f77..9d46d4ac81 100644
--- a/lib/hipe/cerl/erl_types.erl
+++ b/lib/hipe/cerl/erl_types.erl
@@ -2235,16 +2235,21 @@ t_has_var_list([]) -> false.
-spec t_collect_vars(erl_type()) -> [erl_type()].
t_collect_vars(T) ->
- t_collect_vars(T, []).
+ Vs = t_collect_vars(T, maps:new()),
+ [V || {V, _} <- maps:to_list(Vs)].
--spec t_collect_vars(erl_type(), [erl_type()]) -> [erl_type()].
+-type ctab() :: #{erl_type() => 'any'}.
+
+-spec t_collect_vars(erl_type(), ctab()) -> ctab().
t_collect_vars(?var(_) = Var, Acc) ->
- ordsets:add_element(Var, Acc);
+ maps:put(Var, any, Acc);
t_collect_vars(?function(Domain, Range), Acc) ->
- ordsets:union(t_collect_vars(Domain, Acc), t_collect_vars(Range, []));
+ Acc1 = t_collect_vars(Domain, Acc),
+ t_collect_vars(Range, Acc1);
t_collect_vars(?list(Contents, Termination, _), Acc) ->
- ordsets:union(t_collect_vars(Contents, Acc), t_collect_vars(Termination, []));
+ Acc1 = t_collect_vars(Contents, Acc),
+ t_collect_vars(Termination, Acc1);
t_collect_vars(?product(Types), Acc) ->
t_collect_vars_list(Types, Acc);
t_collect_vars(?tuple(?any, ?any, ?any), Acc) ->
@@ -4424,9 +4429,17 @@ mod_name(Mod, Name) ->
-type site() :: {'type', mta()} | {'spec', mfa()} | {'record', mra()}.
-type cache_key() :: {module(), atom(), expand_depth(),
[erl_type()], type_names()}.
--opaque cache() :: #{cache_key() => {erl_type(), expand_limit()}}.
+-type mod_type_table() :: ets:tid().
+-record(cache,
+ {
+ types = maps:new() :: #{cache_key() => {erl_type(), expand_limit()}},
+ mod_recs = {mrecs, dict:new()} :: 'undefined'
+ | {'mrecs', mod_records()}
+ }).
+
+-opaque cache() :: #cache{}.
--spec t_from_form(parse_form(), sets:set(mfa()), site(), mod_records(),
+-spec t_from_form(parse_form(), sets:set(mfa()), site(), mod_type_table(),
var_table(), cache()) -> {erl_type(), cache()}.
t_from_form(Form, ExpTypes, Site, RecDict, VarTab, Cache) ->
@@ -4438,11 +4451,12 @@ t_from_form(Form, ExpTypes, Site, RecDict, VarTab, Cache) ->
t_from_form_without_remote(Form, Site, TypeTable) ->
Module = site_module(Site),
- RecDict = dict:from_list([{Module, TypeTable}]),
+ ModRecs = dict:from_list([{Module, TypeTable}]),
ExpTypes = replace_by_none,
VarTab = var_table__new(),
- Cache = cache__new(),
- t_from_form1(Form, ExpTypes, Site, RecDict, VarTab, Cache).
+ Cache0 = cache__new(),
+ Cache = Cache0#cache{mod_recs = {mrecs, ModRecs}},
+ t_from_form1(Form, ExpTypes, Site, undefined, VarTab, Cache).
%% REC_TYPE_LIMIT is used for limiting the depth of recursive types.
%% EXPAND_LIMIT is used for limiting the size of types by
@@ -4457,13 +4471,13 @@ t_from_form_without_remote(Form, Site, TypeTable) ->
-record(from_form, {site :: site(),
xtypes :: sets:set(mfa()) | 'replace_by_none',
- mrecs :: mod_records(),
+ mrecs :: 'undefined' | mod_type_table(),
vtab :: var_table(),
tnames :: type_names()}).
-spec t_from_form1(parse_form(), sets:set(mfa()) | 'replace_by_none',
- site(), mod_records(), var_table(), cache()) ->
- {erl_type(), cache()}.
+ site(), 'undefined' | mod_type_table(), var_table(),
+ cache()) -> {erl_type(), cache()}.
t_from_form1(Form, ET, Site, MR, V, C) ->
TypeNames = initial_typenames(Site),
@@ -4709,13 +4723,13 @@ from_form({opaque, _L, Name, {Mod, Args, Rep}}, _S, _D, L, C) ->
builtin_type(Name, Type, S, D, L, C) ->
#from_form{site = Site, mrecs = MR} = S,
M = site_module(Site),
- case dict:find(M, MR) of
- {ok, R} ->
+ case lookup_module_types(M, MR, C) of
+ {R, C1} ->
case lookup_type(Name, 0, R) of
{_, {{_M, _FL, _F, _A}, _T}} ->
- type_from_form(Name, [], S, D, L, C);
+ type_from_form(Name, [], S, D, L, C1);
error ->
- {Type, L, C}
+ {Type, L, C1}
end;
error ->
{Type, L, C}
@@ -4728,9 +4742,9 @@ type_from_form(Name, Args, S, D, L, C) ->
TypeName = {type, {Module, Name, ArgsLen}},
case can_unfold_more(TypeName, TypeNames) of
true ->
- {ok, R} = dict:find(Module, MR),
+ {R, C1} = lookup_module_types(Module, MR, C),
type_from_form1(Name, Args, ArgsLen, R, TypeName, TypeNames,
- S, D, L, C);
+ S, D, L, C1);
false ->
{t_any(), L, C}
end.
@@ -4782,24 +4796,24 @@ remote_from_form(RemMod, Name, Args, S, D, L, C) ->
true ->
ArgsLen = length(Args),
MFA = {RemMod, Name, ArgsLen},
- case dict:find(RemMod, MR) of
+ case lookup_module_types(RemMod, MR, C) of
error ->
self() ! {self(), ext_types, MFA},
{t_any(), L, C};
- {ok, RemDict} ->
+ {RemDict, C1} ->
case sets:is_element(MFA, ET) of
true ->
RemType = {type, MFA},
case can_unfold_more(RemType, TypeNames) of
true ->
remote_from_form1(RemMod, Name, Args, ArgsLen, RemDict,
- RemType, TypeNames, S, D, L, C);
+ RemType, TypeNames, S, D, L, C1);
false ->
- {t_any(), L, C}
+ {t_any(), L, C1}
end;
false ->
self() ! {self(), ext_types, {RemMod, Name, ArgsLen}},
- {t_any(), L, C}
+ {t_any(), L, C1}
end
end
end.
@@ -4874,15 +4888,15 @@ record_from_form({atom, _, Name}, ModFields, S, D0, L0, C) ->
case can_unfold_more(RecordType, TypeNames) of
true ->
M = site_module(Site),
- {ok, R} = dict:find(M, MR),
+ {R, C1} = lookup_module_types(M, MR, C),
case lookup_record(Name, R) of
{ok, DeclFields} ->
NewTypeNames = [RecordType|TypeNames],
Site1 = {record, {M, Name, length(DeclFields)}},
S1 = S#from_form{site = Site1, tnames = NewTypeNames},
Fun = fun(D, L) ->
- {GetModRec, L1, C1} =
- get_mod_record(ModFields, DeclFields, S1, D, L, C),
+ {GetModRec, L1, C2} =
+ get_mod_record(ModFields, DeclFields, S1, D, L, C1),
case GetModRec of
{error, FieldName} ->
throw({error,
@@ -4890,12 +4904,12 @@ record_from_form({atom, _, Name}, ModFields, S, D0, L0, C) ->
[Name, FieldName])});
{ok, NewFields} ->
S2 = S1#from_form{vtab = var_table__new()},
- {NewFields1, L2, C2} =
- fields_from_form(NewFields, S2, D, L1, C1),
+ {NewFields1, L2, C3} =
+ fields_from_form(NewFields, S2, D, L1, C2),
Rec = t_tuple(
[t_atom(Name)|[Type
|| {_FieldName, Type} <- NewFields1]]),
- {Rec, L2, C2}
+ {Rec, L2, C3}
end
end,
recur_limit(Fun, D0, L0, RecordType, TypeNames);
@@ -5026,7 +5040,7 @@ recur_limit(Fun, D, L, TypeName, TypeNames) ->
end.
-spec t_check_record_fields(parse_form(), sets:set(mfa()), site(),
- mod_records(), var_table(), cache()) -> cache().
+ mod_type_table(), var_table(), cache()) -> cache().
t_check_record_fields(Form, ExpTypes, Site, RecDict, VarTable, Cache) ->
State = #from_form{site = Site,
@@ -5070,13 +5084,13 @@ check_record_fields({user_type, _L, _Name, Args}, S, C) ->
check_record({atom, _, Name}, ModFields, S, C) ->
#from_form{site = Site, mrecs = MR} = S,
M = site_module(Site),
- {ok, R} = dict:find(M, MR),
+ {R, C1} = lookup_module_types(M, MR, C),
{ok, DeclFields} = lookup_record(Name, R),
- case check_fields(Name, ModFields, DeclFields, S, C) of
+ case check_fields(Name, ModFields, DeclFields, S, C1) of
{error, FieldName} ->
throw({error, io_lib:format("Illegal declaration of #~w{~w}\n",
[Name, FieldName])});
- C1 -> C1
+ C2 -> C2
end.
check_fields(RecName, [{type, _, field_type, [{atom, _, Name}, Abstr]}|Left],
@@ -5106,7 +5120,7 @@ site_module({_, {Module, _, _}}) ->
-spec cache__new() -> cache().
cache__new() ->
- maps:new().
+ #cache{}.
-spec cache_key(module(), atom(), [erl_type()],
type_names(), expand_depth()) -> cache_key().
@@ -5123,8 +5137,8 @@ cache_key(Module, Name, ArgTypes, TypeNames, D) ->
-spec cache_find(cache_key(), cache()) ->
{erl_type(), expand_limit()} | 'error'.
-cache_find(Key, Cache) ->
- case maps:find(Key, Cache) of
+cache_find(Key, #cache{types = Types}) ->
+ case maps:find(Key, Types) of
{ok, Value} ->
Value;
error ->
@@ -5136,8 +5150,9 @@ cache_find(Key, Cache) ->
cache_put(_Key, _Type, DeltaL, Cache) when DeltaL < 0 ->
%% The type is truncated; do not reuse it.
Cache;
-cache_put(Key, Type, DeltaL, Cache) ->
- maps:put(Key, {Type, DeltaL}, Cache).
+cache_put(Key, Type, DeltaL, #cache{types = Types} = Cache) ->
+ NewTypes = maps:put(Key, {Type, DeltaL}, Types),
+ Cache#cache{types = NewTypes}.
-spec t_var_names([erl_type()]) -> [atom()].
@@ -5236,14 +5251,12 @@ t_form_to_string({type, _L, union, Args}) ->
t_form_to_string({type, _L, Name, []} = T) ->
try
M = mod,
- D0 = maps:new(),
- MR = dict:from_list([{M, D0}]),
Site = {type, {M,Name,0}},
V = var_table__new(),
C = cache__new(),
State = #from_form{site = Site,
xtypes = sets:new(),
- mrecs = MR,
+ mrecs = 'undefined',
vtab = V,
tnames = []},
{T1, _, _} = from_form(T, State, _Deep=1000, _ALot=1000000, C),
@@ -5297,6 +5310,28 @@ is_erl_type(?unit) -> true;
is_erl_type(#c{}) -> true;
is_erl_type(_) -> false.
+-spec lookup_module_types(module(), mod_type_table(), cache()) ->
+ 'error' | {type_table(), cache()}.
+
+lookup_module_types(Module, CodeTable, Cache) ->
+ #cache{mod_recs = ModRecs} = Cache,
+ case ModRecs of
+ undefined -> error;
+ {mrecs, MRecs} ->
+ case dict:find(Module, MRecs) of
+ {ok, R} ->
+ {R, Cache};
+ error ->
+ try ets:lookup_element(CodeTable, Module, 2) of
+ R ->
+ NewMRecs = dict:store(Module, R, MRecs),
+ {R, Cache#cache{mod_recs = {mrecs, NewMRecs}}}
+ catch
+ _:_ -> error
+ end
+ end
+ end.
+
-spec lookup_record(atom(), type_table()) ->
'error' | {'ok', [{atom(), parse_form(), erl_type()}]}.
diff --git a/lib/hipe/doc/src/notes.xml b/lib/hipe/doc/src/notes.xml
index 0bdd60adfd..314fd55ba3 100644
--- a/lib/hipe/doc/src/notes.xml
+++ b/lib/hipe/doc/src/notes.xml
@@ -1297,7 +1297,7 @@
<list>
<item>
<p>
- Miscellanous updates.</p>
+ Miscellaneous updates.</p>
<p>
Own Id: OTP-8038</p>
</item>
diff --git a/lib/hipe/flow/cfg.inc b/lib/hipe/flow/cfg.inc
index 362c5b697c..17342d3b60 100644
--- a/lib/hipe/flow/cfg.inc
+++ b/lib/hipe/flow/cfg.inc
@@ -212,7 +212,7 @@ info_update(CFG, I) ->
-ifndef(GEN_CFG).
-spec other_entrypoints(cfg()) -> [cfg_lbl()].
-%% @doc Returns a list of labels that are refered to from the data section.
+%% @doc Returns a list of labels that are referred to from the data section.
other_entrypoints(CFG) ->
hipe_consttab:referred_labels(data(CFG)).
diff --git a/lib/hipe/flow/hipe_dominators.erl b/lib/hipe/flow/hipe_dominators.erl
index 570452c14e..749edd4f72 100644
--- a/lib/hipe/flow/hipe_dominators.erl
+++ b/lib/hipe/flow/hipe_dominators.erl
@@ -317,7 +317,7 @@ updateCell(Value, Field, WD) ->
%%>----------------------------------------------------------------------<
%% Procedure : dfs/1
%% Purpose : The main purpose of this function is to traverse the CFG in
-%% a depth first order. It is aslo used to initialize certain
+%% a depth first order. It is also used to initialize certain
%% elements defined in a workDataCell.
%% Arguments : CFG - a Control Flow Graph representation
%% Returns : A table (WorkData) and the total number of elements in
diff --git a/lib/hipe/llvm/hipe_rtl_to_llvm.erl b/lib/hipe/llvm/hipe_rtl_to_llvm.erl
index f8911c1909..208d86841f 100644
--- a/lib/hipe/llvm/hipe_rtl_to_llvm.erl
+++ b/lib/hipe/llvm/hipe_rtl_to_llvm.erl
@@ -1431,7 +1431,7 @@ relocs_to_list(Relocs) ->
%% constants/labels.
handle_relocations(Relocs, Data, Fun) ->
RelocsList = relocs_to_list(Relocs),
- %% Seperate Relocations according to their type
+ %% Separate Relocations according to their type
{CallList, AtomList, ClosureList, ClosureLabels, SwitchList} =
seperate_relocs(RelocsList),
%% Create code to declare atoms
@@ -1474,7 +1474,7 @@ handle_relocations(Relocs, Data, Fun) ->
LocalVariables = AtomLoad ++ ClosureLoad ++ ConstLoad,
{Relocs4, ExternalDeclarations, LocalVariables}.
-%% @doc Seperate relocations according to their type.
+%% @doc Separate relocations according to their type.
seperate_relocs(Relocs) ->
seperate_relocs(Relocs, [], [], [], [], []).
diff --git a/lib/hipe/main/hipe.erl b/lib/hipe/main/hipe.erl
index 90ef84ca51..06facae5c1 100644
--- a/lib/hipe/main/hipe.erl
+++ b/lib/hipe/main/hipe.erl
@@ -441,7 +441,7 @@ compile(Name, File, Opts0) when is_atom(Name) ->
?error_msg("Cannot get Core Erlang code from BEAM binary.",[]),
?EXIT({cant_compile_core_from_binary});
true ->
- case filename:find_src(filename:rootname(File, ".beam")) of
+ case filelib:find_source(filename:rootname(File,".beam") ++ ".beam") of
{error, _} ->
?error_msg("Cannot find source code for ~p.", [File]),
?EXIT({cant_find_source_code});
diff --git a/lib/hipe/opt/hipe_schedule.erl b/lib/hipe/opt/hipe_schedule.erl
index 531690f885..0f25940e3d 100644
--- a/lib/hipe/opt/hipe_schedule.erl
+++ b/lib/hipe/opt/hipe_schedule.erl
@@ -1337,10 +1337,10 @@ cd([{N,I}|Xs], DAG, PrevBr, PrevUnsafe, PrevOthers) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Function : cd_branch_to_other_deps
%% Argument : N - index of branch
-%% Ms - list of indexes of "others" preceeding instrs
+%% Ms - list of indexes of "others" preceding instrs
%% DAG - dependence graph
%% Returns : DAG - new graph
-%% Description : Makes preceeding instrs fixed so they don't bypass a branch
+%% Description : Makes preceding instrs fixed so they don't bypass a branch
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
cd_branch_to_other_deps(_, [], DAG) ->
DAG;
diff --git a/lib/hipe/opt/hipe_spillmin_color.erl b/lib/hipe/opt/hipe_spillmin_color.erl
index 50e073a467..41f1972df7 100644
--- a/lib/hipe/opt/hipe_spillmin_color.erl
+++ b/lib/hipe/opt/hipe_spillmin_color.erl
@@ -119,7 +119,7 @@ color_heuristic(IG, Min, Max, Safe, MaxNodes, Target, MaxDepth) ->
end;
_ ->
%% This can be increased from 2, and by this the heuristic can be
- %% exited earlier, but the same can be achived by decreasing the
+ %% exited earlier, but the same can be achieved by decreasing the
%% recursion depth. This should not be decreased below 2.
case (Max - Min) < 2 of
true ->
diff --git a/lib/hipe/test/maps_SUITE_data/maps_warn_pair_key_overloaded.erl b/lib/hipe/test/maps_SUITE_data/maps_warn_pair_key_overloaded.erl
index 76b2a91f94..cce91530f4 100644
--- a/lib/hipe/test/maps_SUITE_data/maps_warn_pair_key_overloaded.erl
+++ b/lib/hipe/test/maps_SUITE_data/maps_warn_pair_key_overloaded.erl
@@ -14,7 +14,6 @@ test() ->
"hi2" => lists:subtract([1,2],[1]),
"hi3" => +3,
"hi1" => erlang:min(1,2),
- "hi1" => erlang:hash({1,2},35),
"hi1" => erlang:phash({1,2},33),
"hi1" => erlang:phash2({1,2},34),
"hi1" => erlang:integer_to_binary(1337),
diff --git a/lib/inets/doc/src/notes.xml b/lib/inets/doc/src/notes.xml
index 398fc7e5b6..5c3b5a2d3c 100644
--- a/lib/inets/doc/src/notes.xml
+++ b/lib/inets/doc/src/notes.xml
@@ -33,7 +33,22 @@
<file>notes.xml</file>
</header>
- <section><title>Inets 6.3.4</title>
+ <section><title>Inets 6.3.5</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ Correct mistakes in the ftp client introduced in inets-6.3.4</p>
+ <p>
+ Own Id: OTP-14203 Aux Id: OTP-13982 </p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Inets 6.3.4</title>
<section><title>Fixed Bugs and Malfunctions</title>
<list>
diff --git a/lib/inets/src/ftp/ftp.erl b/lib/inets/src/ftp/ftp.erl
index 911f5b71a7..23d6483291 100644
--- a/lib/inets/src/ftp/ftp.erl
+++ b/lib/inets/src/ftp/ftp.erl
@@ -1477,10 +1477,7 @@ handle_info({Trpt, Socket, Data}, #state{dsock = {Trpt,Socket}} = State0) when T
handle_info({Cls, Socket}, #state{dsock = {Trpt,Socket},
caller = {recv_file, Fd}} = State)
when {Cls,Trpt}=={tcp_closed,tcp} ; {Cls,Trpt}=={ssl_closed,ssl} ->
- case file_close(Fd) of
- ok -> ok;
- {error,einval} -> ok
- end,
+ file_close(Fd),
progress_report({transfer_size, 0}, State),
activate_ctrl_connection(State),
{noreply, State#state{dsock = undefined, data = <<>>}};
@@ -2066,10 +2063,7 @@ handle_ctrl_result({pos_prel, _}, #state{caller = {recv_file, _}} = State0) ->
end;
handle_ctrl_result({Status, _}, #state{caller = {recv_file, Fd}} = State) ->
- case file_close(Fd) of
- ok -> ok;
- {error, einval} -> ok
- end,
+ file_close(Fd),
close_data_connection(State),
ctrl_result_response(Status, State#state{dsock = undefined},
{error, epath});
@@ -2345,7 +2339,7 @@ accept_data_connection(#state{mode = passive} = State) ->
send_ctrl_message(_S=#state{csock = Socket, verbose = Verbose}, Message) ->
verbose(lists:flatten(Message),Verbose,send),
?DBG('<--ctrl ~p ---- ~s~p~n',[Socket,Message,_S]),
- ok = send_message(Socket, Message).
+ _ = send_message(Socket, Message).
send_data_message(_S=#state{dsock = Socket}, Message) ->
?DBG('<==data ~p ==== ~s~n~p~n',[Socket,Message,_S]),
@@ -2366,37 +2360,44 @@ send_message({tcp, Socket}, Message) ->
send_message({ssl, Socket}, Message) ->
ssl:send(Socket, Message).
-activate_ctrl_connection(#state{csock = Socket, ctrl_data = {<<>>, _, _}}) ->
- ok = activate_connection(Socket);
-activate_ctrl_connection(#state{csock = Socket}) ->
- ok = activate_connection(Socket),
+activate_ctrl_connection(#state{csock = CSock, ctrl_data = {<<>>, _, _}}) ->
+ activate_connection(CSock);
+activate_ctrl_connection(#state{csock = CSock}) ->
+ activate_connection(CSock),
%% We have already received at least part of the next control message,
%% that has been saved in ctrl_data, process this first.
- self() ! {socket_type(Socket), unwrap_socket(Socket), <<>>},
+ self() ! {socket_type(CSock), unwrap_socket(CSock), <<>>},
ok.
+activate_data_connection(#state{dsock = DSock} = State) ->
+ activate_connection(DSock),
+ State.
+
+activate_connection(Socket) ->
+ ignore_return_value(
+ case socket_type(Socket) of
+ tcp -> inet:setopts(unwrap_socket(Socket), [{active, once}]);
+ ssl -> ssl:setopts(unwrap_socket(Socket), [{active, once}])
+ end).
+
+
+ignore_return_value(_) -> ok.
+
unwrap_socket({tcp,Socket}) -> Socket;
unwrap_socket({ssl,Socket}) -> Socket.
socket_type({tcp,_Socket}) -> tcp;
socket_type({ssl,_Socket}) -> ssl.
-activate_data_connection(#state{dsock = Socket} = State) ->
- ok = activate_connection(Socket),
- State.
-
-activate_connection({tcp, Socket}) -> inet:setopts(Socket, [{active, once}]);
-activate_connection({ssl, Socket}) -> ssl:setopts(Socket, [{active, once}]).
-
close_ctrl_connection(#state{csock = undefined}) -> ok;
close_ctrl_connection(#state{csock = Socket}) -> close_connection(Socket).
close_data_connection(#state{dsock = undefined}) -> ok;
close_data_connection(#state{dsock = Socket}) -> close_connection(Socket).
-close_connection({lsock,Socket}) -> gen_tcp:close(Socket);
-close_connection({tcp, Socket}) -> gen_tcp:close(Socket);
-close_connection({ssl, Socket}) -> ssl:close(Socket).
+close_connection({lsock,Socket}) -> ignore_return_value( gen_tcp:close(Socket) );
+close_connection({tcp, Socket}) -> ignore_return_value( gen_tcp:close(Socket) );
+close_connection({ssl, Socket}) -> ignore_return_value( ssl:close(Socket) ).
%% ------------ FILE HANDLING ----------------------------------------
send_file(#state{tls_upgrading_data_connection = {true, CTRL, _}} = State, Fd) ->
@@ -2408,7 +2409,7 @@ send_file(State, Fd) ->
progress_report({binary, Bin}, State),
send_file(State, Fd);
{ok, _, _} ->
- ok = file_close(Fd),
+ file_close(Fd),
close_data_connection(State),
progress_report({transfer_size, 0}, State),
activate_ctrl_connection(State),
@@ -2423,7 +2424,7 @@ file_open(File, Option) ->
file:open(File, [raw, binary, Option]).
file_close(Fd) ->
- file:close(Fd).
+ ignore_return_value( file:close(Fd) ).
file_read(Fd) ->
case file:read(Fd, ?FILE_BUFSIZE) of
diff --git a/lib/inets/src/http_server/httpd_request_handler.erl b/lib/inets/src/http_server/httpd_request_handler.erl
index 7e20a9ba67..82273c8c74 100644
--- a/lib/inets/src/http_server/httpd_request_handler.erl
+++ b/lib/inets/src/http_server/httpd_request_handler.erl
@@ -241,9 +241,9 @@ handle_info({tcp_closed, _}, State) ->
handle_info({ssl_closed, _}, State) ->
{stop, normal, State};
handle_info({tcp_error, _, _} = Reason, State) ->
- {stop, Reason, State};
+ {stop, {shutdown, Reason}, State};
handle_info({ssl_error, _, _} = Reason, State) ->
- {stop, Reason, State};
+ {stop, {shutdown, Reason}, State};
%% Timeouts
handle_info(timeout, #state{mfa = {_, parse, _}} = State) ->
diff --git a/lib/inets/src/inets_app/inets.appup.src b/lib/inets/src/inets_app/inets.appup.src
index 3a31daeb20..d28d4cd766 100644
--- a/lib/inets/src/inets_app/inets.appup.src
+++ b/lib/inets/src/inets_app/inets.appup.src
@@ -18,10 +18,14 @@
%% %CopyrightEnd%
{"%VSN%",
[
+ {<<"6.2.4">>, [{load_module, httpd_request_handler,
+ soft_purge, soft_purge, []}]},
{<<"6\\..*">>,[{restart_application, inets}]},
{<<"5\\..*">>,[{restart_application, inets}]}
],
[
+ {<<"6.2.4">>, [{load_module, httpd_request_handler,
+ soft_purge, soft_purge, []}]},
{<<"6\\..*">>,[{restart_application, inets}]},
{<<"5\\..*">>,[{restart_application, inets}]}
]
diff --git a/lib/inets/test/httpd_test_data/server_root/conf/httpd.conf b/lib/inets/test/httpd_test_data/server_root/conf/httpd.conf
index 3f9fde03b5..ec05fc6714 100644
--- a/lib/inets/test/httpd_test_data/server_root/conf/httpd.conf
+++ b/lib/inets/test/httpd_test_data/server_root/conf/httpd.conf
@@ -128,7 +128,7 @@ SecurityDiskLogSize 200000 10
MaxClients 50
-# KeepAlive set the flag for persistent connections. For peristent connections
+# KeepAlive set the flag for persistent connections. For persistent connections
# set KeepAlive to on. To use One request per connection set the flag to off
# Note: The value has changed since previous version of INETS.
KeepAlive on
diff --git a/lib/inets/test/old_httpd_SUITE_data/server_root/conf/httpd.conf b/lib/inets/test/old_httpd_SUITE_data/server_root/conf/httpd.conf
index 3f9fde03b5..ec05fc6714 100644
--- a/lib/inets/test/old_httpd_SUITE_data/server_root/conf/httpd.conf
+++ b/lib/inets/test/old_httpd_SUITE_data/server_root/conf/httpd.conf
@@ -128,7 +128,7 @@ SecurityDiskLogSize 200000 10
MaxClients 50
-# KeepAlive set the flag for persistent connections. For peristent connections
+# KeepAlive set the flag for persistent connections. For persistent connections
# set KeepAlive to on. To use One request per connection set the flag to off
# Note: The value has changed since previous version of INETS.
KeepAlive on
diff --git a/lib/inets/vsn.mk b/lib/inets/vsn.mk
index eef5abd610..9591ab22ed 100644
--- a/lib/inets/vsn.mk
+++ b/lib/inets/vsn.mk
@@ -19,6 +19,6 @@
# %CopyrightEnd%
APPLICATION = inets
-INETS_VSN = 6.3.4
+INETS_VSN = 6.3.5
PRE_VSN =
APP_VSN = "$(APPLICATION)-$(INETS_VSN)$(PRE_VSN)"
diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractConnection.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractConnection.java
index 7891871e76..b9b4223155 100644
--- a/lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractConnection.java
+++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/AbstractConnection.java
@@ -30,7 +30,7 @@ import java.util.Random;
* received from the peer.
*
* <p>
- * This abstract class provides the neccesary methods to maintain the actual
+ * This abstract class provides the necessary methods to maintain the actual
* connection and encode the messages and headers in the proper format according
* to the Erlang distribution protocol. Subclasses can use these methods to
* provide a more or less transparent communication channel as desired.
diff --git a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpMbox.java b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpMbox.java
index 70c9e6db4a..bd3a3f4ad3 100644
--- a/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpMbox.java
+++ b/lib/jinterface/java_src/com/ericsson/otp/erlang/OtpMbox.java
@@ -38,7 +38,7 @@ package com.ericsson.otp.erlang;
* <p>
* Mailboxes can be named, either at creation or later. Messages can be sent to
* named mailboxes and named Erlang processes without knowing the
- * {@link OtpErlangPid pid} that identifies the mailbox. This is neccessary in
+ * {@link OtpErlangPid pid} that identifies the mailbox. This is necessary in
* order to set up initial communication between parts of an application. Each
* mailbox can have at most one name.
* </p>
diff --git a/lib/kernel/doc/src/kernel_app.xml b/lib/kernel/doc/src/kernel_app.xml
index df681a505f..b342fff0d3 100644
--- a/lib/kernel/doc/src/kernel_app.xml
+++ b/lib/kernel/doc/src/kernel_app.xml
@@ -11,7 +11,7 @@
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-
+
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
@@ -19,7 +19,7 @@
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-
+
</legalnotice>
<title>kernel</title>
@@ -58,6 +58,60 @@
</section>
<section>
+ <title>OS Signal Event Handler</title>
+ <p>Asynchronous OS signals may be subscribed to via the Kernel application's event manager
+ (see <seealso marker="doc/design_principles:des_princ">OTP Design Principles</seealso> and
+ <seealso marker="stdlib:gen_event"><c>gen_event(3)</c></seealso>) registered as <c>erl_signal_server</c>.
+ A default signal handler is installed which handles the following signals:</p>
+ <taglist>
+ <tag><c>sigusr1</c></tag>
+ <item><p>The default handler will halt Erlang and produce a crashdump
+ with slogan "Received SIGUSR1".
+ This is equivalent to calling <c>erlang:halt("Received SIGUSR1")</c>.
+ </p></item>
+
+ <tag><c>sigquit</c></tag>
+ <item><p>The default handler will halt Erlang immediately.
+ This is equivalent to calling <c>erlang:halt()</c>.
+ </p></item>
+
+ <tag><c>sigterm</c></tag>
+ <item><p>The default handler will terminate Erlang normally.
+ This is equivalent to calling <c>init:stop()</c>.
+ </p></item>
+ </taglist>
+
+ <section>
+ <title>Events</title>
+ <p>Any event handler added to <c>erl_signal_server</c> must handle the following events.</p>
+ <taglist>
+ <tag><c>sighup</c></tag>
+ <item><p>Hangup detected on controlling terminal or death of controlling process</p></item>
+ <tag><c>sigquit</c></tag>
+ <item><p>Quit from keyboard</p></item>
+ <tag><c>sigabrt</c></tag>
+ <item><p>Abort signal from abort</p></item>
+ <tag><c>sigalrm</c></tag>
+ <item><p>Timer signal from alarm</p></item>
+ <tag><c>sigterm</c></tag>
+ <item><p>Termination signal</p></item>
+ <tag><c>sigusr1</c></tag>
+ <item><p>User-defined signal 1</p></item>
+ <tag><c>sigusr2</c></tag>
+ <item><p>User-defined signal 2</p></item>
+ <tag><c>sigchld</c></tag>
+ <item><p>Child process stopped or terminated</p></item>
+ <tag><c>sigstop</c></tag>
+ <item><p>Stop process</p></item>
+ <tag><c>sigtstp</c></tag>
+ <item><p>Stop typed at terminal</p></item>
+ </taglist>
+
+ <p>Setting OS signals is described in <seealso marker="os#set_signal/2"><c>os:set_signal/2</c></seealso>.</p>
+ </section>
+ </section>
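
As an illustration of the handler contract described above, a minimal sketch of a custom
gen_event handler for these events, assuming a hypothetical module name my_signal_handler
that simply logs SIGHUP and leaves all other signals untouched:

    -module(my_signal_handler).
    -behaviour(gen_event).
    -export([init/1, handle_event/2, handle_call/2, handle_info/2,
             terminate/2, code_change/3]).

    init(_Args) ->
        {ok, []}.

    %% React to SIGHUP; every other signal falls through to the
    %% catch-all clause and is ignored by this handler.
    handle_event(sighup, State) ->
        error_logger:info_msg("SIGHUP received~n"),
        {ok, State};
    handle_event(_Signal, State) ->
        {ok, State}.

    handle_call(_Request, State) -> {ok, ok, State}.
    handle_info(_Info, State) -> {ok, State}.
    terminate(_Args, _State) -> ok.
    code_change(_OldVsn, State, _Extra) -> {ok, State}.

Such a handler could be added with gen_event:add_handler(erl_signal_server,
my_signal_handler, []), and delivery of SIGHUP enabled via os:set_signal(sighup, handle).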
+
+ <section>
<title>Configuration</title>
<p>The following configuration parameters are defined for the Kernel
application. For more information about configuration parameters,
@@ -379,6 +433,28 @@ MaxT = TickTime + TickTime / 4</code>
return as soon as possible for <c>application_controller</c>
to terminate properly.</p>
</item>
+ <tag><c>source_search_rules = [DirRule] | [SuffixRule] </c></tag>
+ <item>
+ <marker id="source_search_rules"></marker>
+ <p>Where:</p>
+ <list type="bulleted">
+ <item><c>DirRule = {ObjDirSuffix,SrcDirSuffix}</c></item>
+ <item><c>SuffixRule = {ObjSuffix,SrcSuffix,[DirRule]}</c></item>
+ <item><c>ObjDirSuffix = string()</c></item>
+ <item><c>SrcDirSuffix = string()</c></item>
+ <item><c>ObjSuffix = string()</c></item>
+ <item><c>SrcSuffix = string()</c></item>
+ </list>
+ <p>Specifies a list of rules for use by <c>filelib:find_file/2</c> and
+ <c>filelib:find_source/2</c>. If this is set to a value other
+ than the empty list, it replaces the default rules. Rules can be
+ simple pairs of directory suffixes, such as <c>{"ebin",
+ "src"}</c>, which are used by <c>filelib:find_file/2</c>, or
+ triples specifying separate directory suffix rules depending on
+ file name extensions, for example <c>{".beam", ".erl", [{"ebin",
+ "src"}]}</c>, which are used by <c>filelib:find_source/2</c>. Both
+ kinds of rules can be mixed in the list.</p>
+ </item>
</taglist>
</section>
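
As an illustration of the parameter described above, a sketch of a sys.config entry
(the extra "esrc" directory rule is an assumption, not a default) and of how
filelib:find_source/1 would then resolve the source of a loaded module:

    %% sys.config (sketch)
    [{kernel,
      [{source_search_rules,
        [{"ebin", "src"},
         {".beam", ".erl", [{"ebin", "src"}, {"ebin", "esrc"}]}]}]}].

    %% In a shell, assuming my_mod was loaded from an "ebin" directory
    %% with its source kept in a sibling "src" or "esrc" directory:
    %%   {ok, SrcPath} = filelib:find_source(code:which(my_mod)).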
@@ -405,4 +481,3 @@ MaxT = TickTime + TickTime / 4</code>
<seealso marker="stdlib:timer"><c>timer(3)</c></seealso></p>
</section>
</appref>
-
diff --git a/lib/kernel/doc/src/notes.xml b/lib/kernel/doc/src/notes.xml
index 9277c2d353..d80c4f077c 100644
--- a/lib/kernel/doc/src/notes.xml
+++ b/lib/kernel/doc/src/notes.xml
@@ -108,7 +108,7 @@
<item>
<p>
Close stdin of commands run in os:cmd. This is a
- backwards compatiblity fix that restores the behaviour of
+ backwards compatibility fix that restores the behaviour of
pre 19.0 os:cmd.</p>
<p>
Own Id: OTP-13867 Aux Id: seq13178 </p>
@@ -1445,7 +1445,7 @@
dependent, so applications aiming to be portable should
consider using <c>{ipv6_v6only,true}</c> when creating an
<c>inet6</c> listening/destination socket, and if
- neccesary also create an <c>inet</c> socket on the same
+ necessary also create an <c>inet</c> socket on the same
port for IPv4 traffic. See the documentation.</p>
<p>
Own Id: OTP-8928 Aux Id: kunagi-193 [104] </p>
diff --git a/lib/kernel/doc/src/os.xml b/lib/kernel/doc/src/os.xml
index 739ac35d2a..6ba69d12a3 100644
--- a/lib/kernel/doc/src/os.xml
+++ b/lib/kernel/doc/src/os.xml
@@ -11,7 +11,7 @@
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-
+
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
@@ -19,7 +19,7 @@
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-
+
</legalnotice>
<title>os</title>
@@ -156,6 +156,32 @@ DirOut = os:cmd("dir"), % on Win32 platform</code>
</func>
<func>
+ <name name="set_signal" arity="2"/>
+ <fsummary>Enables or disables handling of OS signals.</fsummary>
+ <desc>
+ <p>Enables or disables handling of OS signals.</p>
+ <p>Each signal may be set to one of the following options:</p>
+ <taglist>
+ <tag><c>ignore</c></tag>
+ <item>
+ This signal will be ignored.
+ </item>
+
+ <tag><c>default</c></tag>
+ <item>
+ This signal will use the default signal handler for the operating system.
+ </item>
+
+ <tag><c>handle</c></tag>
+ <item>
+ This signal will notify <c>erl_signal_server</c> when it is received by
+ the Erlang runtime system.
+ </item>
+ </taglist>
+ </desc>
+ </func>
+
+ <func>
<name name="system_time" arity="0"/>
<fsummary>Current OS system time.</fsummary>
<desc>
@@ -296,4 +322,3 @@ calendar:now_to_universal_time(TS),
</func>
</funcs>
</erlref>
-
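
As a usage sketch of the options documented above (the particular signals chosen
here are arbitrary examples):

    1> os:set_signal(sigusr1, ignore).   %% no crashdump on SIGUSR1
    ok
    2> os:set_signal(sighup, handle).    %% deliver SIGHUP to erl_signal_server
    ok
    3> os:set_signal(sigterm, default).  %% use the OS default action for SIGTERM
    ok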
diff --git a/lib/kernel/include/inet.hrl b/lib/kernel/include/inet.hrl
index b39df8c3f2..df788aca08 100644
--- a/lib/kernel/include/inet.hrl
+++ b/lib/kernel/include/inet.hrl
@@ -22,7 +22,7 @@
-record(hostent,
{
- h_name :: inet:hostname(), %% offical name of host
+ h_name :: inet:hostname(), %% official name of host
h_aliases = [] :: [inet:hostname()], %% alias list
h_addrtype :: 'inet' | 'inet6', %% host address type
h_length :: non_neg_integer(), %% length of address
diff --git a/lib/kernel/src/Makefile b/lib/kernel/src/Makefile
index 2b72f78dcf..2a89faaf13 100644
--- a/lib/kernel/src/Makefile
+++ b/lib/kernel/src/Makefile
@@ -71,6 +71,7 @@ MODULES = \
erl_distribution \
erl_epmd \
erl_reply \
+ erl_signal_handler \
erts_debug \
error_handler \
error_logger \
diff --git a/lib/kernel/src/code.erl b/lib/kernel/src/code.erl
index 5a7ca493cc..2a06d0cb15 100644
--- a/lib/kernel/src/code.erl
+++ b/lib/kernel/src/code.erl
@@ -489,13 +489,13 @@ prepare_check_uniq_1([], [_|_]=Errors) ->
{error,Errors}.
partition_on_load(Prep) ->
- P = fun({_,{Bin,_,_}}) ->
- erlang:has_prepared_code_on_load(Bin)
+ P = fun({_,{PC,_,_}}) ->
+ erlang:has_prepared_code_on_load(PC)
end,
lists:partition(P, Prep).
verify_prepared([{M,{Prep,Name,_Native}}|T])
- when is_atom(M), is_binary(Prep), is_list(Name) ->
+ when is_atom(M), is_list(Name) ->
try erlang:has_prepared_code_on_load(Prep) of
false ->
verify_prepared(T);
@@ -562,10 +562,10 @@ prepare_loading_fun() ->
GetNative = get_native_fun(),
fun(Mod, FullName, Beam) ->
case erlang:prepare_loading(Mod, Beam) of
- Prepared when is_binary(Prepared) ->
- {ok,{Prepared,FullName,GetNative(Beam)}};
{error,_}=Error ->
- Error
+ Error;
+ Prepared ->
+ {ok,{Prepared,FullName,GetNative(Beam)}}
end
end.
diff --git a/lib/kernel/src/dist_ac.erl b/lib/kernel/src/dist_ac.erl
index 6c2fa0b6b1..e63c969b79 100644
--- a/lib/kernel/src/dist_ac.erl
+++ b/lib/kernel/src/dist_ac.erl
@@ -123,7 +123,7 @@ load_application(AppName, DistNodes) ->
gen_server:call(?DIST_AC, {load_application, AppName, DistNodes}, infinity).
takeover_application(AppName, RestartType) ->
- case validRestartType(RestartType) of
+ case valid_restart_type(RestartType) of
true ->
wait_for_sync_dacs(),
Nodes = get_nodes(AppName),
@@ -1514,10 +1514,10 @@ dist_del_node(Appls, Node) ->
Appl#appl{run = NRun}
end, Appls).
-validRestartType(permanent) -> true;
-validRestartType(temporary) -> true;
-validRestartType(transient) -> true;
-validRestartType(_RestartType) -> false.
+valid_restart_type(permanent) -> true;
+valid_restart_type(temporary) -> true;
+valid_restart_type(transient) -> true;
+valid_restart_type(_RestartType) -> false.
dist_mismatch(AppName, Node) ->
error_msg("Distribution mismatch for application \"~p\" on nodes ~p and ~p~n",
diff --git a/lib/kernel/src/erl_signal_handler.erl b/lib/kernel/src/erl_signal_handler.erl
new file mode 100644
index 0000000000..8f924d2adc
--- /dev/null
+++ b/lib/kernel/src/erl_signal_handler.erl
@@ -0,0 +1,57 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1996-2013. All Rights Reserved.
+%%
+%% The contents of this file are subject to the Erlang Public License,
+%% Version 1.1, (the "License"); you may not use this file except in
+%% compliance with the License. You should have received a copy of the
+%% Erlang Public License along with this software. If not, it can be
+%% retrieved online at http://www.erlang.org/.
+%%
+%% Software distributed under the License is distributed on an "AS IS"
+%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
+%% the License for the specific language governing rights and limitations
+%% under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+-module(erl_signal_handler).
+-behaviour(gen_event).
+-export([init/1, format_status/2,
+ handle_event/2, handle_call/2, handle_info/2,
+ terminate/2, code_change/3]).
+
+-record(state,{}).
+
+init(_Args) ->
+ {ok, #state{}}.
+
+handle_event(sigusr1, S) ->
+ erlang:halt("Received SIGUSR1"),
+ {ok, S};
+handle_event(sigquit, S) ->
+ erlang:halt(),
+ {ok, S};
+handle_event(sigterm, S) ->
+ error_logger:info_msg("SIGTERM received - shutting down~n"),
+ ok = init:stop(),
+ {ok, S};
+handle_event(_SignalMsg, S) ->
+ {ok, S}.
+
+handle_info(_Info, S) ->
+ {ok, S}.
+
+handle_call(_Request, S) ->
+ {ok, ok, S}.
+
+format_status(_Opt, [_Pdict,_S]) ->
+ ok.
+
+code_change(_OldVsn, S, _Extra) ->
+ {ok, S}.
+
+terminate(_Args, _S) ->
+ ok.
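
Relatedly, a sketch of how the default handler above could be replaced at runtime
with an application-specific one; the module name my_signal_handler is an assumption:

    %% Remove erl_signal_handler and install a custom handler in one call.
    ok = gen_event:swap_handler(erl_signal_server,
                                {erl_signal_handler, []},
                                {my_signal_handler, []}).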
diff --git a/lib/kernel/src/error_logger.erl b/lib/kernel/src/error_logger.erl
index 3523f680a3..3ee8e2c6e6 100644
--- a/lib/kernel/src/error_logger.erl
+++ b/lib/kernel/src/error_logger.erl
@@ -360,8 +360,12 @@ init(Max) when is_integer(Max) ->
%% go back.
init({go_back, _PostState}) ->
{ok, {?buffer_size, 0, []}};
-init(_) -> %% Start and just relay to other
- {ok, []}. %% node if node(GLeader) =/= node().
+init(_) ->
+ %% The error logger process may receive a huge amount of
+ %% messages. Make sure that they are stored off heap to
+ %% avoid excessive GCs.
+ process_flag(message_queue_data, off_heap),
+ {ok, []}.
-spec handle_event(term(), state()) -> {'ok', state()}.
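
The off_heap setting used above is an ordinary process flag; as a sketch, it can be
applied either at spawn time or from inside the running process (loop/0 is a
placeholder for the real process body):

    %% At spawn time, for a process expected to accumulate a long queue:
    Pid = spawn_opt(fun loop/0, [{message_queue_data, off_heap}]).

    %% Or from within the process itself, as error_logger now does:
    process_flag(message_queue_data, off_heap).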
diff --git a/lib/kernel/src/file.erl b/lib/kernel/src/file.erl
index 1971df9038..79e72cdc6d 100644
--- a/lib/kernel/src/file.erl
+++ b/lib/kernel/src/file.erl
@@ -1424,7 +1424,7 @@ path_open_first([Path|Rest], Name, Mode, LastError) ->
case open(FileName, Mode) of
{ok, Fd} ->
{ok, Fd, FileName};
- {error, enoent} ->
+ {error, Reason} when Reason =:= enoent; Reason =:= enotdir ->
path_open_first(Rest, Name, Mode, LastError);
Error ->
Error
diff --git a/lib/kernel/src/inet_parse.erl b/lib/kernel/src/inet_parse.erl
index b0a3ee3008..9b47199e08 100644
--- a/lib/kernel/src/inet_parse.erl
+++ b/lib/kernel/src/inet_parse.erl
@@ -701,8 +701,8 @@ dup(N, E, L) when is_integer(N), N >= 1 ->
-%% Convert IPv4 adress to ascii
-%% Convert IPv6 / IPV4 adress to ascii (plain format)
+%% Convert IPv4 address to ascii
+%% Convert IPv6 / IPV4 address to ascii (plain format)
ntoa({A,B,C,D}) ->
integer_to_list(A) ++ "." ++ integer_to_list(B) ++ "." ++
integer_to_list(C) ++ "." ++ integer_to_list(D);
diff --git a/lib/kernel/src/inet_udp.erl b/lib/kernel/src/inet_udp.erl
index 8a8aa8ecca..c69791b9aa 100644
--- a/lib/kernel/src/inet_udp.erl
+++ b/lib/kernel/src/inet_udp.erl
@@ -113,7 +113,7 @@ fdopen(Fd, Opts) ->
%% Here's how:
%% Reverse the list.
%% For each head option go through the tail and remove
-%% all occurences of the same option from the tail.
+%% all occurrences of the same option from the tail.
%% Store that head option and iterate using the new tail.
%% Return the list of stored head options.
optuniquify(List) ->
@@ -122,8 +122,8 @@ optuniquify(List) ->
optuniquify([], Result) ->
Result;
optuniquify([Opt | Tail], Result) ->
- %% Remove all occurences of Opt in Tail,
- %% prepend Opt to Result,
+ %% Remove all occurrences of Opt in Tail,
+ %% prepend Opt to Result,
%% then iterate back here.
optuniquify(Opt, Tail, [], Result).
diff --git a/lib/kernel/src/kernel.app.src b/lib/kernel/src/kernel.app.src
index 4d08a55c7c..25e4ddd95c 100644
--- a/lib/kernel/src/kernel.app.src
+++ b/lib/kernel/src/kernel.app.src
@@ -34,6 +34,7 @@
erl_boot_server,
erl_distribution,
erl_reply,
+ erl_signal_handler,
error_handler,
error_logger,
file,
diff --git a/lib/kernel/src/kernel.erl b/lib/kernel/src/kernel.erl
index 3d0ef81318..59eca242b1 100644
--- a/lib/kernel/src/kernel.erl
+++ b/lib/kernel/src/kernel.erl
@@ -32,6 +32,14 @@
start(_, []) ->
case supervisor:start_link({local, kernel_sup}, kernel, []) of
{ok, Pid} ->
+ %% add signal handler
+ case whereis(erl_signal_server) of
+ %% in case of minimal mode
+ undefined -> ok;
+ _ ->
+ ok = gen_event:add_handler(erl_signal_server, erl_signal_handler, [])
+ end,
+ %% add error handler
Type = get_error_logger_type(),
case error_logger:swap_handler(Type) of
ok -> {ok, Pid, []};
@@ -131,6 +139,9 @@ init([]) ->
permanent, 2000, worker, [inet_db]},
NetSup = {net_sup, {erl_distribution, start_link, []},
permanent, infinity, supervisor,[erl_distribution]},
+ SigSrv = #{id => erl_signal_server,
+ start => {gen_event, start_link, [{local, erl_signal_server}]},
+ type => worker, restart => permanent, shutdown => 2000, modules => dynamic},
DistAC = start_dist_ac(),
Timer = start_timer(),
@@ -141,7 +152,7 @@ init([]) ->
permanent, infinity, supervisor, [?MODULE]},
{ok, {SupFlags,
[Code, Rpc, Global, InetDb | DistAC] ++
- [NetSup, Glo_grp, File,
+ [NetSup, Glo_grp, File, SigSrv,
StdError, User, Config, SafeSupervisor] ++ Timer}}
end;
init(safe) ->
diff --git a/lib/kernel/src/os.erl b/lib/kernel/src/os.erl
index f8519d3a5e..c3ffcb3f70 100644
--- a/lib/kernel/src/os.erl
+++ b/lib/kernel/src/os.erl
@@ -29,7 +29,7 @@
-export([getenv/0, getenv/1, getenv/2, getpid/0,
perf_counter/0, perf_counter/1,
- putenv/2, system_time/0, system_time/1,
+ putenv/2, set_signal/2, system_time/0, system_time/1,
timestamp/0, unsetenv/1]).
-spec getenv() -> [string()].
@@ -104,6 +104,15 @@ timestamp() ->
unsetenv(_) ->
erlang:nif_error(undef).
+-spec set_signal(Signal, Option) -> 'ok' when
+ Signal :: 'sighup' | 'sigquit' | 'sigabrt' | 'sigalrm' |
+ 'sigterm' | 'sigusr1' | 'sigusr2' | 'sigchld' |
+ 'sigstop' | 'sigtstp',
+ Option :: 'default' | 'handle' | 'ignore'.
+
+set_signal(_Signal, _Option) ->
+ erlang:nif_error(undef).
+
%%% End of BIFs
-spec type() -> {Osfamily, Osname} when
diff --git a/lib/kernel/src/rpc.erl b/lib/kernel/src/rpc.erl
index 21bff02214..bd6ea26678 100644
--- a/lib/kernel/src/rpc.erl
+++ b/lib/kernel/src/rpc.erl
@@ -67,17 +67,27 @@
%%------------------------------------------------------------------------
+
+%% The rex server may receive a huge amount of
+%% messages. Make sure that they are stored off heap to
+%% avoid excessive GCs.
+
+-define(SPAWN_OPTS, [{spawn_opt,[{message_queue_data,off_heap}]}]).
+
%% Remote execution and broadcasting facility
-spec start() -> {'ok', pid()} | 'ignore' | {'error', term()}.
start() ->
- gen_server:start({local,?NAME}, ?MODULE, [], []).
+ gen_server:start({local,?NAME}, ?MODULE, [], ?SPAWN_OPTS).
-spec start_link() -> {'ok', pid()} | 'ignore' | {'error', term()}.
start_link() ->
- gen_server:start_link({local,?NAME}, ?MODULE, [], []).
+ %% The rex server process may receive a huge amount of
+ %% messages. Make sure that they are stored off heap to
+ %% avoid excessive GCs.
+ gen_server:start_link({local,?NAME}, ?MODULE, [], ?SPAWN_OPTS).
-spec stop() -> term().
diff --git a/lib/kernel/test/application_SUITE.erl b/lib/kernel/test/application_SUITE.erl
index 81407e9d96..b4cf31b210 100644
--- a/lib/kernel/test/application_SUITE.erl
+++ b/lib/kernel/test/application_SUITE.erl
@@ -1498,7 +1498,7 @@ otp_5363(Conf) when is_list(Conf) ->
%% Ticket: OTP-5606
%% Slogan: Problems with starting a distributed application
%%-----------------------------------------------------------------
-%% Test of several processes simultanously starting the same
+%% Test of several processes simultaneously starting the same
%% distributed application.
otp_5606(Conf) when is_list(Conf) ->
diff --git a/lib/kernel/test/code_SUITE.erl b/lib/kernel/test/code_SUITE.erl
index 4914ce9e4c..f368232bfc 100644
--- a/lib/kernel/test/code_SUITE.erl
+++ b/lib/kernel/test/code_SUITE.erl
@@ -323,7 +323,7 @@ load_abs(Config) when is_list(Config) ->
{error, nofile} = code:load_abs(TestDir ++ "/duuuumy_mod"),
{error, badfile} = code:load_abs(TestDir ++ "/code_a_test"),
{'EXIT', _} = (catch code:load_abs({})),
- {'EXIT', _} = (catch code:load_abs("Non-latin-имя-файла")),
+ {error, nofile} = code:load_abs("Non-latin-имя-файла"),
{module, code_b_test} = code:load_abs(TestDir ++ "/code_b_test"),
code:stick_dir(TestDir),
{error, sticky_directory} = code:load_abs(TestDir ++ "/code_b_test"),
diff --git a/lib/kernel/test/erl_distribution_SUITE.erl b/lib/kernel/test/erl_distribution_SUITE.erl
index f630896e03..09c80a0956 100644
--- a/lib/kernel/test/erl_distribution_SUITE.erl
+++ b/lib/kernel/test/erl_distribution_SUITE.erl
@@ -233,7 +233,7 @@ time_ping(Node) ->
erlang:convert_time_unit(T1 - T0, native, millisecond).
%% Keep the connection with the client node up.
-%% This is neccessary as the client node runs with much shorter
+%% This is necessary as the client node runs with much shorter
%% tick time !!
keep_conn(Node) ->
sleep(1),
@@ -1059,7 +1059,7 @@ monitor_nodes_otp_6481_test(Config, TestType) when is_list(Config) ->
RemotePid = spawn(Node,
fun () ->
receive after 1500 -> ok end,
- %% infinit loop of msgs
+ %% infinite loop of msgs
%% we want an endless stream of messages and the kill
%% the node mercilessly.
%% We then want to ensure that the nodedown message arrives
diff --git a/lib/kernel/test/erl_distribution_wb_SUITE.erl b/lib/kernel/test/erl_distribution_wb_SUITE.erl
index 6a23ad0d11..61aa3b32ee 100644
--- a/lib/kernel/test/erl_distribution_wb_SUITE.erl
+++ b/lib/kernel/test/erl_distribution_wb_SUITE.erl
@@ -30,7 +30,7 @@
%% 1)
%%
-%% Connections are now always set up symetrically with respect to
+%% Connections are now always set up symmetrically with respect to
%% publication. If connecting node doesn't send DFLAG_PUBLISHED
%% the other node wont send DFLAG_PUBLISHED. If the connecting
%% node send DFLAG_PUBLISHED but the other node doesn't send
diff --git a/lib/kernel/test/error_logger_SUITE.erl b/lib/kernel/test/error_logger_SUITE.erl
index b6e7551741..bb01c2384d 100644
--- a/lib/kernel/test/error_logger_SUITE.erl
+++ b/lib/kernel/test/error_logger_SUITE.erl
@@ -30,6 +30,7 @@
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_group/2,end_per_group/2,
+ off_heap/1,
error_report/1, info_report/1, error/1, info/1,
emulator/1, tty/1, logfile/1, add/1, delete/1]).
@@ -45,7 +46,7 @@ suite() ->
{timetrap,{minutes,1}}].
all() ->
- [error_report, info_report, error, info, emulator, tty,
+ [off_heap, error_report, info_report, error, info, emulator, tty,
logfile, add, delete].
groups() ->
@@ -66,6 +67,16 @@ end_per_group(_GroupName, Config) ->
%%-----------------------------------------------------------------
+off_heap(_Config) ->
+ %% The error_logger process may receive a huge amount of
+ %% messages. Make sure that they are stored off heap to
+ %% avoid excessive GCs.
+ MQD = message_queue_data,
+ {MQD,off_heap} = process_info(whereis(error_logger), MQD),
+ ok.
+
+%%-----------------------------------------------------------------
+
error_report(Config) when is_list(Config) ->
error_logger:add_report_handler(?MODULE, self()),
Rep1 = [{tag1,"data1"},{tag2,data2},{tag3,3}],
diff --git a/lib/kernel/test/file_SUITE.erl b/lib/kernel/test/file_SUITE.erl
index f2094431d8..b402f01758 100644
--- a/lib/kernel/test/file_SUITE.erl
+++ b/lib/kernel/test/file_SUITE.erl
@@ -18,7 +18,7 @@
%% %CopyrightEnd%
%%
-%% This is a developement feature when developing a new file module,
+%% This is a development feature when developing a new file module,
%% ugly but practical.
-ifndef(FILE_MODULE).
-define(FILE_MODULE, file).
diff --git a/lib/kernel/test/file_SUITE_data/realmen.html b/lib/kernel/test/file_SUITE_data/realmen.html
index c810a5d088..92e13f23b8 100644
--- a/lib/kernel/test/file_SUITE_data/realmen.html
+++ b/lib/kernel/test/file_SUITE_data/realmen.html
@@ -237,7 +237,7 @@ destroy most of the interesting uses for EQUIVALENCE, and make it
impossible to modify the operating system code with negative
subscripts. Worst of all, bounds checking is inefficient.
-<LI> Source code maintainance systems. A Real Programmer keeps his
+<LI> Source code maintenance systems. A Real Programmer keeps his
code locked up in a card file, because it implies that its owner
cannot leave his important programs unguarded [5].
@@ -396,7 +396,7 @@ double stuff Oreos for special occasions.
<LI> Underneath the Oreos is a flow-charting template, left there by
the previous occupant of the office. (Real Programmers write programs,
-not documentation. Leave that to the maintainence people.)
+not documentation. Leave that to the maintenance people.)
</UL> <P>
diff --git a/lib/kernel/test/multi_load_SUITE.erl b/lib/kernel/test/multi_load_SUITE.erl
index 369e25ac64..920839f4f9 100644
--- a/lib/kernel/test/multi_load_SUITE.erl
+++ b/lib/kernel/test/multi_load_SUITE.erl
@@ -144,14 +144,14 @@ prep_magic([H|T]) ->
prep_magic(Tuple) when is_tuple(Tuple) ->
L = prep_magic(tuple_to_list(Tuple)),
list_to_tuple(L);
-prep_magic(Bin) when is_binary(Bin) ->
- try erlang:has_prepared_code_on_load(Bin) of
+prep_magic(Ref) when is_reference(Ref) ->
+ try erlang:has_prepared_code_on_load(Ref) of
false ->
- %% Create a different kind of magic binary.
+ %% Create a different kind of magic ref.
ets:match_spec_compile([{'_',[true],['$_']}])
catch
_:_ ->
- Bin
+ Ref
end;
prep_magic(Other) ->
Other.
diff --git a/lib/kernel/test/rpc_SUITE.erl b/lib/kernel/test/rpc_SUITE.erl
index 1c72ddc87f..d76c4097d8 100644
--- a/lib/kernel/test/rpc_SUITE.erl
+++ b/lib/kernel/test/rpc_SUITE.erl
@@ -21,7 +21,8 @@
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_group/2,end_per_group/2]).
--export([call/1, block_call/1, multicall/1, multicall_timeout/1,
+-export([off_heap/1,
+ call/1, block_call/1, multicall/1, multicall_timeout/1,
multicall_dies/1, multicall_node_dies/1,
called_dies/1, called_node_dies/1,
called_throws/1, call_benchmark/1, async_call/1]).
@@ -35,7 +36,7 @@ suite() ->
{timetrap,{minutes,2}}].
all() ->
- [call, block_call, multicall, multicall_timeout,
+ [off_heap, call, block_call, multicall, multicall_timeout,
multicall_dies, multicall_node_dies, called_dies,
called_node_dies, called_throws, call_benchmark,
async_call].
@@ -55,6 +56,13 @@ init_per_group(_GroupName, Config) ->
end_per_group(_GroupName, Config) ->
Config.
+off_heap(_Config) ->
+ %% The rex server process may receive a huge amount of
+ %% messages. Make sure that they are stored off heap to
+ %% avoid excessive GCs.
+ MQD = message_queue_data,
+ {MQD,off_heap} = process_info(whereis(rex), MQD),
+ ok.
%% Test different rpc calls.
diff --git a/lib/megaco/src/text/megaco_text_gen_prev3a.hrl b/lib/megaco/src/text/megaco_text_gen_prev3a.hrl
index ae4a990779..9c75ee5926 100644
--- a/lib/megaco/src/text/megaco_text_gen_prev3a.hrl
+++ b/lib/megaco/src/text/megaco_text_gen_prev3a.hrl
@@ -424,7 +424,7 @@ enc_TransactionReply(#'TransactionReply'{transactionId = Tid,
transactionResult = Res,
%% These fields are actually not
%% supported in this implementation,
- %% but because the messanger module
+ %% but because the messenger module
%% cannot see any diff between the
%% various v3 implementations...
segmentNumber = asn1_NOVALUE,
diff --git a/lib/megaco/src/text/megaco_text_gen_prev3b.hrl b/lib/megaco/src/text/megaco_text_gen_prev3b.hrl
index e7fb85d137..7e85be4d64 100644
--- a/lib/megaco/src/text/megaco_text_gen_prev3b.hrl
+++ b/lib/megaco/src/text/megaco_text_gen_prev3b.hrl
@@ -424,7 +424,7 @@ enc_TransactionReply(#'TransactionReply'{transactionId = Tid,
transactionResult = Res,
%% These fields are actually not
%% supported in this implementation,
- %% but because the messanger module
+ %% but because the messenger module
%% cannot see any diff between the
%% various v3 implementations...
segmentNumber = asn1_NOVALUE,
diff --git a/lib/megaco/src/text/megaco_text_gen_prev3c.hrl b/lib/megaco/src/text/megaco_text_gen_prev3c.hrl
index 722e97a743..700392efe2 100644
--- a/lib/megaco/src/text/megaco_text_gen_prev3c.hrl
+++ b/lib/megaco/src/text/megaco_text_gen_prev3c.hrl
@@ -434,7 +434,7 @@ enc_TransactionReply(#'TransactionReply'{transactionId = Tid,
transactionResult = Res,
%% These fields are actually not
%% supported in this implementation,
- %% but because the messanger module
+ %% but because the messenger module
%% cannot see any diff between the
%% various v3 implementations...
segmentNumber = asn1_NOVALUE,
diff --git a/lib/mnesia/doc/src/Mnesia_chap5.xmlsrc b/lib/mnesia/doc/src/Mnesia_chap5.xmlsrc
index a83d1d77d2..62759c624b 100644
--- a/lib/mnesia/doc/src/Mnesia_chap5.xmlsrc
+++ b/lib/mnesia/doc/src/Mnesia_chap5.xmlsrc
@@ -362,11 +362,6 @@ ok
<seealso marker="mnesia_frag_hash">mnesia_frag_hash</seealso>
callback behavior. This property can explicitly be set at
table creation. Default is <c>mnesia_frag_hash</c>.</p>
- <p>Older tables, that were created before the concept of
- user-defined hash modules was introduced, use module
- <c>mnesia_frag_old_hash</c> to be backwards compatible.
- <c>mnesia_frag_old_hash</c> still uses the poor
- deprecated function <c>erlang:hash/1</c>.</p>
</item>
<tag><c>{hash_state, Term}</c></tag>
<item>
diff --git a/lib/mnesia/doc/src/notes.xml b/lib/mnesia/doc/src/notes.xml
index 51c98d0d3e..9f59759cb6 100644
--- a/lib/mnesia/doc/src/notes.xml
+++ b/lib/mnesia/doc/src/notes.xml
@@ -39,7 +39,23 @@
thus constitutes one section in this document. The title of each
section is the version number of Mnesia.</p>
- <section><title>Mnesia 4.14.2</title>
+ <section><title>Mnesia 4.14.3</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ Fixed crash in checkpoint handling when table was deleted
+ during backup.</p>
+ <p>
+ Own Id: OTP-14167</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Mnesia 4.14.2</title>
<section><title>Fixed Bugs and Malfunctions</title>
<list>
diff --git a/lib/mnesia/src/Makefile b/lib/mnesia/src/Makefile
index 5206e469a5..b68fc7d3d0 100644
--- a/lib/mnesia/src/Makefile
+++ b/lib/mnesia/src/Makefile
@@ -55,7 +55,6 @@ MODULES= \
mnesia_ext_sup \
mnesia_frag \
mnesia_frag_hash \
- mnesia_frag_old_hash \
mnesia_index \
mnesia_kernel_sup \
mnesia_late_loader \
diff --git a/lib/mnesia/src/mnesia.app.src b/lib/mnesia/src/mnesia.app.src
index af14826c90..a5d74d2d36 100644
--- a/lib/mnesia/src/mnesia.app.src
+++ b/lib/mnesia/src/mnesia.app.src
@@ -15,7 +15,6 @@
mnesia_ext_sup,
mnesia_frag,
mnesia_frag_hash,
- mnesia_frag_old_hash,
mnesia_index,
mnesia_kernel_sup,
mnesia_late_loader,
diff --git a/lib/mnesia/src/mnesia.erl b/lib/mnesia/src/mnesia.erl
index 6de7214776..dece995d39 100644
--- a/lib/mnesia/src/mnesia.erl
+++ b/lib/mnesia/src/mnesia.erl
@@ -316,7 +316,6 @@ ms() ->
mnesia_loader,
mnesia_frag,
mnesia_frag_hash,
- mnesia_frag_old_hash,
mnesia_index,
mnesia_kernel_sup,
mnesia_late_loader,
diff --git a/lib/mnesia/src/mnesia.hrl b/lib/mnesia/src/mnesia.hrl
index 0716dd87c8..da7e662288 100644
--- a/lib/mnesia/src/mnesia.hrl
+++ b/lib/mnesia/src/mnesia.hrl
@@ -49,12 +49,12 @@
%% It's important that counter is first, since we compare tid's
--record(tid,
+-record(tid,
{counter, %% serial no for tid
pid}). %% owner of tid
--record(tidstore,
+-record(tidstore,
{store, %% current ets table for tid
up_stores = [], %% list of upper layer stores for nested trans
level = 1}). %% transaction level
@@ -128,5 +128,4 @@
mnesia_lib:eval_debug_fun(I, C, ?FILE, ?LINE)).
-else.
-define(eval_debug_fun(I, C), ok).
--endif.
-
+-endif.
diff --git a/lib/mnesia/src/mnesia_checkpoint.erl b/lib/mnesia/src/mnesia_checkpoint.erl
index 9eb939e8d3..fc626940b4 100644
--- a/lib/mnesia/src/mnesia_checkpoint.erl
+++ b/lib/mnesia/src/mnesia_checkpoint.erl
@@ -909,7 +909,7 @@ retainer_loop(Cp = #checkpoint_args{name=Name}) ->
retainer_loop(Cp2);
{From, {iter_end, Iter}} ->
- retainer_fixtable(Iter#iter.oid_tab, false),
+ ?SAFE(retainer_fixtable(Iter#iter.oid_tab, false)),
Iters = Cp#checkpoint_args.iterators -- [Iter],
reply(From, Name, ok),
retainer_loop(Cp#checkpoint_args{iterators = Iters});
@@ -971,7 +971,8 @@ do_stop(Cp) ->
unset({checkpoint, Name}),
lists:foreach(fun deactivate_tab/1, Cp#checkpoint_args.retainers),
Iters = Cp#checkpoint_args.iterators,
- lists:foreach(fun(I) -> retainer_fixtable(I#iter.oid_tab, false) end, Iters).
+ [?SAFE(retainer_fixtable(Tab, false)) || #iter{main_tab=Tab} <- Iters],
+ ok.
deactivate_tab(R) ->
Name = R#retainer.cp_name,
@@ -1151,7 +1152,7 @@ do_change_copy(Cp, Tab, FromType, ToType) ->
Cp#checkpoint_args{retainers = Rs, nodes = writers(Rs)}.
check_iter(From, Iter) when Iter#iter.pid == From ->
- retainer_fixtable(Iter#iter.oid_tab, false),
+ ?SAFE(retainer_fixtable(Iter#iter.oid_tab, false)),
false;
check_iter(_From, _Iter) ->
true.
diff --git a/lib/mnesia/src/mnesia_event.erl b/lib/mnesia/src/mnesia_event.erl
index 7320d381ea..6f7531245f 100644
--- a/lib/mnesia/src/mnesia_event.erl
+++ b/lib/mnesia/src/mnesia_event.erl
@@ -114,7 +114,8 @@ handle_table_event({Oper, Record, TransId}, State) ->
handle_system_event({mnesia_checkpoint_activated, _Checkpoint}, State) ->
{ok, State};
-handle_system_event({mnesia_checkpoint_deactivated, _Checkpoint}, State) ->
+handle_system_event({mnesia_checkpoint_deactivated, Checkpoint}, State) ->
+ report_error("Checkpoint '~p' has been deactivated, last table copy deleted.\n",[Checkpoint]),
{ok, State};
handle_system_event({mnesia_up, Node}, State) ->
diff --git a/lib/mnesia/src/mnesia_frag.erl b/lib/mnesia/src/mnesia_frag.erl
index c6e812b36d..c39f30e140 100644
--- a/lib/mnesia/src/mnesia_frag.erl
+++ b/lib/mnesia/src/mnesia_frag.erl
@@ -58,9 +58,7 @@
-include("mnesia.hrl").
--define(OLD_HASH_MOD, mnesia_frag_old_hash).
-define(DEFAULT_HASH_MOD, mnesia_frag_hash).
-%%-define(DEFAULT_HASH_MOD, ?OLD_HASH_MOD). %% BUGBUG: New should be default
-record(frag_state,
{foreign_key,
@@ -80,7 +78,7 @@
lock(ActivityId, Opaque, {table , Tab}, LockKind) ->
case frag_names(Tab) of
[Tab] ->
- mnesia:lock(ActivityId, Opaque, {table, Tab}, LockKind);
+ mnesia:lock(ActivityId, Opaque, {table, Tab}, LockKind);
Frags ->
DeepNs = [mnesia:lock(ActivityId, Opaque, {table, F}, LockKind) ||
F <- Frags],
@@ -321,7 +319,7 @@ init_select(Tid,Opaque,Tab,Pat,Limit,LockKind) ->
{'EXIT', _} ->
mnesia:select(Tid, Opaque, Tab, Pat, Limit,LockKind);
FH ->
- FragNumbers = verify_numbers(FH,Pat),
+ FragNumbers = verify_numbers(FH,Pat),
Fun = fun(Num) ->
Name = n_to_frag_name(Tab, Num),
Node = val({Name, where_to_read}),
@@ -336,19 +334,19 @@ init_select(Tid,Opaque,Tab,Pat,Limit,LockKind) ->
end.
select_cont(_Tid,_,{frag_cont, '$end_of_table', [],_}) -> '$end_of_table';
-select_cont(Tid,Ts,{frag_cont, '$end_of_table', [{Tab,Node,Type}|Rest],Args}) ->
+select_cont(Tid,Ts,{frag_cont, '$end_of_table', [{Tab,Node,Type}|Rest],Args}) ->
{Spec,LockKind,Limit} = Args,
InitFun = fun(FixedSpec) -> mnesia:dirty_sel_init(Node,Tab,FixedSpec,Limit,Type) end,
Res = mnesia:fun_select(Tid,Ts,Tab,Spec,LockKind,Tab,InitFun,Limit,Node,Type),
frag_sel_cont(Res, Rest, Args);
-select_cont(Tid,Ts,{frag_cont, Cont, TabL, Args}) ->
+select_cont(Tid,Ts,{frag_cont, Cont, TabL, Args}) ->
frag_sel_cont(mnesia:select_cont(Tid,Ts,Cont),TabL,Args);
select_cont(Tid,Ts,Else) -> %% Not a fragmented table
mnesia:select_cont(Tid,Ts,Else).
frag_sel_cont('$end_of_table', [],_) ->
'$end_of_table';
-frag_sel_cont('$end_of_table', TabL,Args) ->
+frag_sel_cont('$end_of_table', TabL,Args) ->
{[], {frag_cont, '$end_of_table', TabL,Args}};
frag_sel_cont({Recs,Cont}, TabL,Args) ->
{Recs, {frag_cont, Cont, TabL,Args}}.
@@ -358,9 +356,9 @@ do_select(ActivityId, Opaque, Tab, MatchSpec, LockKind) ->
{'EXIT', _} ->
mnesia:select(ActivityId, Opaque, Tab, MatchSpec, LockKind);
FH ->
- FragNumbers = verify_numbers(FH,MatchSpec),
+ FragNumbers = verify_numbers(FH,MatchSpec),
Fun = fun(Num) ->
- Name = n_to_frag_name(Tab, Num),
+ Name = n_to_frag_name(Tab, Num),
Node = val({Name, where_to_read}),
mnesia:lock(ActivityId, Opaque, {table, Name}, LockKind),
{Name, Node}
@@ -398,7 +396,7 @@ do_select(ActivityId, Opaque, Tab, MatchSpec, LockKind) ->
verify_numbers(FH,MatchSpec) ->
HashState = FH#frag_state.hash_state,
- FragNumbers =
+ FragNumbers =
case FH#frag_state.hash_module of
HashMod when HashMod == ?DEFAULT_HASH_MOD ->
?DEFAULT_HASH_MOD:match_spec_to_frag_numbers(HashState, MatchSpec);
@@ -434,7 +432,7 @@ local_select(ReplyTo, Ref, RemoteNameNodes, MatchSpec) ->
end,
unlink(ReplyTo),
exit(normal).
-
+
remote_select(ReplyTo, Ref, NameNodes, MatchSpec) ->
do_remote_select(ReplyTo, Ref, NameNodes, MatchSpec).
@@ -805,22 +803,22 @@ make_deactivate(Tab) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Add a fragment to a fragmented table and fill it with half of
%% the records from one of the old fragments
-
+
make_multi_add_frag(Tab, SortedNs) when is_list(SortedNs) ->
verify_multi(Tab),
Ops = make_add_frag(Tab, SortedNs),
%% Propagate to foreigners
MoreOps = [make_add_frag(T, SortedNs) || T <- lookup_foreigners(Tab)],
- [Ops | MoreOps];
+ [Ops | MoreOps];
make_multi_add_frag(Tab, SortedNs) ->
mnesia:abort({bad_type, Tab, SortedNs}).
verify_multi(Tab) ->
FH = lookup_frag_hash(Tab),
ForeignKey = FH#frag_state.foreign_key,
- mnesia_schema:verify(undefined, ForeignKey,
- {combine_error, Tab,
+ mnesia_schema:verify(undefined, ForeignKey,
+ {combine_error, Tab,
"Op only allowed via foreign table",
{foreign_key, ForeignKey}}).
@@ -839,7 +837,7 @@ make_frag_names_and_acquire_locks(Tab, N, FragIndecies, DoNotLockN) ->
end,
FragNames = erlang:make_tuple(N, undefined),
lists:foldl(Fun, FragNames, FragIndecies).
-
+
make_add_frag(Tab, SortedNs) ->
Cs = mnesia_schema:incr_version(val({Tab, cstruct})),
mnesia_schema:ensure_active(Cs),
@@ -849,8 +847,8 @@ make_add_frag(Tab, SortedNs) ->
FragNames = make_frag_names_and_acquire_locks(Tab, N, WriteIndecies, true),
NewFrag = element(N, FragNames),
- NR = length(Cs#cstruct.ram_copies),
- ND = length(Cs#cstruct.disc_copies),
+ NR = length(Cs#cstruct.ram_copies),
+ ND = length(Cs#cstruct.disc_copies),
NDO = length(Cs#cstruct.disc_only_copies),
NExt = length(Cs#cstruct.external_copies),
NewCs = Cs#cstruct{name = NewFrag,
@@ -859,7 +857,7 @@ make_add_frag(Tab, SortedNs) ->
disc_copies = [],
disc_only_copies = [],
external_copies = []},
-
+
{NewCs2, _, _} = set_frag_nodes(NR, ND, NDO, NExt, NewCs, SortedNs, []),
[NewOp] = mnesia_schema:make_create_table(NewCs2),
@@ -944,7 +942,7 @@ do_split(FH, OldN, FragNames, [Rec | Recs], Ops) ->
Key = element(2, Rec),
NewOid = {NewFrag, Key},
OldOid = {OldFrag, Key},
- Ops2 = [{op, rec, unknown, {NewOid, [Rec], write}},
+ Ops2 = [{op, rec, unknown, {NewOid, [Rec], write}},
{op, rec, unknown, {OldOid, [OldOid], delete}} | Ops],
do_split(FH, OldN, FragNames, Recs, Ops2);
_NewFrag ->
@@ -958,7 +956,7 @@ do_split(_FH, _OldN, _FragNames, [], Ops) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Delete a fragment from a fragmented table
%% and merge its records with another fragment
-
+
make_multi_del_frag(Tab) ->
verify_multi(Tab),
Ops = make_del_frag(Tab),
@@ -1064,7 +1062,7 @@ do_merge(FH, OldN, FragNames, [Rec | Recs], Ops) ->
Key = element(2, Rec),
NewOid = {NewFrag, Key},
OldOid = {OldFrag, Key},
- Ops2 = [{op, rec, unknown, {NewOid, [Rec], write}},
+ Ops2 = [{op, rec, unknown, {NewOid, [Rec], write}},
{op, rec, unknown, {OldOid, [OldOid], delete}} | Ops],
do_merge(FH, OldN, FragNames, Recs, Ops2);
_NewFrag ->
@@ -1077,7 +1075,7 @@ do_merge(FH, OldN, FragNames, [Rec | Recs], Ops) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Add a node to the node pool of a fragmented table
-
+
make_multi_add_node(Tab, Node) ->
verify_multi(Tab),
Ops = make_add_node(Tab, Node),
@@ -1085,7 +1083,7 @@ make_multi_add_node(Tab, Node) ->
%% Propagate to foreigners
MoreOps = [make_add_node(T, Node) || T <- lookup_foreigners(Tab)],
[Ops | MoreOps].
-
+
make_add_node(Tab, Node) when is_atom(Node) ->
Pool = lookup_prop(Tab, node_pool),
case lists:member(Node, Pool) of
@@ -1114,7 +1112,7 @@ make_multi_del_node(Tab, Node) ->
%% Propagate to foreigners
MoreOps = [make_del_node(T, Node) || T <- lookup_foreigners(Tab)],
[Ops | MoreOps].
-
+
make_del_node(Tab, Node) when is_atom(Node) ->
Cs = mnesia_schema:incr_version(val({Tab, cstruct})),
mnesia_schema:ensure_active(Cs),
@@ -1147,8 +1145,8 @@ remove_node(Node, Cs) ->
case lists:member(Node, Pool) of
true ->
Pool2 = Pool -- [Node],
- Props = lists:keyreplace(node_pool, 1,
- Cs#cstruct.frag_properties,
+ Props = lists:keyreplace(node_pool, 1,
+ Cs#cstruct.frag_properties,
{node_pool, Pool2}),
{Cs#cstruct{frag_properties = Props}, true};
false ->
@@ -1180,18 +1178,10 @@ props_to_frag_hash(Tab, Props) ->
T when T == Tab ->
Foreign = mnesia_schema:pick(Tab, foreign_key, Props, must),
N = mnesia_schema:pick(Tab, n_fragments, Props, must),
-
case mnesia_schema:pick(Tab, hash_module, Props, undefined) of
undefined ->
- Split = mnesia_schema:pick(Tab, next_n_to_split, Props, must),
- Doubles = mnesia_schema:pick(Tab, n_doubles, Props, must),
- FH = {frag_hash, Foreign, N, Split, Doubles},
- HashState = ?OLD_HASH_MOD:init_state(Tab, FH),
- #frag_state{foreign_key = Foreign,
- n_fragments = N,
- hash_module = ?OLD_HASH_MOD,
- hash_state = HashState};
- HashMod ->
+ no_hash;
+ HashMod ->
HashState = mnesia_schema:pick(Tab, hash_state, Props, must),
#frag_state{foreign_key = Foreign,
n_fragments = N,
@@ -1216,13 +1206,9 @@ lookup_frag_hash(Tab) ->
case ?catch_val({Tab, frag_hash}) of
FH when is_record(FH, frag_state) ->
FH;
- {frag_hash, K, N, _S, _D} = FH ->
+ {frag_hash, _K, _N, _S, _D} ->
%% Old style. Kept for backwards compatibility.
- HashState = ?OLD_HASH_MOD:init_state(Tab, FH),
- #frag_state{foreign_key = K,
- n_fragments = N,
- hash_module = ?OLD_HASH_MOD,
- hash_state = HashState};
+ mnesia:abort({no_hash, Tab, frag_properties, frag_hash});
{'EXIT', _} ->
mnesia:abort({no_exists, Tab, frag_properties, frag_hash})
end.
@@ -1249,10 +1235,10 @@ key_pos(FH) ->
case FH#frag_state.foreign_key of
undefined ->
2;
- {_ForeignTab, Pos} ->
+ {_ForeignTab, Pos} ->
Pos
end.
-
+
%% Returns name of fragment table
key_to_frag_name({BaseTab, _} = Tab, Key) ->
N = key_to_frag_number(Tab, Key),
diff --git a/lib/mnesia/src/mnesia_frag_old_hash.erl b/lib/mnesia/src/mnesia_frag_old_hash.erl
deleted file mode 100644
index b246c76236..0000000000
--- a/lib/mnesia/src/mnesia_frag_old_hash.erl
+++ /dev/null
@@ -1,133 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2002-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
-
-%%
-%%%----------------------------------------------------------------------
-%%% Purpose : Implements hashing functionality for fragmented tables
-%%%----------------------------------------------------------------------
-
--module(mnesia_frag_old_hash).
-%%-behaviour(mnesia_frag_hash).
-
--compile({nowarn_deprecated_function, {erlang,hash,2}}).
-
-%% Hashing callback functions
--export([
- init_state/2,
- add_frag/1,
- del_frag/1,
- key_to_frag_number/2,
- match_spec_to_frag_numbers/2
- ]).
-
--record(old_hash_state,
- {n_fragments,
- next_n_to_split,
- n_doubles}).
-
-%% Old style. Kept for backwards compatibility.
--record(frag_hash,
- {foreign_key,
- n_fragments,
- next_n_to_split,
- n_doubles}).
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-init_state(_Tab, InitialState) when InitialState == undefined ->
- #old_hash_state{n_fragments = 1,
- next_n_to_split = 1,
- n_doubles = 0};
-init_state(_Tab, FH) when is_record(FH, frag_hash) ->
- %% Old style. Kept for backwards compatibility.
- #old_hash_state{n_fragments = FH#frag_hash.n_fragments,
- next_n_to_split = FH#frag_hash.next_n_to_split,
- n_doubles = FH#frag_hash.n_doubles}.
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-add_frag(State) when is_record(State, old_hash_state) ->
- SplitN = State#old_hash_state.next_n_to_split,
- P = SplitN + 1,
- L = State#old_hash_state.n_doubles,
- NewN = State#old_hash_state.n_fragments + 1,
- State2 = case trunc(math:pow(2, L)) + 1 of
- P2 when P2 == P ->
- State#old_hash_state{n_fragments = NewN,
- next_n_to_split = 1,
- n_doubles = L + 1};
- _ ->
- State#old_hash_state{n_fragments = NewN,
- next_n_to_split = P}
- end,
- {State2, [SplitN], [NewN]}.
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-del_frag(State) when is_record(State, old_hash_state) ->
- P = State#old_hash_state.next_n_to_split - 1,
- L = State#old_hash_state.n_doubles,
- N = State#old_hash_state.n_fragments,
- if
- P < 1 ->
- L2 = L - 1,
- MergeN = trunc(math:pow(2, L2)),
- State2 = State#old_hash_state{n_fragments = N - 1,
- next_n_to_split = MergeN,
- n_doubles = L2},
- {State2, [N], [MergeN]};
- true ->
- MergeN = P,
- State2 = State#old_hash_state{n_fragments = N - 1,
- next_n_to_split = MergeN},
- {State2, [N], [MergeN]}
- end.
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-key_to_frag_number(State, Key) when is_record(State, old_hash_state) ->
- L = State#old_hash_state.n_doubles,
- A = erlang:hash(Key, trunc(math:pow(2, L))),
- P = State#old_hash_state.next_n_to_split,
- if
- A < P ->
- erlang:hash(Key, trunc(math:pow(2, L + 1)));
- true ->
- A
- end.
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-match_spec_to_frag_numbers(State, MatchSpec) when is_record(State, old_hash_state) ->
- case MatchSpec of
- [{HeadPat, _, _}] when is_tuple(HeadPat), tuple_size(HeadPat) > 2 ->
- KeyPat = element(2, HeadPat),
- case has_var(KeyPat) of
- false ->
- [key_to_frag_number(State, KeyPat)];
- true ->
- lists:seq(1, State#old_hash_state.n_fragments)
- end;
- _ ->
- lists:seq(1, State#old_hash_state.n_fragments)
- end.
-
-has_var(Pat) ->
- mnesia:has_var(Pat).
diff --git a/lib/mnesia/src/mnesia_monitor.erl b/lib/mnesia/src/mnesia_monitor.erl
index ab78c9b13e..ff58974aba 100644
--- a/lib/mnesia/src/mnesia_monitor.erl
+++ b/lib/mnesia/src/mnesia_monitor.erl
@@ -169,7 +169,7 @@ check_protocol([{Node, {accept, Mon, Version, Protocol}} | Tail], Protocols) ->
verbose("Failed to connect with ~p. ~p protocols rejected. "
"expected version = ~p, expected protocol = ~p~n",
[Node, Protocols, Version, Protocol]),
- unlink(Mon), % Get rid of unneccessary link
+ unlink(Mon), % Get rid of unnecessary link
check_protocol(Tail, Protocols)
end;
check_protocol([{Node, {reject, _Mon, Version, Protocol}} | Tail], Protocols) ->
diff --git a/lib/mnesia/src/mnesia_schema.erl b/lib/mnesia/src/mnesia_schema.erl
index 0e4017e4c3..b0d7965886 100644
--- a/lib/mnesia/src/mnesia_schema.erl
+++ b/lib/mnesia/src/mnesia_schema.erl
@@ -1941,7 +1941,7 @@ make_change_table_copy_type(Tab, Node, ToS) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% change index functions ....
-%% Pos is allready added by 1 in both of these functions
+%% Pos is already added by 1 in both of these functions
add_table_index(Tab, Pos) ->
schema_transaction(fun() -> do_add_table_index(Tab, Pos) end).
diff --git a/lib/mnesia/test/mnesia_evil_backup.erl b/lib/mnesia/test/mnesia_evil_backup.erl
index e745ec9b04..044cf501fd 100644
--- a/lib/mnesia/test/mnesia_evil_backup.erl
+++ b/lib/mnesia/test/mnesia_evil_backup.erl
@@ -723,18 +723,18 @@ bup_records(File, Mod) ->
exit(Reason)
end.
-sops_with_checkpoint(doc) ->
+sops_with_checkpoint(doc) ->
["Test schema operations during a checkpoint"];
sops_with_checkpoint(suite) -> [];
sops_with_checkpoint(Config) when is_list(Config) ->
- Ns = ?acquire_nodes(2, Config),
-
+ Ns = [N1,N2] = ?acquire_nodes(2, Config),
+
?match({ok, cp1, Ns}, mnesia:activate_checkpoint([{name, cp1},{max,mnesia:system_info(tables)}])),
- Tab = tab,
+ Tab = tab,
?match({atomic, ok}, mnesia:create_table(Tab, [{disc_copies,Ns}])),
OldRecs = [{Tab, K, -K} || K <- lists:seq(1, 5)],
[mnesia:dirty_write(R) || R <- OldRecs],
-
+
?match({ok, cp2, Ns}, mnesia:activate_checkpoint([{name, cp2},{max,mnesia:system_info(tables)}])),
File1 = "cp1_delete_me.BUP",
?match(ok, mnesia:dirty_write({Tab,6,-6})),
@@ -742,16 +742,16 @@ sops_with_checkpoint(Config) when is_list(Config) ->
?match(ok, mnesia:dirty_write({Tab,7,-7})),
File2 = "cp2_delete_me.BUP",
?match(ok, mnesia:backup_checkpoint(cp2, File2)),
-
+
?match(ok, mnesia:deactivate_checkpoint(cp1)),
?match(ok, mnesia:backup_checkpoint(cp2, File1)),
?match(ok, mnesia:dirty_write({Tab,8,-8})),
-
+
?match({atomic,ok}, mnesia:delete_table(Tab)),
?match({error,_}, mnesia:backup_checkpoint(cp2, File2)),
?match({'EXIT',_}, mnesia:dirty_write({Tab,9,-9})),
- ?match({atomic,_}, mnesia:restore(File1, [{default_op, recreate_tables}])),
+ ?match({atomic,_}, mnesia:restore(File1, [{default_op, recreate_tables}])),
Test = fun(N) when N > 5 -> ?error("To many records in backup ~p ~n", [N]);
(N) -> case mnesia:dirty_read(Tab,N) of
[{Tab,N,B}] when -B =:= N -> ok;
@@ -759,8 +759,29 @@ sops_with_checkpoint(Config) when is_list(Config) ->
end
end,
[Test(N) || N <- mnesia:dirty_all_keys(Tab)],
- ?match({aborted,enoent}, mnesia:restore(File2, [{default_op, recreate_tables}])),
-
+ ?match({aborted,enoent}, mnesia:restore(File2, [{default_op, recreate_tables}])),
+
+ %% Mnesia crashes when deleting a table during backup
+ ?match([], mnesia_test_lib:stop_mnesia([N2])),
+ Tab2 = ram,
+ ?match({atomic, ok}, mnesia:create_table(Tab2, [{ram_copies,[N1]}])),
+ ?match({ok, cp3, _}, mnesia:activate_checkpoint([{name, cp3},
+ {ram_overrides_dump,true},
+ {min,[Tab2]}])),
+ Write = fun Loop (N) ->
+ case N > 0 of
+ true ->
+ mnesia:dirty_write({Tab2, N+100, N+100}),
+ Loop(N-1);
+ false ->
+ ok
+ end
+ end,
+ ok = Write(100000),
+ spawn_link(fun() -> ?match({atomic, ok},mnesia:delete_table(Tab2)) end),
+
+ %% We don't check the result here; it depends on the timing of the call above
+ mnesia:backup_checkpoint(cp3, File2),
file:delete(File1), file:delete(File2),
- ?verify_mnesia(Ns, []).
+ ?verify_mnesia([N1], [N2]).
diff --git a/lib/mnesia/vsn.mk b/lib/mnesia/vsn.mk
index 439b21e58c..e272a469bb 100644
--- a/lib/mnesia/vsn.mk
+++ b/lib/mnesia/vsn.mk
@@ -1 +1 @@
-MNESIA_VSN = 4.14.2
+MNESIA_VSN = 4.14.3
diff --git a/lib/observer/src/cdv_bin_cb.erl b/lib/observer/src/cdv_bin_cb.erl
index 0cea1fdcf0..200c728a62 100644
--- a/lib/observer/src/cdv_bin_cb.erl
+++ b/lib/observer/src/cdv_bin_cb.erl
@@ -58,7 +58,7 @@ binary_to_term_fun(Bin) ->
try binary_to_term(Bin) of
Term -> plain_html(io_lib:format("~p",[Term]))
catch error:badarg ->
- Warning = "This binary can not be coverted to an Erlang term",
+ Warning = "This binary can not be converted to an Erlang term",
observer_html_lib:warning(Warning)
end
end.
diff --git a/lib/observer/src/cdv_detail_wx.erl b/lib/observer/src/cdv_detail_wx.erl
index 44f121f359..5782339183 100644
--- a/lib/observer/src/cdv_detail_wx.erl
+++ b/lib/observer/src/cdv_detail_wx.erl
@@ -55,7 +55,7 @@ init([Id, Data, ParentFrame, Callback, Parent]) ->
end,
{stop,normal};
{info,Info} ->
- observer_lib:display_info_dialog(Info),
+ observer_lib:display_info_dialog(ParentFrame,Info),
{stop,normal}
end.
diff --git a/lib/observer/src/observer_app_wx.erl b/lib/observer/src/observer_app_wx.erl
index 936b2783e2..80a41fdde9 100644
--- a/lib/observer/src/observer_app_wx.erl
+++ b/lib/observer/src/observer_app_wx.erl
@@ -191,8 +191,8 @@ handle_event(#wx{event=#wxMouse{type=Type, x=X0, y=Y0}},
end;
handle_event(#wx{event=#wxCommand{type=command_menu_selected}},
- State = #state{sel=undefined}) ->
- observer_lib:display_info_dialog("Select process first"),
+ State = #state{panel=Panel,sel=undefined}) ->
+ observer_lib:display_info_dialog(Panel,"Select process first"),
{noreply, State};
handle_event(#wx{id=?ID_PROC_INFO, event=#wxCommand{type=command_menu_selected}},
@@ -205,7 +205,7 @@ handle_event(#wx{id=?ID_PROC_MSG, event=#wxCommand{type=command_menu_selected}},
case observer_lib:user_term(Panel, "Enter message", "") of
cancel -> ok;
{ok, Term} -> Pid ! Term;
- {error, Error} -> observer_lib:display_info_dialog(Error)
+ {error, Error} -> observer_lib:display_info_dialog(Panel,Error)
end,
{noreply, State};
@@ -214,7 +214,7 @@ handle_event(#wx{id=?ID_PROC_KILL, event=#wxCommand{type=command_menu_selected}}
case observer_lib:user_term(Panel, "Enter Exit Reason", "kill") of
cancel -> ok;
{ok, Term} -> exit(Pid, Term);
- {error, Error} -> observer_lib:display_info_dialog(Error)
+ {error, Error} -> observer_lib:display_info_dialog(Panel,Error)
end,
{noreply, State};
diff --git a/lib/observer/src/observer_lib.erl b/lib/observer/src/observer_lib.erl
index 1eaba31a3a..47844c1307 100644
--- a/lib/observer/src/observer_lib.erl
+++ b/lib/observer/src/observer_lib.erl
@@ -20,7 +20,7 @@
-module(observer_lib).
-export([get_wx_parent/1,
- display_info_dialog/1, display_yes_no_dialog/1,
+ display_info_dialog/2, display_yes_no_dialog/1,
display_progress_dialog/2, destroy_progress_dialog/0,
wait_for_progress/0, report_progress/1,
user_term/3, user_term_multiline/3,
@@ -105,10 +105,10 @@ setup_timer(Bool, {Timer, Old}) ->
timer:cancel(Timer),
setup_timer(Bool, {false, Old}).
-display_info_dialog(Str) ->
- display_info_dialog("",Str).
-display_info_dialog(Title,Str) ->
- Dlg = wxMessageDialog:new(wx:null(), Str, [{caption,Title}]),
+display_info_dialog(Parent,Str) ->
+ display_info_dialog(Parent,"",Str).
+display_info_dialog(Parent,Title,Str) ->
+ Dlg = wxMessageDialog:new(Parent, Str, [{caption,Title}]),
wxMessageDialog:showModal(Dlg),
wxMessageDialog:destroy(Dlg),
ok.
@@ -724,7 +724,7 @@ progress_loop(Title,PD,Caller) ->
if is_list(Reason) -> Reason;
true -> file:format_error(Reason)
end,
- display_info_dialog("Crashdump Viewer Error",FailMsg),
+ display_info_dialog(PD,"Crashdump Viewer Error",FailMsg),
Caller ! error,
unregister(?progress_handler),
unlink(Caller);
diff --git a/lib/observer/src/observer_port_wx.erl b/lib/observer/src/observer_port_wx.erl
index 53ba3fa607..c21d2705c0 100644
--- a/lib/observer/src/observer_port_wx.erl
+++ b/lib/observer/src/observer_port_wx.erl
@@ -267,10 +267,19 @@ handle_cast(Event, _State) ->
error({unhandled_cast, Event}).
handle_info({portinfo_open, PortIdStr},
- State = #state{grid=Grid, ports=Ports, open_wins=Opened}) ->
- Port = lists:keyfind(PortIdStr,#port.id_str,Ports),
- NewOpened = display_port_info(Grid, Port, Opened),
- {noreply, State#state{open_wins = NewOpened}};
+ State = #state{node=Node, grid=Grid, opt=Opt, open_wins=Opened}) ->
+ Ports0 = get_ports(Node),
+ Ports = update_grid(Grid, Opt, Ports0),
+ Port = lists:keyfind(PortIdStr, #port.id_str, Ports),
+ NewOpened =
+ case Port of
+ false ->
+ self() ! {error,"No such port: " ++ PortIdStr},
+ Opened;
+ _ ->
+ display_port_info(Grid, Port, Opened)
+ end,
+ {noreply, State#state{ports=Ports, open_wins=NewOpened}};
handle_info(refresh_interval, State = #state{node=Node, grid=Grid, opt=Opt,
ports=OldPorts}) ->
@@ -296,8 +305,9 @@ handle_info(not_active, State = #state{timer = Timer0}) ->
Timer = observer_lib:stop_timer(Timer0),
{noreply, State#state{timer=Timer}};
-handle_info({error, Error}, State) ->
- handle_error(Error),
+handle_info({error, Error}, #state{panel=Panel} = State) ->
+ Str = io_lib:format("ERROR: ~s~n",[Error]),
+ observer_lib:display_info_dialog(Panel, Str),
{noreply, State};
handle_info(_Event, State) ->
@@ -501,11 +511,6 @@ filter_monitor_info() ->
[Pid || {process, Pid} <- Ms]
end.
-
-handle_error(Foo) ->
- Str = io_lib:format("ERROR: ~s~n",[Foo]),
- observer_lib:display_info_dialog(Str).
-
update_grid(Grid, Opt, Ports) ->
wx:batch(fun() -> update_grid2(Grid, Opt, Ports) end).
update_grid2(Grid, #opt{sort_key=Sort,sort_incr=Dir}, Ports) ->
diff --git a/lib/observer/src/observer_procinfo.erl b/lib/observer/src/observer_procinfo.erl
index c13b164ff9..21eb9facc5 100644
--- a/lib/observer/src/observer_procinfo.erl
+++ b/lib/observer/src/observer_procinfo.erl
@@ -92,7 +92,7 @@ init([Pid, ParentFrame, Parent]) ->
observer_wx:return_to_localnode(ParentFrame, node(Pid)),
{stop, badrpc};
process_undefined ->
- observer_lib:display_info_dialog("No such alive process"),
+ observer_lib:display_info_dialog(ParentFrame,"No such alive process"),
{stop, normal}
end.
diff --git a/lib/observer/src/observer_tv_wx.erl b/lib/observer/src/observer_tv_wx.erl
index 968a7620aa..4356cb890c 100644
--- a/lib/observer/src/observer_tv_wx.erl
+++ b/lib/observer/src/observer_tv_wx.erl
@@ -238,8 +238,9 @@ handle_info(not_active, State = #state{timer = Timer0}) ->
Timer = observer_lib:stop_timer(Timer0),
{noreply, State#state{timer=Timer}};
-handle_info({error, Error}, #state{opt=Opt}=State) ->
- handle_error(Error),
+handle_info({error, Error}, #state{panel=Panel,opt=Opt}=State) ->
+ Str = io_lib:format("ERROR: ~s~n",[Error]),
+ observer_lib:display_info_dialog(Panel,Str),
case Opt#opt.type of
mnesia -> wxMenuBar:check(observer_wx:get_menubar(), ?ID_ETS, true);
_ -> ok
@@ -365,10 +366,6 @@ list_to_strings([A]) -> integer_to_list(A);
list_to_strings([A|B]) ->
integer_to_list(A) ++ " ," ++ list_to_strings(B).
-handle_error(Foo) ->
- Str = io_lib:format("ERROR: ~s~n",[Foo]),
- observer_lib:display_info_dialog(Str).
-
update_grid(Grid, Opt, Tables) ->
wx:batch(fun() -> update_grid2(Grid, Opt, Tables) end).
update_grid2(Grid, #opt{sort_key=Sort,sort_incr=Dir}, Tables) ->
diff --git a/lib/observer/src/observer_wx.erl b/lib/observer/src/observer_wx.erl
index 5732c12006..3031a1f90d 100644
--- a/lib/observer/src/observer_wx.erl
+++ b/lib/observer/src/observer_wx.erl
@@ -467,10 +467,10 @@ handle_info(_Info, State) ->
stop_servers(#state{node=Node, log=LogOn, sys_panel=Sys, pro_panel=Procs, tv_panel=TVs,
trace_panel=Trace, app_panel=Apps, perf_panel=Perfs,
- allc_panel=Alloc} = _State) ->
+ allc_panel=Alloc, port_panel=Ports} = _State) ->
LogOn andalso rpc:block_call(Node, rb, stop, []),
Me = self(),
- Tabs = [Sys, Procs, TVs, Trace, Apps, Perfs, Alloc],
+ Tabs = [Sys, Procs, Ports, TVs, Trace, Apps, Perfs, Alloc],
Stop = fun() ->
try
_ = [wx_object:stop(Panel) || Panel <- Tabs],
@@ -580,9 +580,10 @@ get_active_pid(#state{notebook=Notebook, pro_panel=Pro, sys_panel=Sys,
pid2panel(Pid, #state{pro_panel=Pro, sys_panel=Sys,
tv_panel=Tv, trace_panel=Trace, app_panel=App,
- perf_panel=Perf, allc_panel=Alloc}) ->
+ perf_panel=Perf, allc_panel=Alloc, port_panel=Port}) ->
case Pid of
Pro -> "Processes";
+ Port -> "Ports";
Sys -> "System";
Tv -> "Table Viewer" ;
Trace -> ?TRACE_STR;
diff --git a/lib/observer/test/observer_SUITE.erl b/lib/observer/test/observer_SUITE.erl
index 4c882ad951..b5fb027878 100644
--- a/lib/observer/test/observer_SUITE.erl
+++ b/lib/observer/test/observer_SUITE.erl
@@ -34,7 +34,8 @@
%% Test cases
-export([app_file/1, appup_file/1,
- basic/1, process_win/1, table_win/1
+ basic/1, process_win/1, table_win/1,
+ port_win_when_tab_not_initiated/1
]).
%% Default timetrap timeout (set in init_per_testcase)
@@ -49,7 +50,8 @@ groups() ->
[{gui, [],
[basic,
process_win,
- table_win
+ table_win,
+ port_win_when_tab_not_initiated
]
}].
@@ -299,6 +301,17 @@ table_win(Config) when is_list(Config) ->
observer:stop(),
ok.
+%% Test PR-1296/OTP-14151
+%% Clicking a link to a port before the port tab has been activated for
+%% the first time used to crash observer.
+port_win_when_tab_not_initiated(Config) ->
+ {ok,Port} = gen_tcp:listen(0,[]),
+ ok = observer:start(),
+ Notebook = setup_whitebox_testing(),
+ observer ! {open_link,erlang:port_to_list(Port)},
+ timer:sleep(1000),
+ observer:stop(),
+ ok.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
diff --git a/lib/orber/src/orber_iiop.hrl b/lib/orber/src/orber_iiop.hrl
index 6bc82fb6d6..1b5d6a84ef 100644
--- a/lib/orber/src/orber_iiop.hrl
+++ b/lib/orber/src/orber_iiop.hrl
@@ -279,8 +279,8 @@
%%----------------------------------------------------------------------
%% Profile Body
%%
-%% iiop_version: describes the version of IIOP that the agent at the
-%% specified adress is prepared to receive.
+%% iiop_version: describes the version of IIOP that the agent at the
+%% specified address is prepared to receive.
%% host: identifies the internet host to which the GIOP messages
%% for the specified object may be sent.
%% port: contains the TCP?IP port number where the target agnet is listening
diff --git a/lib/orber/src/orber_initial_references.erl b/lib/orber/src/orber_initial_references.erl
index 738d702088..8caf69a68b 100644
--- a/lib/orber/src/orber_initial_references.erl
+++ b/lib/orber/src/orber_initial_references.erl
@@ -89,7 +89,7 @@ install(Timeout, Options) ->
end,
Wait = mnesia:wait_for_tables([orber_references], Timeout),
- %% Check if any error has occured yet. If there are errors, return them.
+ %% Check if any error has occurred yet. If there are errors, return them.
if
DB_Result == {atomic, ok},
Wait == ok ->
diff --git a/lib/orber/src/orber_objectkeys.erl b/lib/orber/src/orber_objectkeys.erl
index 1233e4e721..3b1851e9b5 100644
--- a/lib/orber/src/orber_objectkeys.erl
+++ b/lib/orber/src/orber_objectkeys.erl
@@ -344,7 +344,7 @@ install(Timeout, Options) ->
end,
Wait = mnesia:wait_for_tables([orber_objkeys], Timeout),
- %% Check if any error has occured yet. If there are errors, return them.
+ %% Check if any error has occurred yet. If there are errors, return them.
if
DB_Result == {atomic, ok},
Wait == ok ->
diff --git a/lib/os_mon/src/memsup.erl b/lib/os_mon/src/memsup.erl
index 4729d090f8..0a9a883390 100644
--- a/lib/os_mon/src/memsup.erl
+++ b/lib/os_mon/src/memsup.erl
@@ -701,6 +701,7 @@ get_os_wordsize_with_uname() ->
"sparc64" -> 64;
"amd64" -> 64;
"ppc64" -> 64;
+ "s390x" -> 64;
_ -> 32
end.
diff --git a/lib/parsetools/src/leex.erl b/lib/parsetools/src/leex.erl
index 602e47404d..e0f37ae9df 100644
--- a/lib/parsetools/src/leex.erl
+++ b/lib/parsetools/src/leex.erl
@@ -1264,7 +1264,7 @@ pack_dfa([], _, Rs, PDFA) -> {PDFA,Rs}.
%% {Action, AcceptLength, CurrTokLen, RestChars, Line, State}.
%% The return CurrTokLen is always the current number of characters
-%% scanned in the current token. The returns have the follwoing
+%% scanned in the current token. The returns have the following
%% meanings:
%% {Action, AcceptLength, RestChars, Line} -
%% The scanner has reached an accepting end-state, for example after
@@ -1281,7 +1281,7 @@ pack_dfa([], _, Rs, PDFA) -> {PDFA,Rs}.
%%
%% {reject, AcceptLength, CurrTokLen, RestChars, Line, State} -
%% {Action, AcceptLength, CurrTokLen, RestChars, Line, State} -
-%% The scanner has reached a non-accepting transistion state. If
+%% The scanner has reached a non-accepting transition state. If
%% RestChars == [] we need to get more characters to continue.
%% Otherwise if 'reject' then no accepting state has been reached it
%% is an error. If we have an Action and AcceptLength then these are
diff --git a/lib/percept/AUTHORS b/lib/percept/AUTHORS
deleted file mode 100644
index f6c040ae76..0000000000
--- a/lib/percept/AUTHORS
+++ /dev/null
@@ -1,4 +0,0 @@
-Original Authors and Contributors:
-
-Björn-Egil Dahlberg
-Magnus Thoäng
diff --git a/lib/percept/Makefile b/lib/percept/Makefile
deleted file mode 100644
index 1f51bd2fef..0000000000
--- a/lib/percept/Makefile
+++ /dev/null
@@ -1,35 +0,0 @@
-#
-# %CopyrightBegin%
-#
-# Copyright Ericsson AB 2007-2016. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# %CopyrightEnd%
-#
-include $(ERL_TOP)/make/target.mk
-include $(ERL_TOP)/make/$(TARGET)/otp.mk
-
-
-# ----------------------------------------------------
-# Common Macros
-# ----------------------------------------------------
-
-SUB_DIRECTORIES = src priv doc/src
-
-SPECIAL_TARGETS =
-
-# ----------------------------------------------------
-# Default Subdir Targets
-# ----------------------------------------------------
-include $(ERL_TOP)/make/otp_subdir.mk
diff --git a/lib/percept/c_src/.gitignore b/lib/percept/c_src/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/lib/percept/c_src/.gitignore
+++ /dev/null
diff --git a/lib/percept/doc/html/.gitignore b/lib/percept/doc/html/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/lib/percept/doc/html/.gitignore
+++ /dev/null
diff --git a/lib/percept/doc/man3/.gitignore b/lib/percept/doc/man3/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/lib/percept/doc/man3/.gitignore
+++ /dev/null
diff --git a/lib/percept/doc/pdf/.gitignore b/lib/percept/doc/pdf/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/lib/percept/doc/pdf/.gitignore
+++ /dev/null
diff --git a/lib/percept/doc/src/Makefile b/lib/percept/doc/src/Makefile
deleted file mode 100644
index 2f84d61cbc..0000000000
--- a/lib/percept/doc/src/Makefile
+++ /dev/null
@@ -1,190 +0,0 @@
-#
-# %CopyrightBegin%
-#
-# Copyright Ericsson AB 2007-2016. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# %CopyrightEnd%
-#
-
-include $(ERL_TOP)/make/target.mk
-include $(ERL_TOP)/make/$(TARGET)/otp.mk
-
-# ----------------------------------------------------
-# Application version
-# ----------------------------------------------------
-include ../../vsn.mk
-VSN=$(PERCEPT_VSN)
-APPLICATION=percept
-
-# ----------------------------------------------------
-# Release directory specification
-# ----------------------------------------------------
-RELSYSDIR = $(RELEASE_PATH)/lib/$(APPLICATION)-$(VSN)
-
-# ----------------------------------------------------
-# Help application directory specification
-# ----------------------------------------------------
-
-EDOC_DIR = $(ERL_TOP)/lib/edoc
-
-# ----------------------------------------------------
-# Target Specs
-# ----------------------------------------------------
-PERCEPT_DIR = $(ERL_TOP)/lib/$(APPLICATION)/src
-RUNTIME_TOOLS_DIR = $(ERL_TOP)/lib/runtime_tools/src
-
-PERCEPT_MODULES = \
- egd\
- percept
-
-RUNTIME_TOOLS_MODULES = \
- percept_profile
-
-XML_APPLICATION_FILES = \
- ref_man.xml
-
-PERCEPT_XML_FILES = $(PERCEPT_MODULES:=.xml)
-
-RUNTIME_TOOLS_XML_FILES = $(RUNTIME_TOOLS_MODULES:=.xml)
-
-MODULE_XML_FILES = $(PERCEPT_XML_FILES) $(RUNTIME_TOOLS_XML_FILES)
-
-XML_REF_MAN = \
- ref_man.xml
-
-XML_REF3_FILES = $(MODULE_XML_FILES)
-
-XML_PART_FILES = \
- part.xml \
- part_notes.xml
-
-XML_REF6_FILES =
-
-XML_CHAPTER_FILES = \
- notes.xml \
- egd_ug.xml \
- percept_ug.xml
-
-GEN_XML = \
- egd_ug.xml \
- percept_ug.xml
-
-BOOK_FILES = book.xml
-
-XML_FILES = \
- $(BOOK_FILES) $(XML_CHAPTER_FILES) \
- $(XML_PART_FILES) $(XML_REF3_FILES) $(XML_REF_MAN)
-
-HTML_EXAMPLE_FILES = \
- percept_examples.html
-
-HTML_STYLESHEET_FILES = \
- ../stylesheet.css
-
-
-GIF_FILES = \
- test1.gif \
- test2.gif \
- test3.gif \
- test4.gif \
- percept_overview.gif \
- percept_processes.gif \
- percept_processinfo.gif \
- percept_compare.gif \
- img_esi_result.gif
-
-# ----------------------------------------------------
-INFO_FILE = ../../info
-
-HTML_FILES = \
- $(XML_REF_MAN:%.xml=$(HTMLDIR)/%.html) \
- $(XML_PART_FILES:%.xml=$(HTMLDIR)/%.html)
-
-MAN3_FILES = $(XML_REF3_FILES:%.xml=$(MAN3DIR)/%.3)
-MAN6_FILES = $(XML_REF6_FILES:%_app.xml=$(MAN6DIR)/%.6)
-
-
-HTML_REF_MAN_FILE = $(HTMLDIR)/index.html
-
-TOP_PDF_FILE = $(PDFDIR)/$(APPLICATION)-$(VSN).pdf
-
-
-# ----------------------------------------------------
-# FLAGS
-# ----------------------------------------------------
-XML_FLAGS +=
-
-# ----------------------------------------------------
-# Targets
-# ----------------------------------------------------
-$(HTMLDIR)/%.gif: %.gif
- $(INSTALL_DATA) $< $@
-
-docs: pdf html man
-
-$(TOP_PDF_FILE): $(XML_FILES)
-
-pdf: $(TOP_PDF_FILE)
-
-html: gifs $(HTML_REF_MAN_FILE)
-
-clean clean_docs:
- rm -f $(MODULE_XML_FILES) $(GEN_XML)
- rm -rf $(HTMLDIR)/*
- rm -f $(MAN3DIR)/*
- rm -f $(TOP_PDF_FILE) $(TOP_PDF_FILE:%.pdf=%.fo)
- rm -f errs core *~
-
-man: $(MAN3_FILES) $(MAN6_FILES)
-
-gifs: $(GIF_FILES:%=$(HTMLDIR)/%)
-
-xml: $(MODULE_XML_FILES)
-
-$(PERCEPT_XML_FILES):
- escript $(DOCGEN)/priv/bin/xml_from_edoc.escript $(PERCEPT_DIR)/$(@:%.xml=%.erl)
-
-$(RUNTIME_TOOLS_XML_FILES):
- escript $(DOCGEN)/priv/bin/xml_from_edoc.escript $(RUNTIME_TOOLS_DIR)/$(@:%.xml=%.erl)
-
-info:
- @echo "XML_PART_FILES: $(XML_PART_FILES)"
- @echo "XML_APPLICATION_FILES: $(XML_APPLICATION_FILES)"
- @echo "PERCEPT_XML_FILES: $(MODULE_XML_FILES)"
- @echo "PERCEPT_MODULES: $(PERCEPT_MODULES)"
- @echo "HTML_FILES: $(HTML_FILES)"
- @echo "HTMLDIR: $(HTMLDIR)"
-
-
-debug opt:
-
-# ----------------------------------------------------
-# Release Target
-# ----------------------------------------------------
-include $(ERL_TOP)/make/otp_release_targets.mk
-
-release_docs_spec: docs
- $(INSTALL_DIR) "$(RELSYSDIR)/doc/pdf"
- $(INSTALL_DATA) $(TOP_PDF_FILE) "$(RELSYSDIR)/doc/pdf"
- $(INSTALL_DIR) "$(RELSYSDIR)/doc/html"
- $(INSTALL_DATA) $(HTML_EXAMPLE_FILES) $(HTML_STYLESHEET_FILES) \
- $(HTMLDIR)/* \
- "$(RELSYSDIR)/doc/html"
- $(INSTALL_DATA) $(INFO_FILE) "$(RELSYSDIR)"
- $(INSTALL_DIR) "$(RELEASE_PATH)/man/man3"
- $(INSTALL_DATA) $(MAN3DIR)/* "$(RELEASE_PATH)/man/man3"
-
-release_spec:
-
diff --git a/lib/percept/doc/src/book.xml b/lib/percept/doc/src/book.xml
deleted file mode 100644
index 5acba1f214..0000000000
--- a/lib/percept/doc/src/book.xml
+++ /dev/null
@@ -1,52 +0,0 @@
-<?xml version="1.0" encoding="utf-8" ?>
-<!DOCTYPE book SYSTEM "book.dtd">
-
-<book xmlns:xi="http://www.w3.org/2001/XInclude">
- <header titlestyle="normal">
- <copyright>
- <year>2007</year>
- <year>2016</year>
- <holder>Ericsson AB, All Rights Reserved</holder>
- </copyright>
- <legalnotice>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- The Initial Developer of the Original Code is Ericsson AB.
- </legalnotice>
-
- <title>Percept</title>
- <prepared>Björn-Egil Dahlberg</prepared>
- <docno></docno>
- <date>2007-11-02</date>
- <rev>0.5.0</rev>
- <file>book.xml</file>
- </header>
- <insidecover>
- </insidecover>
- <pagetext>Percept</pagetext>
- <preamble>
- <contents level="2"></contents>
- </preamble>
- <parts lift="no">
- <xi:include href="part.xml"/>
- </parts>
- <applications>
- <xi:include href="ref_man.xml"/>
- </applications>
- <releasenotes>
- <xi:include href="notes.xml"/>
- </releasenotes>
- <listofterms></listofterms>
- <index></index>
-</book>
-
diff --git a/lib/percept/doc/src/egd_ug.xmlsrc b/lib/percept/doc/src/egd_ug.xmlsrc
deleted file mode 100644
index 85d41ada79..0000000000
--- a/lib/percept/doc/src/egd_ug.xmlsrc
+++ /dev/null
@@ -1,90 +0,0 @@
-<?xml version="1.0" encoding="utf-8" ?>
-<!DOCTYPE chapter SYSTEM "chapter.dtd">
-
-<chapter>
- <header>
- <copyright>
- <year>2007</year>
- <year>2016</year>
- <holder>Ericsson AB, All Rights Reserved</holder>
- </copyright>
- <legalnotice>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- The Initial Developer of the Original Code is Ericsson AB.
- </legalnotice>
-
- <title>egd</title>
- <prepared>Björn-Egil Dahlberg</prepared>
- <docno></docno>
- <date>2007-11-03</date>
- <rev>A</rev>
- <file>egd_ug.xml</file>
- </header>
- <section>
- <title>Introduction</title>
- <p>
- The egd module is an interface for 2d-image rendering and is used by
- Percept to generate dynamic graphs to its web pages. All code is pure
- erlang, no drivers needed.
- </p>
- <p>
- The library is intended for small to medium image sizes with low
- complexity for optimal performance. The library handles horizontal
- lines better then vertical lines.
- </p>
- <p>
- The foremost purpose for this module is to enable users to
- generate images from erlang code and/or datasets and to
- send these images to either files or web servers.
- </p>
- </section>
- <section>
- <title>File example</title>
- <p>Drawing examples:</p>
- <codeinclude file="img.erl" tag="" type="none"></codeinclude>
- <p> First save. </p>
- <image file="test1.gif">
- <icaption>test1.png</icaption>
- </image>
-
- <p> Second save. </p>
- <image file="test2.gif">
- <icaption>test2.png</icaption>
- </image>
-
- <p> Third save. </p>
- <image file="test3.gif">
- <icaption>test3.png</icaption>
- </image>
-
- <p> Fourth save. </p>
- <image file="test4.gif">
- <icaption>test4.png</icaption>
- </image>
- </section>
- <section>
- <title>ESI example</title>
- <p>Using egd with inets ESI to generate images on the fly:</p>
- <codeinclude file="img_esi.erl" tag="" type="none"></codeinclude>
- <image file="img_esi_result.gif">
- <icaption>Example of result.</icaption>
- </image>
- <p>
- For more information regarding ESI, please see inets application
- <seealso marker="inets:mod_esi">mod_esi</seealso>.
- </p>
- </section>
-</chapter>
-
-
diff --git a/lib/percept/doc/src/fascicules.xml b/lib/percept/doc/src/fascicules.xml
deleted file mode 100644
index 37feca543f..0000000000
--- a/lib/percept/doc/src/fascicules.xml
+++ /dev/null
@@ -1,18 +0,0 @@
-<?xml version="1.0" encoding="utf-8" ?>
-<!DOCTYPE fascicules SYSTEM "fascicules.dtd">
-
-<fascicules>
- <fascicule file="part" href="part_frame.html" entry="no">
- User's Guide
- </fascicule>
- <fascicule file="ref_man" href="ref_man_frame.html" entry="yes">
- Reference Manual
- </fascicule>
- <fascicule file="part_notes" href="part_notes_frame.html" entry="no">
- Release Notes
- </fascicule>
- <fascicule file="" href="../../../../doc/print.html" entry="no">
- Off-Print
- </fascicule>
-</fascicules>
-
diff --git a/lib/percept/doc/src/img.erl b/lib/percept/doc/src/img.erl
deleted file mode 100644
index 8f3bd3839f..0000000000
--- a/lib/percept/doc/src/img.erl
+++ /dev/null
@@ -1,50 +0,0 @@
--module(img).
-
--export([do/0]).
-
-do() ->
- Im = egd:create(200,200),
- Red = egd:color({255,0,0}),
- Green = egd:color({0,255,0}),
- Blue = egd:color({0,0,255}),
- Black = egd:color({0,0,0}),
- Yellow = egd:color({255,255,0}),
-
- % Line and fillRectangle
-
- egd:filledRectangle(Im, {20,20}, {180,180}, Red),
- egd:line(Im, {0,0}, {200,200}, Black),
-
- egd:save(egd:render(Im, png), "/home/egil/test1.png"),
-
- egd:filledEllipse(Im, {45, 60}, {55, 70}, Yellow),
- egd:filledEllipse(Im, {145, 60}, {155, 70}, Blue),
-
- egd:save(egd:render(Im, png), "/home/egil/test2.png"),
-
- R = 80,
- X0 = 99,
- Y0 = 99,
-
- Pts = [ { X0 + trunc(R*math:cos(A*math:pi()*2/360)),
- Y0 + trunc(R*math:sin(A*math:pi()*2/360))
- } || A <- lists:seq(0,359,5)],
- lists:map(
- fun({X,Y}) ->
- egd:rectangle(Im, {X-5, Y-5}, {X+5,Y+5}, Green)
- end, Pts),
-
- egd:save(egd:render(Im, png), "/home/egil/test3.png"),
-
- % Text
- Filename = filename:join([code:priv_dir(percept), "fonts", "6x11_latin1.wingsfont"]),
- Font = egd_font:load(Filename),
- {W,H} = egd_font:size(Font),
- String = "egd says hello",
- Length = length(String),
-
- egd:text(Im, {round(100 - W*Length/2), 200 - H - 5}, Font, String, Black),
-
- egd:save(egd:render(Im, png), "/home/egil/test4.png"),
-
- egd:destroy(Im).
diff --git a/lib/percept/doc/src/img_esi.erl b/lib/percept/doc/src/img_esi.erl
deleted file mode 100644
index e9796819c0..0000000000
--- a/lib/percept/doc/src/img_esi.erl
+++ /dev/null
@@ -1,25 +0,0 @@
--module(img_esi).
-
--export([image/3]).
-
-image(SessionID, _Env, _Input) ->
- mod_esi:deliver(SessionID, header()),
- Binary = my_image(),
- mod_esi:deliver(SessionID, binary_to_list(Binary)).
-
-my_image() ->
- Im = egd:create(300,20),
- Black = egd:color({0,0,0}),
- Red = egd:color({255,0,0}),
- egd:filledRectangle(Im, {30,14}, {270,19}, Red),
- egd:rectangle(Im, {30,14}, {270,19}, Black),
-
- Filename = filename:join([code:priv_dir(percept), "fonts", "6x11_latin1.wingsfont"]),
- Font = egd_font:load(Filename),
- egd:text(Im, {30, 0}, Font, "egd with esi callback", Black),
- Bin = egd:render(Im, png),
- egd:destroy(Im),
- Bin.
-
-header() ->
- "Content-Type: image/png\r\n\r\n".
diff --git a/lib/percept/doc/src/img_esi_result.gif b/lib/percept/doc/src/img_esi_result.gif
deleted file mode 100644
index 6973392998..0000000000
--- a/lib/percept/doc/src/img_esi_result.gif
+++ /dev/null
Binary files differ
diff --git a/lib/percept/doc/src/ipc_tree.erl b/lib/percept/doc/src/ipc_tree.erl
deleted file mode 100644
index 89360379c6..0000000000
--- a/lib/percept/doc/src/ipc_tree.erl
+++ /dev/null
@@ -1,30 +0,0 @@
--module(ipc_tree).
--export([go/1, init/2]).
-
-go(N) ->
- start(N, self()),
- receive {_,stop} -> ok end.
-
-start(Depth, ParentPid) ->
- spawn(?MODULE, init, [Depth, ParentPid]).
-
-init(0, ParentPid) ->
- workload(5000),
- ParentPid ! {self(),stop},
- ok;
-init(Depth, ParentPid) ->
- Pid1 = spawn(?MODULE, init, [Depth - 1, self()]),
- Pid2 = spawn(?MODULE, init, [Depth - 1, self()]),
- main([Pid1,Pid2], ParentPid).
-
-main(Pids, ParentPid) ->
- workload(5000),
- gather(Pids),
- ParentPid ! {self(),stop},
- ok.
-
-gather([]) -> ok;
-gather([Pid|Pids]) -> receive {Pid,stop} -> gather(Pids) end.
-
-workload(0) -> ok;
-workload(N) -> math:sin(2), workload(N - 1).
diff --git a/lib/percept/doc/src/notes.xml b/lib/percept/doc/src/notes.xml
deleted file mode 100644
index c9d5d3ae29..0000000000
--- a/lib/percept/doc/src/notes.xml
+++ /dev/null
@@ -1,495 +0,0 @@
-<?xml version="1.0" encoding="utf-8" ?>
-<!DOCTYPE chapter SYSTEM "chapter.dtd">
-
-<chapter>
- <header>
- <copyright>
- <year>2007</year>
- <year>2016</year>
- <holder>Ericsson AB, All Rights Reserved</holder>
- </copyright>
- <legalnotice>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- The Initial Developer of the Original Code is Ericsson AB.
- </legalnotice>
-
- <title>Percept Release Notes</title>
- <prepared>otp_appnotes</prepared>
- <docno>nil</docno>
- <date>nil</date>
- <rev>nil</rev>
- <file>notes.xml</file>
- </header>
- <p>This document describes the changes made to the Percept application.</p>
-
-<section><title>Percept 0.9</title>
-
- <section><title>Fixed Bugs and Malfunctions</title>
- <list>
- <item>
- <p>
- Remove deprecated <c>erlang:now/0</c> calls</p>
- <p>
- Own Id: OTP-13422</p>
- </item>
- </list>
- </section>
-
-
- <section><title>Improvements and New Features</title>
- <list>
- <item>
- <p>
- Improve line implementation</p>
- <p>
- Add capabilities for line thickness and anti-aliasing.</p>
- <p>
- Own Id: OTP-13598</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.8.11</title>
-
- <section><title>Fixed Bugs and Malfunctions</title>
- <list>
- <item>
- <p>
- Fix http server configuration</p>
- <p>
- Own Id: OTP-12662</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.8.10</title>
-
- <section><title>Fixed Bugs and Malfunctions</title>
- <list>
- <item>
- <p>
- Make sure to install .hrl files when needed</p>
- <p>
- Own Id: OTP-12197</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.8.9</title>
-
- <section><title>Fixed Bugs and Malfunctions</title>
- <list>
- <item>
- <p>
- Application upgrade (appup) files are corrected for the
- following applications: </p>
- <p>
- <c>asn1, common_test, compiler, crypto, debugger,
- dialyzer, edoc, eldap, erl_docgen, et, eunit, gs, hipe,
- inets, observer, odbc, os_mon, otp_mibs, parsetools,
- percept, public_key, reltool, runtime_tools, ssh,
- syntax_tools, test_server, tools, typer, webtool, wx,
- xmerl</c></p>
- <p>
- A new test utility for testing appup files is added to
- test_server. This is now used by most applications in
- OTP.</p>
- <p>
- (Thanks to Tobias Schlager)</p>
- <p>
- Own Id: OTP-11744</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.8.8.2</title>
-
- <section><title>Improvements and New Features</title>
- <list>
- <item>
- <p>
- The encoding of the <c>notes.xml</c> file has been
- changed from latin1 to utf-8 to avoid future merge
- problems.</p>
- <p>
- Own Id: OTP-11310</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.8.8.1</title>
-
- <section><title>Improvements and New Features</title>
- <list>
- <item>
- <p> Postscript files no longer needed for the generation
- of PDF files have been removed. </p>
- <p>
- Own Id: OTP-11016</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.8.8</title>
-
- <section><title>Improvements and New Features</title>
- <list>
- <item>
- <p>
- Misc build updates</p>
- <p>
- Own Id: OTP-10784</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.8.7</title>
-
- <section><title>Fixed Bugs and Malfunctions</title>
- <list>
- <item>
- <p>
- Add missing modules in app-file</p>
- <p>
- Own Id: OTP-10439</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.8.6.1</title>
-
- <section><title>Improvements and New Features</title>
- <list>
- <item>
- <p>
- Miscellaneous documentation build updates</p>
- <p>
- Own Id: OTP-9813</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.8.6</title>
-
- <section><title>Fixed Bugs and Malfunctions</title>
- <list>
- <item>
- <p>
- Fix message handling in select requests</p>
- <p>
- percept_db used to send results in untagged messages, and
- use a non selective receive to extract them. When percept
- is used from the shell process, this can confuse other
- messages with the actual result.</p>
- <p>
- Add a tag to the message to be {result, Result}. Add
- demonitor to avoid keeping DOWN message in the queue fix
- one spec in do_start/0</p>
- <p>
- (Thanks to Ahmed Omar)</p>
- <p>
- Own Id: OTP-9490</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.8.5</title>
-
- <section><title>Fixed Bugs and Malfunctions</title>
- <list>
- <item>
- <p> Fixes a race condition found in percept_db start/1
- function. (Thanks to Ahmed Omar) </p>
- <p>
- Own Id: OTP-9012</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.8.4</title>
-
- <section><title>Fixed Bugs and Malfunctions</title>
- <list>
- <item>
- <p>
- Fix egd_render transparent to use float constants.</p>
- <p>
- The render engine has float guards to enhance beam code
- generation. However, the default case used integers which
- caused the engine to crash. This is now fixed.</p>
- <p>
- Own Id: OTP-8425</p>
- </item>
- </list>
- </section>
-
-
- <section><title>Improvements and New Features</title>
- <list>
- <item>
- <p>The documentation is now possible to build in an open
- source environment after a number of bugs are fixed and
- some features are added in the documentation build
- process. </p>
- <p>- The arity calculation is updated.</p>
- <p>- The module prefix used in the function names for
- bif's are removed in the generated links so the links
- will look like
- "http://www.erlang.org/doc/man/erlang.html#append_element-2"
- instead of
- "http://www.erlang.org/doc/man/erlang.html#erlang:append_element-2".</p>
- <p>- Enhanced the menu positioning in the html
- documentation when a new page is loaded.</p>
- <p>- A number of corrections in the generation of man
- pages (thanks to Sergei Golovan)</p>
- <p>- The legal notice is taken from the xml book file so
- OTP's build process can be used for non OTP
- applications.</p>
- <p>
- Own Id: OTP-8343</p>
- </item>
- <item>
- <p>
- Cleanups suggested by tidier and modernization of types
- and specs.</p>
- <p>
- Own Id: OTP-8455</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.8.3</title>
-
- <section><title>Improvements and New Features</title>
- <list>
- <item>
- <p>
- The documentation is now built with open source tools
- (xsltproc and fop) that exists on most platforms. One
- visible change is that the frames are removed.</p>
- <p>
- Own Id: OTP-8201</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.8.2</title>
-
- <section><title>Improvements and New Features</title>
- <list>
- <item>
- <p>
- Extensions to <c>egd:color/1</c> for using atoms as color
- definition in addition to rgb triplets.</p>
- <p>
- Own Id: OTP-7975</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.8.1</title>
-
- <section><title>Improvements and New Features</title>
- <list>
- <item>
- <p><c>egd</c> now supports encapsulated postscript output
- format.</p>
- <p>
- Own Id: OTP-7923</p>
- </item>
- </list>
- </section>
-
-</section>
-
- <section><title>Percept 0.8</title>
-
- <section><title>Fixed Bugs and Malfunctions</title>
- <list>
- <item>
- <p>A problem with options list to percept causing some
- options to be disregarded unintentionally. This has now
- been fixed.</p> <p>An error in <c>percept_analyzer</c>
- caused calculation of standard deviation to be incorrect.
- This has now been corrected.</p>
- <p>
- Own Id: OTP-7693</p>
- </item>
- </list>
- </section>
-
-
- <section><title>Improvements and New Features</title>
- <list>
- <item>
- <p>Updated css for percept server for enhanced
- viewing.</p> <p>Increased performance of egd render.</p>
- <p>Several graph errors could occur when compacting data
- to decrease graph rendering time causing incorrect
- scalability numbers. These errors have now been
- fixed.</p> <p>Increased viewing width for graphs. The
- viewing width is now dependent on client screen
- resolution.</p>
- <p>
- Own Id: OTP-7696</p>
- </item>
- </list>
- </section>
-
-</section>
-<section><title>Percept 0.7.3</title>
-
- <section><title>Fixed Bugs and Malfunctions</title>
- <list>
- <item>
- <p>External pids caused the webserver to crash. This has
- now been fixed.</p>
- <p>
- Own Id: OTP-7515 Aux Id: seq11004 </p>
- </item>
- <item>
- <p>Fixed a timestamp problem where some events could be
- sent out of order. Minor fixes to presentation of
- data.</p>
- <p>
- Own Id: OTP-7544 Aux Id: otp-7442 </p>
- </item>
- </list>
- </section>
-
-
- <section><title>Improvements and New Features</title>
- <list>
- <item>
- <p>Performance enhancement for the egd render engine
- (Thanks to Magnus Thoäng).</p>
- <p>
- Own Id: OTP-7616</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.7.2</title>
-
- <section><title>Fixed Bugs and Malfunctions</title>
- <list>
- <item>
- <p>Calling <c>egd:destroy/1</c> did not properly remove
- the process holding the image.</p>
- <p>Synchronous calls done via the egd interface could
- erroneous receive messages not intended for egd. Messages
- are now tagged in such a way so this should not
- occur.</p>
- <p>
- Own Id: OTP-7336</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.7.1</title>
-
- <section><title>Fixed Bugs and Malfunctions</title>
- <list>
- <item>
- <p>
- Fixed out of bounds rendering problem in egd which could
- cause the rendering process to crash.</p>
- <p>
- Own Id: OTP-7215</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.7</title>
-
- <section><title>Improvements and New Features</title>
- <list>
- <item>
- <p>Percept no longer depends on external c-libraries. The
- graphical rendering is now done via erlang code.</p>
- <p>
- Own Id: OTP-7162</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section><title>Percept 0.6.2</title>
-
- <section><title>Improvements and New Features</title>
- <list>
- <item>
- <p>
- A new module, percept_profile, can now be used to collect
- profiling data even if the percept application is not
- installed. This should help profiling erlang application
- on target machines without libgd installed.</p>
- <p>
- Own Id: OTP-7126</p>
- </item>
- </list>
- </section>
-
-</section>
-
-<section>
- <title>Percept 0.5.0</title>
- <section><title>First Release</title>
- <list>
- <item>
- <p>
- First Release.
- </p>
- <p>Own Id: OTP-6783</p>
- </item>
- </list>
- </section>
- </section>
-</chapter>
-
diff --git a/lib/percept/doc/src/part.xml b/lib/percept/doc/src/part.xml
deleted file mode 100644
index 277d89d45c..0000000000
--- a/lib/percept/doc/src/part.xml
+++ /dev/null
@@ -1,47 +0,0 @@
-<?xml version="1.0" encoding="utf-8" ?>
-<!DOCTYPE part SYSTEM "part.dtd">
-
-<part xmlns:xi="http://www.w3.org/2001/XInclude">
- <header>
- <copyright>
- <year>2007</year>
- <year>2016</year>
- <holder>Ericsson AB, All Rights Reserved</holder>
- </copyright>
- <legalnotice>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- The Initial Developer of the Original Code is Ericsson AB.
- </legalnotice>
-
- <title>Percept User's Guide</title>
- <prepared>Björn-Egil Dahlberg</prepared>
- <docno></docno>
- <date>2007-11-02</date>
- <rev>0.5.0</rev>
- <file>part.xml</file>
- </header>
- <description>
- <p>
- <em>Percept</em> is an acronym for <em>P</em>ercept - <em>er</em>lang
- <em>c</em>oncurr<em>e</em>ncy <em>p</em>rofiling <em>t</em>ool.
- </p>
- <p>
- It is a tool to visualize application level concurrency and
- identify concurrency bottlenecks.
- </p>
- </description>
- <xi:include href="percept_ug.xml"/>
- <xi:include href="egd_ug.xml"/>
-</part>
-
diff --git a/lib/percept/doc/src/part_notes.xml b/lib/percept/doc/src/part_notes.xml
deleted file mode 100644
index f428b4fd81..0000000000
--- a/lib/percept/doc/src/part_notes.xml
+++ /dev/null
@@ -1,41 +0,0 @@
-<?xml version="1.0" encoding="utf-8" ?>
-<!DOCTYPE part SYSTEM "part.dtd">
-
-<part xmlns:xi="http://www.w3.org/2001/XInclude">
- <header>
- <copyright>
- <year>2007</year>
- <year>2016</year>
- <holder>Ericsson AB, All Rights Reserved</holder>
- </copyright>
- <legalnotice>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- The Initial Developer of the Original Code is Ericsson AB.
- </legalnotice>
-
- <title>Percept Release Notes</title>
- <prepared>Björn-Egil Dahlberg</prepared>
- <docno></docno>
- <date>>2007-11-02</date>
- <rev></rev>
- <file>part_notes.xml</file>
- </header>
- <description>
- <p>
- The <em>Percept</em> application.
- </p>
- </description>
- <xi:include href="notes.xml"/>
-</part>
-
diff --git a/lib/percept/doc/src/percept_compare.gif b/lib/percept/doc/src/percept_compare.gif
deleted file mode 100644
index 1c8ccf0186..0000000000
--- a/lib/percept/doc/src/percept_compare.gif
+++ /dev/null
Binary files differ
diff --git a/lib/percept/doc/src/percept_examples.html b/lib/percept/doc/src/percept_examples.html
deleted file mode 100644
index df2f52bdfd..0000000000
--- a/lib/percept/doc/src/percept_examples.html
+++ /dev/null
@@ -1,11 +0,0 @@
-<meta http-equiv="Context-Type" content="text/html; charset=iso-8859-1">
-<?xml version="1.0" encoding="iso-8859-1"?><!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
- "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd ">
-<html xmlns="http://www.w3.org/1999/xhtml" ><head>
-<title>Customization functions</title>
-<link rel="stylesheet" type="text/css" href="stylesheet.css">
-</head>
-<body>
-<h1>Customization functions</h1>
-</body>
-</html>
diff --git a/lib/percept/doc/src/percept_overview.gif b/lib/percept/doc/src/percept_overview.gif
deleted file mode 100644
index 12ac172472..0000000000
--- a/lib/percept/doc/src/percept_overview.gif
+++ /dev/null
Binary files differ
diff --git a/lib/percept/doc/src/percept_processes.gif b/lib/percept/doc/src/percept_processes.gif
deleted file mode 100644
index 640ff50ee2..0000000000
--- a/lib/percept/doc/src/percept_processes.gif
+++ /dev/null
Binary files differ
diff --git a/lib/percept/doc/src/percept_processinfo.gif b/lib/percept/doc/src/percept_processinfo.gif
deleted file mode 100644
index 00cc05f5c9..0000000000
--- a/lib/percept/doc/src/percept_processinfo.gif
+++ /dev/null
Binary files differ
diff --git a/lib/percept/doc/src/percept_ug.xmlsrc b/lib/percept/doc/src/percept_ug.xmlsrc
deleted file mode 100644
index 0d243cdabe..0000000000
--- a/lib/percept/doc/src/percept_ug.xmlsrc
+++ /dev/null
@@ -1,223 +0,0 @@
-<?xml version="1.0" encoding="utf-8" ?>
-<!DOCTYPE chapter SYSTEM "chapter.dtd">
-
-<chapter>
- <header>
- <copyright>
- <year>2007</year>
- <year>2016</year>
- <holder>Ericsson AB, All Rights Reserved</holder>
- </copyright>
- <legalnotice>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- The Initial Developer of the Original Code is Ericsson AB.
- </legalnotice>
-
- <title>Percept</title>
- <prepared>Björn-Egil Dahlberg</prepared>
- <docno></docno>
- <date>2007-11-02</date>
- <rev>A</rev>
- <file>percept_ug.xml</file>
- </header>
- <p>
- Percept, or Percept - Erlang Concurrency Profiling Tool, utilizes trace
- informations and profiler events to form a picture of the processes's and
- ports runnability.
- </p>
-
- <section>
- <title>Introduction</title>
- <p>
- Percept uses <c>erlang:trace/3</c> and <c>erlang:system_profile/2</c> to monitor events from
- process states. Such states are,</p>
- <list>
- <item>waiting</item>
- <item>running</item>
- <item>runnable</item>
- <item>free</item>
- <item>exiting</item>
- </list>
- <p>
- There are some other states too, <c>suspended</c>, <c>hibernating</c>, and
- garbage collecting (<c>gc</c>). The only ignored state is <c>gc</c> and a process is considered to have
- its previous state through out the entire garbage collecting phase. The main reason for this, is that our
- model considers the <c>gc</c> as a third state neither active nor inactive.
- </p>
- <p>
- A waiting or suspended process is considered an inactive process and a running or
- runnable process is considered an active process.
- </p>
- <p>
- Events are collected and stored to a file. The file can be moved and
- analyzed on a different machine than the target machine.
- </p>
- <p>
- Note, even if percept is not installed on your target machine, profiling
- can still be done via the module <seealso marker="percept_profile">percept_profile</seealso>
- located in runtime_tools.
- </p>
- </section>
- <section>
- <title>Getting started</title>
- <section>
- <title>Profiling</title>
- <p>
- There are a few ways to start the profiling of a specific code. The
- command <c>percept:profile/3</c> is a preferred way.
- </p>
- <p>
- The command takes a filename for the data destination file as first
- argument, a callback entry-point as second argument and a
- list of specific profiler options, for instance <c>procs</c>, as third
- argument.
- </p>
- <p>
- Let's say we have a module called example that initializes our
- profiling-test and let it run under some defined manner designed by ourself.
- The module needs a start function, let's call it go and it takes zero arguments.
- The start arguments would look like:
- </p>
- <p><c>percept:profile("test.dat", {test, go, []}, [procs]).</c></p>
- <p>
- For a semi-real example we start a tree of processes that does sorting
- of random numbers. In our model below we use a controller process that
- distributes work to different client processes.
- </p>
- <codeinclude file="sorter.erl" tag="" type="none"></codeinclude>
- <p>We can now start our test using percept:</p>
- <pre>
-Erlang (BEAM) emulator version 5.6 [async-threads:0] [kernel-poll:false]
-
-Eshell V5.6 (abort with ^G)
-1> percept:profile("test.dat", {sorter, go, [5, 2000, 15]}, [procs]).
-Starting profiling.
-ok
- </pre>
- <p>
- Percept sets up the trace and profiling facilities to listen for process
- specific events. It then stores these events to the <c>test.dat</c>
- file. The profiling will go on for the whole duration until
- <c>sorter:go/3</c> returns and the profiling has concluded.
- </p>
- </section>
- <section>
- <title>Data viewing</title>
- <p>
- To analyze this file, use <c>percept:analyze("test.dat")</c>. We can do
- this on any machine with Percept installed. The command will parse the
- data file and insert all events in a RAM database, <c>percept_db</c>. The
- initial command will only prompt how many processes were involved in the
- profile.
- </p>
- <pre>
-2> percept:analyze("test.dat").
-Parsing: "test.dat"
-Parsed 428 entries in 3.81310e-2 s.
- 17 created processes.
- 0 opened ports.
-ok
- </pre>
- <p>
- To view the data we start the web-server using
- <c>percept:start_webserver/1</c>. The command will return the hostname
- and the a port where we should direct our favorite web browser.
- </p>
- <pre>
-3> percept:start_webserver(8888).
-{started,"durin",8888}
-4>
- </pre>
- <section>
- <title>Overview selection</title>
- <p>
- Now we can view our data. The database has its content from
- <c>percept:analyze/1</c> command and the webserver is started.
- </p>
- <p>
- When we click on the <c>overview</c> button in the menu percept will
- generate a graph of the concurrency and send it to our web browser. In this
- view we get no details but rather the big picture. We can see if
- our processes behave in an inefficient manner. Dips in the graph represents
- low concurrency in the erlang system.
- </p>
- <p>
- We can zoom in on different areas of the graph either using the mouse
- to select an area or by specifying min and max ranges in the edit boxes.
- </p>
- <note>
- <p>Measured time is presented in seconds if nothing else is stated.</p>
- </note>
- <image file="percept_overview.gif">
- <icaption>Overview selection</icaption>
- </image>
- </section>
- <section>
- <title>Processes selection</title>
- <p>
- To get a more detailed description we can select the process view by
- clicking the <c>processes</c> button in the menu.
- </p>
- <p>
- The table shows process id's that are click-able and direct you to
- the process information page, a lifetime bar that presents a rough estimate
- in green color about when the process was alive during profiling, an
- entry-point, its registered name if it had one and the process's
- parent id.
- </p>
- <p>
- We can select which processes we want to compare and then hit the
- <c>compare</c> button on the top right of the screen.
- </p>
- <image file="percept_processes.gif">
- <icaption>Processes selection</icaption>
- </image>
- </section>
- <section>
- <title>Compare selection</title>
- <p>
- The activity bar under the concurrency graph shows each process's
- runnability. The color green shows when a process is active (which is
- running or runnable) and the white color represents time when a
- process is inactive (waiting in a receive or is suspended).
- </p>
- <p>
- To inspect a certain process click on the process id button, this will
- direct you to a process information page for that specific process.
- </p>
- <image file="percept_compare.gif">
- <icaption>Processes compare selection</icaption>
- </image>
- </section>
- <section>
- <title>Process information selection</title>
- <p>
- Here we can some general information for the process. Parent and
- children processes, spawn and exit times, entry-point and start arguments.
- </p>
- <p>
- We can also see the process' inactive times. How many times it has
- been waiting, statistical information and most importantly in which
- function.
- </p>
- <p>
- The time percentages presented in process information are of time spent in waiting, not total run time.
- </p>
- <image file="percept_processinfo.gif">
- <icaption>Process information selection</icaption>
- </image>
- </section>
- </section>
- </section>
-</chapter>
diff --git a/lib/percept/doc/src/ref_man.xml b/lib/percept/doc/src/ref_man.xml
deleted file mode 100644
index 143312489b..0000000000
--- a/lib/percept/doc/src/ref_man.xml
+++ /dev/null
@@ -1,48 +0,0 @@
-<?xml version="1.0" encoding="utf-8" ?>
-<!DOCTYPE application SYSTEM "application.dtd">
-
-<application xmlns:xi="http://www.w3.org/2001/XInclude">
- <header>
- <copyright>
- <year>2007</year>
- <year>2016</year>
- <holder>Ericsson AB, All Rights Reserved</holder>
- </copyright>
- <legalnotice>
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- The Initial Developer of the Original Code is Ericsson AB.
- </legalnotice>
-
- <title>Percept Reference Manual</title>
- <prepared>Edoc</prepared>
- <docno></docno>
- <date>2007-11-02</date>
- <rev>1.0</rev>
- <file>ref_man.xml</file>
- </header>
- <description>
- <p>
- <em>Percept</em> is an acronym for <em>P</em>ercept - <em>er</em>lang
- <em>c</em>oncurr<em>e</em>ncy <em>p</em>rofiling <em>t</em>ool.
- </p>
- <p>
- It is a tool to visualize application level concurrency and
- identify concurrency bottlenecks.
- </p>
- </description>
- <xi:include href="egd.xml"/>
- <xi:include href="percept.xml"/>
- <xi:include href="percept_profile.xml"/>
-</application>
-
diff --git a/lib/percept/doc/src/sorter.erl b/lib/percept/doc/src/sorter.erl
deleted file mode 100644
index 8d5f2c715c..0000000000
--- a/lib/percept/doc/src/sorter.erl
+++ /dev/null
@@ -1,41 +0,0 @@
--module(sorter).
--export([go/3,loop/0,main/4]).
-
-go(I,N,M) ->
- spawn(?MODULE, main, [I,N,M,self()]),
- receive done -> ok end.
-
-main(I,N,M,Parent) ->
- Pids = lists:foldl(
- fun(_,Ps) ->
- [ spawn(?MODULE,loop, []) | Ps]
- end, [], lists:seq(1,M)),
-
- lists:foreach(
- fun(_) ->
- send_work(N,Pids),
- gather(Pids)
- end, lists:seq(1,I)),
-
- lists:foreach(
- fun(Pid) ->
- Pid ! {self(), quit}
- end, Pids),
-
- gather(Pids), Parent ! done.
-
-send_work(_,[]) -> ok;
-send_work(N,[Pid|Pids]) ->
- Pid ! {self(),sort,N},
- send_work(round(N*1.2),Pids).
-
-loop() ->
- receive
- {Pid, sort, N} -> dummy_sort(N),Pid ! {self(), done},loop();
- {Pid, quit} -> Pid ! {self(), done}
- end.
-
-dummy_sort(N) -> lists:sort([ random:uniform(N) || _ <- lists:seq(1,N)]).
-
-gather([]) -> ok;
-gather([Pid|Pids]) -> receive {Pid, done} -> gather(Pids) end.
diff --git a/lib/percept/doc/src/test1.gif b/lib/percept/doc/src/test1.gif
deleted file mode 100644
index 70a519d8e3..0000000000
--- a/lib/percept/doc/src/test1.gif
+++ /dev/null
Binary files differ
diff --git a/lib/percept/doc/src/test2.gif b/lib/percept/doc/src/test2.gif
deleted file mode 100644
index f18e1f9e58..0000000000
--- a/lib/percept/doc/src/test2.gif
+++ /dev/null
Binary files differ
diff --git a/lib/percept/doc/src/test3.gif b/lib/percept/doc/src/test3.gif
deleted file mode 100644
index c7581f19aa..0000000000
--- a/lib/percept/doc/src/test3.gif
+++ /dev/null
Binary files differ
diff --git a/lib/percept/doc/src/test4.gif b/lib/percept/doc/src/test4.gif
deleted file mode 100644
index e7d52c08a3..0000000000
--- a/lib/percept/doc/src/test4.gif
+++ /dev/null
Binary files differ
diff --git a/lib/percept/doc/stylesheet.css b/lib/percept/doc/stylesheet.css
deleted file mode 100644
index 24d8a02145..0000000000
--- a/lib/percept/doc/stylesheet.css
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * %CopyrightBegin%
- *
- * Copyright Ericsson AB 2007-2016. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * %CopyrightEnd%
- */
-
-BODY {color: #000000;
- background-color: #ffffff;
- margin-left: .4in}
-H1 {margin-left: -.4in}
-H2 {margin-left: -.4in}
-H3 {margin-left: -.2in}
-.logo{float:right;}
-.toc UL {
- list-style-type: none;
- border: solid;
- border-width: thin;
- padding-left: 10px;
- padding-right: 10px;
- padding-top: 5px;
- padding-bottom: 5px;
- background: #f0f0f0;
- letter-spacing: 2px;
- line-height: 20px;
-}
diff --git a/lib/percept/ebin/.gitignore b/lib/percept/ebin/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/lib/percept/ebin/.gitignore
+++ /dev/null
diff --git a/lib/percept/include/.gitignore b/lib/percept/include/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/lib/percept/include/.gitignore
+++ /dev/null
diff --git a/lib/percept/info b/lib/percept/info
deleted file mode 100644
index 07d58d28ae..0000000000
--- a/lib/percept/info
+++ /dev/null
@@ -1,2 +0,0 @@
-group: tools
-short: A concurrency profiler tool.
diff --git a/lib/percept/priv/Makefile b/lib/percept/priv/Makefile
deleted file mode 100644
index a1912edfc0..0000000000
--- a/lib/percept/priv/Makefile
+++ /dev/null
@@ -1,97 +0,0 @@
-#
-# %CopyrightBegin%
-#
-# Copyright Ericsson AB 2007-2016. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# %CopyrightEnd%
-#
-
-include $(ERL_TOP)/make/target.mk
-include $(ERL_TOP)/make/$(TARGET)/otp.mk
-
-# ----------------------------------------------------
-# Application version
-# ----------------------------------------------------
-include ../vsn.mk
-VSN=$(PERCEPT_VSN)
-
-# ----------------------------------------------------
-# Release directory specification
-# ----------------------------------------------------
-RELSYSDIR = $(RELEASE_PATH)/lib/percept-$(VSN)
-
-# ----------------------------------------------------
-# Target Specs
-# ----------------------------------------------------
-CONF_FILES = \
- server_root/conf/mime.types
-
-HTDOCS_FILES = \
- server_root/htdocs/index.html
-
-IMAGE_FILES = \
- server_root/images/nav.png \
- server_root/images/white.png
-
-SCRIPT_FILES = \
- server_root/scripts/percept_area_select.js \
- server_root/scripts/percept_error_handler.js \
- server_root/scripts/percept_select_all.js
-
-CSS_FILES = \
- server_root/css/percept.css
-
-FONT_FILES = \
- fonts/6x11_latin1.wingsfont
-
-# ----------------------------------------------------
-# FLAGS
-# ----------------------------------------------------
-ERL_COMPILE_FLAGS +=
-
-# ----------------------------------------------------
-# Targets
-# ----------------------------------------------------
-
-debug opt:
-
-clean:
-
-docs:
-
-# ----------------------------------------------------
-# Release Target
-# ----------------------------------------------------
-include $(ERL_TOP)/make/otp_release_targets.mk
-
-release_spec: opt
- # Finished
- $(INSTALL_DIR) "$(RELSYSDIR)/priv/logs"
- $(INSTALL_DIR) "$(RELSYSDIR)/priv/server_root"
- $(INSTALL_DIR) "$(RELSYSDIR)/priv/server_root/htdocs"
- $(INSTALL_DATA) $(HTDOCS_FILES) "$(RELSYSDIR)/priv/server_root/htdocs"
- $(INSTALL_DIR) "$(RELSYSDIR)/priv/server_root/conf"
- $(INSTALL_DATA) $(CONF_FILES) "$(RELSYSDIR)/priv/server_root/conf"
- $(INSTALL_DIR) "$(RELSYSDIR)/priv/server_root/scripts"
- $(INSTALL_DATA) $(SCRIPT_FILES) "$(RELSYSDIR)/priv/server_root/scripts"
- $(INSTALL_DIR) "$(RELSYSDIR)/priv/server_root/css"
- $(INSTALL_DATA) $(CSS_FILES) "$(RELSYSDIR)/priv/server_root/css"
- $(INSTALL_DIR) "$(RELSYSDIR)/priv/server_root/images"
- $(INSTALL_DATA) $(IMAGE_FILES) "$(RELSYSDIR)/priv/server_root/images"
- $(INSTALL_DIR) "$(RELSYSDIR)/priv/fonts"
- $(INSTALL_DATA) $(FONT_FILES) "$(RELSYSDIR)/priv/fonts"
-
-release_docs_spec:
-
diff --git a/lib/percept/priv/fonts/6x11_latin1.wingsfont b/lib/percept/priv/fonts/6x11_latin1.wingsfont
deleted file mode 100644
index d1e1c42eef..0000000000
--- a/lib/percept/priv/fonts/6x11_latin1.wingsfont
+++ /dev/null
Binary files differ
diff --git a/lib/percept/priv/logs/.gitignore b/lib/percept/priv/logs/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/lib/percept/priv/logs/.gitignore
+++ /dev/null
diff --git a/lib/percept/priv/obj/.gitignore b/lib/percept/priv/obj/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/lib/percept/priv/obj/.gitignore
+++ /dev/null
diff --git a/lib/percept/priv/server_root/cgi-bin/.gitignore b/lib/percept/priv/server_root/cgi-bin/.gitignore
deleted file mode 100644
index e69de29bb2..0000000000
--- a/lib/percept/priv/server_root/cgi-bin/.gitignore
+++ /dev/null
diff --git a/lib/percept/priv/server_root/conf/mime.types b/lib/percept/priv/server_root/conf/mime.types
deleted file mode 100644
index 6245efdbd9..0000000000
--- a/lib/percept/priv/server_root/conf/mime.types
+++ /dev/null
@@ -1,462 +0,0 @@
-application/EDI-Consent
-application/EDI-X12
-application/EDIFACT
-application/activemessage
-application/andrew-inset ez
-application/applefile
-application/atomicmail
-application/batch-SMTP
-application/beep+xml
-application/cals-1840
-application/commonground
-application/cybercash
-application/dca-rft
-application/dec-dx
-application/dvcs
-application/eshop
-application/http
-application/hyperstudio
-application/iges
-application/index
-application/index.cmd
-application/index.obj
-application/index.response
-application/index.vnd
-application/iotp
-application/ipp
-application/isup
-application/font-tdpfr
-application/mac-binhex40 hqx
-application/mac-compactpro cpt
-application/macwriteii
-application/marc
-application/mathematica
-application/mathematica-old
-application/msword doc
-application/news-message-id
-application/news-transmission
-application/ocsp-request
-application/ocsp-response
-application/octet-stream bin dms lha lzh exe class so dll
-application/oda oda
-application/parityfec
-application/pdf pdf
-application/pgp-encrypted
-application/pgp-keys
-application/pgp-signature
-application/pkcs10
-application/pkcs7-mime
-application/pkcs7-signature
-application/pkix-cert
-application/pkix-crl
-application/pkixcmp
-application/postscript ai eps ps
-application/prs.alvestrand.titrax-sheet
-application/prs.cww
-application/prs.nprend
-application/qsig
-application/remote-printing
-application/riscos
-application/rtf
-application/sdp
-application/set-payment
-application/set-payment-initiation
-application/set-registration
-application/set-registration-initiation
-application/sgml
-application/sgml-open-catalog
-application/sieve
-application/slate
-application/smil smi smil
-application/timestamp-query
-application/timestamp-reply
-application/vemmi
-application/vnd.3M.Post-it-Notes
-application/vnd.FloGraphIt
-application/vnd.accpac.simply.aso
-application/vnd.accpac.simply.imp
-application/vnd.acucobol
-application/vnd.aether.imp
-application/vnd.anser-web-certificate-issue-initiation
-application/vnd.anser-web-funds-transfer-initiation
-application/vnd.audiograph
-application/vnd.businessobjects
-application/vnd.bmi
-application/vnd.canon-cpdl
-application/vnd.canon-lips
-application/vnd.claymore
-application/vnd.commerce-battelle
-application/vnd.commonspace
-application/vnd.comsocaller
-application/vnd.contact.cmsg
-application/vnd.cosmocaller
-application/vnd.cups-postscript
-application/vnd.cups-raster
-application/vnd.cups-raw
-application/vnd.ctc-posml
-application/vnd.cybank
-application/vnd.dna
-application/vnd.dpgraph
-application/vnd.dxr
-application/vnd.ecdis-update
-application/vnd.ecowin.chart
-application/vnd.ecowin.filerequest
-application/vnd.ecowin.fileupdate
-application/vnd.ecowin.series
-application/vnd.ecowin.seriesrequest
-application/vnd.ecowin.seriesupdate
-application/vnd.enliven
-application/vnd.epson.esf
-application/vnd.epson.msf
-application/vnd.epson.quickanime
-application/vnd.epson.salt
-application/vnd.epson.ssf
-application/vnd.ericsson.quickcall
-application/vnd.eudora.data
-application/vnd.fdf
-application/vnd.ffsns
-application/vnd.framemaker
-application/vnd.fsc.weblaunch
-application/vnd.fujitsu.oasys
-application/vnd.fujitsu.oasys2
-application/vnd.fujitsu.oasys3
-application/vnd.fujitsu.oasysgp
-application/vnd.fujitsu.oasysprs
-application/vnd.fujixerox.ddd
-application/vnd.fujixerox.docuworks
-application/vnd.fujixerox.docuworks.binder
-application/vnd.fut-misnet
-application/vnd.grafeq
-application/vnd.groove-account
-application/vnd.groove-identity-message
-application/vnd.groove-injector
-application/vnd.groove-tool-message
-application/vnd.groove-tool-template
-application/vnd.groove-vcard
-application/vnd.hhe.lesson-player
-application/vnd.hp-HPGL
-application/vnd.hp-PCL
-application/vnd.hp-PCLXL
-application/vnd.hp-hpid
-application/vnd.hp-hps
-application/vnd.httphone
-application/vnd.hzn-3d-crossword
-application/vnd.ibm.afplinedata
-application/vnd.ibm.MiniPay
-application/vnd.ibm.modcap
-application/vnd.informix-visionary
-application/vnd.intercon.formnet
-application/vnd.intertrust.digibox
-application/vnd.intertrust.nncp
-application/vnd.intu.qbo
-application/vnd.intu.qfx
-application/vnd.irepository.package+xml
-application/vnd.is-xpr
-application/vnd.japannet-directory-service
-application/vnd.japannet-jpnstore-wakeup
-application/vnd.japannet-payment-wakeup
-application/vnd.japannet-registration
-application/vnd.japannet-registration-wakeup
-application/vnd.japannet-setstore-wakeup
-application/vnd.japannet-verification
-application/vnd.japannet-verification-wakeup
-application/vnd.koan
-application/vnd.lotus-1-2-3
-application/vnd.lotus-approach
-application/vnd.lotus-freelance
-application/vnd.lotus-notes
-application/vnd.lotus-organizer
-application/vnd.lotus-screencam
-application/vnd.lotus-wordpro
-application/vnd.mcd
-application/vnd.mediastation.cdkey
-application/vnd.meridian-slingshot
-application/vnd.mif mif
-application/vnd.minisoft-hp3000-save
-application/vnd.mitsubishi.misty-guard.trustweb
-application/vnd.mobius.daf
-application/vnd.mobius.dis
-application/vnd.mobius.msl
-application/vnd.mobius.plc
-application/vnd.mobius.txf
-application/vnd.motorola.flexsuite
-application/vnd.motorola.flexsuite.adsi
-application/vnd.motorola.flexsuite.fis
-application/vnd.motorola.flexsuite.gotap
-application/vnd.motorola.flexsuite.kmr
-application/vnd.motorola.flexsuite.ttc
-application/vnd.motorola.flexsuite.wem
-application/vnd.mozilla.xul+xml
-application/vnd.ms-artgalry
-application/vnd.ms-asf
-application/vnd.ms-excel xls
-application/vnd.ms-lrm
-application/vnd.ms-powerpoint ppt
-application/vnd.ms-project
-application/vnd.ms-tnef
-application/vnd.ms-works
-application/vnd.mseq
-application/vnd.msign
-application/vnd.music-niff
-application/vnd.musician
-application/vnd.netfpx
-application/vnd.noblenet-directory
-application/vnd.noblenet-sealer
-application/vnd.noblenet-web
-application/vnd.novadigm.EDM
-application/vnd.novadigm.EDX
-application/vnd.novadigm.EXT
-application/vnd.osa.netdeploy
-application/vnd.palm
-application/vnd.pg.format
-application/vnd.pg.osasli
-application/vnd.powerbuilder6
-application/vnd.powerbuilder6-s
-application/vnd.powerbuilder7
-application/vnd.powerbuilder7-s
-application/vnd.powerbuilder75
-application/vnd.powerbuilder75-s
-application/vnd.previewsystems.box
-application/vnd.publishare-delta-tree
-application/vnd.pvi.ptid1
-application/vnd.pwg-xhtml-print+xml
-application/vnd.rapid
-application/vnd.s3sms
-application/vnd.seemail
-application/vnd.shana.informed.formdata
-application/vnd.shana.informed.formtemplate
-application/vnd.shana.informed.interchange
-application/vnd.shana.informed.package
-application/vnd.sss-cod
-application/vnd.sss-dtf
-application/vnd.sss-ntf
-application/vnd.street-stream
-application/vnd.svd
-application/vnd.swiftview-ics
-application/vnd.triscape.mxs
-application/vnd.trueapp
-application/vnd.truedoc
-application/vnd.tve-trigger
-application/vnd.ufdl
-application/vnd.uplanet.alert
-application/vnd.uplanet.alert-wbxml
-application/vnd.uplanet.bearer-choice-wbxml
-application/vnd.uplanet.bearer-choice
-application/vnd.uplanet.cacheop
-application/vnd.uplanet.cacheop-wbxml
-application/vnd.uplanet.channel
-application/vnd.uplanet.channel-wbxml
-application/vnd.uplanet.list
-application/vnd.uplanet.list-wbxml
-application/vnd.uplanet.listcmd
-application/vnd.uplanet.listcmd-wbxml
-application/vnd.uplanet.signal
-application/vnd.vcx
-application/vnd.vectorworks
-application/vnd.vidsoft.vidconference
-application/vnd.visio
-application/vnd.vividence.scriptfile
-application/vnd.wap.sic
-application/vnd.wap.slc
-application/vnd.wap.wbxml wbxml
-application/vnd.wap.wmlc wmlc
-application/vnd.wap.wmlscriptc wmlsc
-application/vnd.webturbo
-application/vnd.wrq-hp3000-labelled
-application/vnd.wt.stf
-application/vnd.xara
-application/vnd.xfdl
-application/vnd.yellowriver-custom-menu
-application/whoispp-query
-application/whoispp-response
-application/wita
-application/wordperfect5.1
-application/x-bcpio bcpio
-application/x-cdlink vcd
-application/x-chess-pgn pgn
-application/x-compress
-application/x-cpio cpio
-application/x-csh csh
-application/x-director dcr dir dxr
-application/x-dvi dvi
-application/x-futuresplash spl
-application/x-gtar gtar
-application/x-gzip
-application/x-hdf hdf
-application/x-javascript js
-application/x-koan skp skd skt skm
-application/x-latex latex
-application/x-netcdf nc cdf
-application/x-sh sh
-application/x-shar shar
-application/x-shockwave-flash swf
-application/x-stuffit sit
-application/x-sv4cpio sv4cpio
-application/x-sv4crc sv4crc
-application/x-tar tar
-application/x-tcl tcl
-application/x-tex tex
-application/x-texinfo texinfo texi
-application/x-troff t tr roff
-application/x-troff-man man
-application/x-troff-me me
-application/x-troff-ms ms
-application/x-ustar ustar
-application/x-wais-source src
-application/x400-bp
-application/xml
-application/xml-dtd
-application/xml-external-parsed-entity
-application/zip zip
-audio/32kadpcm
-audio/basic au snd
-audio/g.722.1
-audio/l16
-audio/midi mid midi kar
-audio/mp4a-latm
-audio/mpa-robust
-audio/mpeg mpga mp2 mp3
-audio/parityfec
-audio/prs.sid
-audio/telephone-event
-audio/tone
-audio/vnd.cisco.nse
-audio/vnd.cns.anp1
-audio/vnd.cns.inf1
-audio/vnd.digital-winds
-audio/vnd.everad.plj
-audio/vnd.lucent.voice
-audio/vnd.nortel.vbk
-audio/vnd.nuera.ecelp4800
-audio/vnd.nuera.ecelp7470
-audio/vnd.nuera.ecelp9600
-audio/vnd.octel.sbc
-audio/vnd.qcelp
-audio/vnd.rhetorex.32kadpcm
-audio/vnd.vmx.cvsd
-audio/x-aiff aif aiff aifc
-audio/x-mpegurl m3u
-audio/x-pn-realaudio ram rm
-audio/x-pn-realaudio-plugin rpm
-audio/x-realaudio ra
-audio/x-wav wav
-chemical/x-pdb pdb
-chemical/x-xyz xyz
-image/bmp bmp
-image/cgm
-image/g3fax
-image/gif gif
-image/ief ief
-image/jpeg jpeg jpg jpe
-image/naplps
-image/png png
-image/prs.btif
-image/prs.pti
-image/tiff tiff tif
-image/vnd.cns.inf2
-image/vnd.dwg
-image/vnd.dxf
-image/vnd.fastbidsheet
-image/vnd.fpx
-image/vnd.fst
-image/vnd.fujixerox.edmics-mmr
-image/vnd.fujixerox.edmics-rlc
-image/vnd.mix
-image/vnd.net-fpx
-image/vnd.svf
-image/vnd.wap.wbmp wbmp
-image/vnd.xiff
-image/x-cmu-raster ras
-image/x-portable-anymap pnm
-image/x-portable-bitmap pbm
-image/x-portable-graymap pgm
-image/x-portable-pixmap ppm
-image/x-rgb rgb
-image/x-xbitmap xbm
-image/x-xpixmap xpm
-image/x-xwindowdump xwd
-message/delivery-status
-message/disposition-notification
-message/external-body
-message/http
-message/news
-message/partial
-message/rfc822
-message/s-http
-model/iges igs iges
-model/mesh msh mesh silo
-model/vnd.dwf
-model/vnd.flatland.3dml
-model/vnd.gdl
-model/vnd.gs-gdl
-model/vnd.gtw
-model/vnd.mts
-model/vnd.vtu
-model/vrml wrl vrml
-multipart/alternative
-multipart/appledouble
-multipart/byteranges
-multipart/digest
-multipart/encrypted
-multipart/form-data
-multipart/header-set
-multipart/mixed
-multipart/parallel
-multipart/related
-multipart/report
-multipart/signed
-multipart/voice-message
-text/calendar
-text/css css
-text/directory
-text/enriched
-text/html html htm
-text/parityfec
-text/plain asc txt
-text/prs.lines.tag
-text/rfc822-headers
-text/richtext rtx
-text/rtf rtf
-text/sgml sgml sgm
-text/tab-separated-values tsv
-text/t140
-text/uri-list
-text/vnd.DMClientScript
-text/vnd.IPTC.NITF
-text/vnd.IPTC.NewsML
-text/vnd.abc
-text/vnd.curl
-text/vnd.flatland.3dml
-text/vnd.fly
-text/vnd.fmi.flexstor
-text/vnd.in3d.3dml
-text/vnd.in3d.spot
-text/vnd.latex-z
-text/vnd.motorola.reflex
-text/vnd.ms-mediapackage
-text/vnd.wap.si
-text/vnd.wap.sl
-text/vnd.wap.wml wml
-text/vnd.wap.wmlscript wmls
-text/x-setext etx
-text/x-server-parsed-html shtml
-text/xml xml xsl
-text/xml-external-parsed-entity
-video/mp4v-es
-video/mpeg mpeg mpg mpe
-video/parityfec
-video/pointer
-video/quicktime qt mov
-video/vnd.fvt
-video/vnd.motorola.video
-video/vnd.motorola.videop
-video/vnd.mpegurl mxu
-video/vnd.mts
-video/vnd.nokia.interleaved-multimedia
-video/vnd.vivo
-video/x-msvideo avi
-video/x-sgi-movie movie
-x-conference/x-cooltalk ice
-
-
-
diff --git a/lib/percept/priv/server_root/css/percept.css b/lib/percept/priv/server_root/css/percept.css
deleted file mode 100644
index 2d0734b6b6..0000000000
--- a/lib/percept/priv/server_root/css/percept.css
+++ /dev/null
@@ -1,162 +0,0 @@
-/*
- * %CopyrightBegin%
- *
- * Copyright Ericsson AB 2007-2016. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * %CopyrightEnd%
- */
-
-/* Globals */
-html, body {
- margin: 0;
- padding: 0;
- font: 12px Verdana;
- background: #7a83a2;
-}
-
-table {
- border-collapse: collapse;
- /*width: 100%;*/
-}
-
-tr.even {
- background-color: #ffffff; color: black;
-}
-
-tr.odd {
- background-color: #def2ef; color: black;
-}
-
-td {
-    vertical-align: top;
- text-align: right;
- font: 14px Verdana;
-}
-
-th {
- letter-spacing: 2px;
- text-align: right;
- padding: 4px 4px 4px 8px;
-}
-
-a {
- color: yellow;
- text-decoration: none;
-}
-
-a:hover {
- text-decoration: underline;
-}
-
-td a {
- color: #101010;
-}
-
-img {
- border: 0;
-}
-
-
-/* Header and footer stuff */
-
-#header {
- font: bold 24px Verdana;
- padding-left: 156px;
- padding-right: 156px;
- padding-top: 10px;
- padding-bottom: 10px;
- height: 74px;
- text-align: right;
- background: #7a83a2;
-}
-
-#footer {
- font: 12px Verdana;
- position: relative;
- padding: 5px;
- border-top: 1px solid black;
- clear:left;
-}
-
-
-/* Content stuff */
-
-#content {
- background: #fefefe;
- position: relative;
- padding: 5px 25px 5px 25px;
- margin: 0px 60px 0px 60px;
- border-top: 1px solid #383a32;
- border-left: 1px solid #383a32;
- border-right: 1px solid #383a32;
- border-bottom: 1px solid #383a32;
-}
-
-.table_header {
-    text-decoration: underline;
- width: 100%;
-}
-
-/* Menu */
-
-#menu {
- margin: 0px 60px 0px 60px;
- height: 30px;
- padding-right: 0px;
- background-image: url('../images/nav.png');
- background-repeat: repeat-x;
- padding-top: 0px;
- border-top: 1px solid #383a32;
- border-left: 1px solid #383a32;
- border-right: 1px solid #383a32;
-}
-
-.menu_tabs {
- overflow: hidden;
-}
-
-.menu_tabs ul {
- margin: 0;
- padding: 0;
- font: bold 12px Verdana;
- list-style-type: none;
-}
-
-.menu_tabs li {
- display: inline;
- margin: 0;
- background-repeat: repeat-x;
-}
-
-.menu_tabs li a {
- float: right;
- display: block;
- text-decoration: none;
- margin: 0;
-    padding: 8px 7px 8px 3px;
- border-left: 1px solid #777487;
-}
-
-.menu_tabs li a:visited {
- color: black;
-}
-
-.menu_tabs li a:hover {
- background: #cae8ea;
-}
-
-.menu_tabs li a:focus, .menu_tabs li a:active {
- background: yellow;
-}
diff --git a/lib/percept/priv/server_root/htdocs/index.html b/lib/percept/priv/server_root/htdocs/index.html
deleted file mode 100644
index f7322cba89..0000000000
--- a/lib/percept/priv/server_root/htdocs/index.html
+++ /dev/null
@@ -1,41 +0,0 @@
-<!--
- %CopyrightBegin%
-
- Copyright Ericsson AB 2007-2016. All Rights Reserved.
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
- %CopyrightEnd%
--->
-<html>
-<head>
- <title>percept</title>
- <meta http-equiv="Content-Type" content="text/html;charset=iso-8859-1" />
- <link href="/css/percept.css" rel="stylesheet" type="text/css">
-</head>
-
-<body>
- <div id="header"><a href=/index.html>percept</a></div>
- <div id="menu" class="menu_tabs">
- <ul>
- <li><a href=/cgi-bin/percept_html/databases_page>databases</a></li>
- <li><a href=/cgi-bin/percept_html/processes_page>processes</a></li>
- <li><a href=/cgi-bin/percept_html/page>overview</a></li>
- </ul>
- </div>
- <div id="content">
- <p>Percept - Erlang Concurrency Profiling Tool</p>
- </div>
-</body>
-</html>
-
diff --git a/lib/percept/priv/server_root/images/nav.png b/lib/percept/priv/server_root/images/nav.png
deleted file mode 100644
index d136e806b1..0000000000
--- a/lib/percept/priv/server_root/images/nav.png
+++ /dev/null
Binary files differ
diff --git a/lib/percept/priv/server_root/images/white.png b/lib/percept/priv/server_root/images/white.png
deleted file mode 100644
index 94381b429d..0000000000
--- a/lib/percept/priv/server_root/images/white.png
+++ /dev/null
Binary files differ
diff --git a/lib/percept/priv/server_root/scripts/percept_area_select.js b/lib/percept/priv/server_root/scripts/percept_area_select.js
deleted file mode 100644
index 83fbb02c92..0000000000
--- a/lib/percept/priv/server_root/scripts/percept_area_select.js
+++ /dev/null
@@ -1,182 +0,0 @@
-/*
- * %CopyrightBegin%
- *
- * Copyright Ericsson AB 2007-2016. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * %CopyrightEnd%
- */
-
-function size_image(img, src) {
- percept_content = document.getElementById("content");
- var width = percept_content.offsetWidth - 120;
- var imgfile = "/cgi-bin/percept_graph/" + src + "&width=" + width;
- img.src = imgfile;
- img.onload = '';
-}
-
-function load_image() {
- var percept_graph = document.getElementById("percept_graph");
- if (percept_graph) {
- percept_content = document.getElementById("content");
- var width = percept_content.offsetWidth - 50;
- var height = max(screen.height - 550, 600);
- var rmin = document.form_area.data_min.value;
- var rmax = document.form_area.data_max.value;
-
- percept_graph.style.backgroundImage = "url('/cgi-bin/percept_graph/graph" +
- "?range_min=" + rmin +
- "&range_max=" + rmax +
- "&width=" + width +
- "&height=" + height + "')";
- percept_graph.style.width = width;
- percept_graph.style.height = height;
- }
-}
-
-function select_image() {
- var Graph = document.getElementById("percept_graph");
- if (Graph) {
- var GraphIndex = document.form_area.graph_select.selectedIndex;
- var GraphSelectValue = document.form_area.graph_select.options[GraphIndex].value;
- Graph.style.backgroundImage = "url('" + GraphSelectValue +"')";
- }
-}
-
-function select_down(event) {
- var Graf = document.getElementById("percept_graph");
- var Area = document.getElementById("percept_areaselect");
- var x = event.offsetX?(event.offsetX):event.pageX-Graf.offsetLeft;
- x = x - 60;
-
- var width = Graf.offsetWidth;
- var height = Graf.offsetHeight;
- var margin = 20;
-
- var Xmin = document.form_area.data_min.value;
- var Xmax = document.form_area.data_max.value;
-
- // Trim edges
-
- if ( x < margin ) {
- x = margin;
- }
-
- if ( x > width - margin ) {
- x = width - margin;
- }
-
- Area.style.left = x;
- Area.style.top = height - margin;
- Area.style.width = 1;
- Area.style.height = margin;
- Area.moving = true;
- Area.bgcolor = "#00ff00";
- Area.style.visibility = "visible";
-    Area.style.borderRight = "1px solid #000";
-    Area.style.borderLeft = "1px solid #000";
- Area.style.opacity = 0.65;
- Area.style.filter = 'alpha(opacity=65)';
- var RangeMin = convert_image2graph(x, Xmin, Xmax, margin, width - margin);
- if (RangeMin == 0) document.form_area.range_min.value = 0.0;
- else document.form_area.range_min.value = RangeMin;
-}
-
- function select_move(event) {
- var Graf = document.getElementById("percept_graph");
- var Area = document.getElementById("percept_areaselect");
- var x = event.offsetX?(event.offsetX):event.pageX-Graf.offsetLeft;
- x = x - 60;
- if (Area.moving == true) {
-
- var width = Graf.offsetWidth;
- var height = Graf.offsetHeight;
- var margin = 20;
- var Xmin = document.form_area.data_min.value;
- var Xmax = document.form_area.data_max.value;
-
- // Trim edges
-
- if ( x < margin ) {
- x = margin;
- }
-
- if ( x > width - margin ) {
- x = width - margin;
- }
-
- var x0 = min(x, Area.offsetLeft);
- var x1 = max(x, Area.offsetLeft);
- var w = (x1 - x0);
- Area.style.left = x0;
- Area.style.width = w;
- var RangeMin = convert_image2graph(x0, Xmin, Xmax, margin, width - margin);
- var RangeMax = convert_image2graph(x1, Xmin, Xmax, margin, width - margin);
- Area.style.visibility = "visible";
-
- if (RangeMin == 0) document.form_area.range_min.value = 0.0;
- else document.form_area.range_min.value = RangeMin;
- if (RangeMax == 0) document.form_area.range_max.value = 0.0;
- else document.form_area.range_max.value = RangeMax;
- }
-}
-
-function select_up(event) {
- var Graf = document.getElementById("percept_graph");
- var Area = document.getElementById("percept_areaselect");
- var x = event.offsetX?(event.offsetX):event.pageX-Graf.offsetLeft;
-
- x = x - 60;
- var width = Graf.offsetWidth;
- var height = Graf.offsetHeight;
- var margin = 20;
- var Xmin = document.form_area.data_min.value;
- var Xmax = document.form_area.data_max.value;
-
- // Trim edges
-
- if ( x < margin ) {
- x = margin;
- }
-
- if ( x > width - margin ) {
- x = width - margin;
- }
-
-    var w = (x - Area.offsetLeft);
-
- Area.moving = false;
- Area.style.width = w;
- var RangeMax = convert_image2graph(x, Xmin, Xmax, margin, width - margin);
- if (RangeMax == 0) document.form_area.range_max.value = 0.0;
- else document.form_area.range_max.value = RangeMax;
-}
-
-function min(A, B) {
- if (A > B) return B;
- else return A;
-}
-
-function max(A,B) {
- if (A > B) return A;
- else return B;
-}
-
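-// Maps a pixel coordinate X in the drawn interval [X0, X1] linearly onto the
-// data range [Xmin, Xmax]; used to turn a mouse position into a graph value.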
-function convert_image2graph(X, Xmin, Xmax, X0, X1) {
- var ImageWidth = X1 - X0;
- var RangeWidth = Xmax - Xmin;
- var DX = RangeWidth/ImageWidth;
- var Xprime = (X - X0)*DX + Xmin*1.0;
- return Xprime;
-}
diff --git a/lib/percept/priv/server_root/scripts/percept_error_handler.js b/lib/percept/priv/server_root/scripts/percept_error_handler.js
deleted file mode 100644
index dad8f2b566..0000000000
--- a/lib/percept/priv/server_root/scripts/percept_error_handler.js
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * %CopyrightBegin%
- *
- * Copyright Ericsson AB 2007-2016. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * %CopyrightEnd%
- */
-
-var onerror=handleErr;
-
-function handleErr(msg,url,l) {
- var txt = "Error: " + msg + "\nURL: " + url + "\nCode line: " + l;
- alert(txt);
-}
diff --git a/lib/percept/priv/server_root/scripts/percept_select_all.js b/lib/percept/priv/server_root/scripts/percept_select_all.js
deleted file mode 100644
index c8eb966059..0000000000
--- a/lib/percept/priv/server_root/scripts/percept_select_all.js
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * %CopyrightBegin%
- *
- * Copyright Ericsson AB 2007-2016. All Rights Reserved.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- * %CopyrightEnd%
- */
-
-function selectall() {
- for (var i = 0; i < document.process_select.elements.length; i++) {
- var e = document.process_select.elements[i];
- if ((e.name != 'select_all') && (e.type == 'checkbox')) {
- e.checked = document.process_select.select_all.checked;
- }
- }
-}
diff --git a/lib/percept/src/Makefile b/lib/percept/src/Makefile
deleted file mode 100644
index b2ec87d08c..0000000000
--- a/lib/percept/src/Makefile
+++ /dev/null
@@ -1,108 +0,0 @@
-#
-# %CopyrightBegin%
-#
-# Copyright Ericsson AB 2007-2016. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# %CopyrightEnd%
-
-include $(ERL_TOP)/make/target.mk
-include $(ERL_TOP)/make/$(TARGET)/otp.mk
-
-# ----------------------------------------------------
-# Application version
-# ----------------------------------------------------
-include ../vsn.mk
-VSN=$(PERCEPT_VSN)
-
-# ----------------------------------------------------
-# Release directory specification
-# ----------------------------------------------------
-RELSYSDIR = $(RELEASE_PATH)/lib/percept-$(VSN)
-
-# ----------------------------------------------------
-# Common Macros
-# ----------------------------------------------------
-
-MODULES= \
- egd \
- egd_png \
- egd_font \
- egd_render \
- egd_primitives \
- percept \
- percept_db \
- percept_html \
- percept_image \
- percept_graph \
- percept_analyzer
-
-
-#HRL_FILES= ../include/
-
-INTERNAL_HRL_FILES= egd.hrl percept.hrl
-
-ERL_FILES= $(MODULES:%=%.erl)
-
-TARGET_FILES= $(MODULES:%=$(EBIN)/%.$(EMULATOR)) $(APP_TARGET) $(APPUP_TARGET)
-
-APP_FILE= percept.app
-
-APP_SRC= $(APP_FILE).src
-APP_TARGET= $(EBIN)/$(APP_FILE)
-
-APPUP_FILE= percept.appup
-
-APPUP_SRC= $(APPUP_FILE).src
-APPUP_TARGET= $(EBIN)/$(APPUP_FILE)
-
-# ----------------------------------------------------
-# FLAGS
-# ----------------------------------------------------
-ERL_COMPILE_FLAGS += +warn_unused_vars -I../include
-
-# ----------------------------------------------------
-# Targets
-# ----------------------------------------------------
-
-debug opt: $(TARGET_FILES)
-
-clean:
- rm -f $(TARGET_FILES)
- rm -f errs core *~
-
-$(APP_TARGET): $(APP_SRC) ../vsn.mk
- $(vsn_verbose)sed -e 's;%VSN%;$(VSN);' $< > $@
-
-$(APPUP_TARGET): $(APPUP_SRC) ../vsn.mk
- $(vsn_verbose)sed -e 's;%VSN%;$(VSN);' $< > $@
-
-docs:
-
-# ----------------------------------------------------
-# Release Target
-# ----------------------------------------------------
-include $(ERL_TOP)/make/otp_release_targets.mk
-
-release_spec: opt
- $(INSTALL_DIR) "$(RELSYSDIR)/src"
- $(INSTALL_DATA) $(ERL_FILES) "$(RELSYSDIR)/src"
- $(INSTALL_DATA) $(INTERNAL_HRL_FILES) "$(RELSYSDIR)/src"
-# $(INSTALL_DIR) "$(RELSYSDIR)/include"
-# $(INSTALL_DATA) $(HRL_FILES) "$(RELSYSDIR)/include"
- $(INSTALL_DIR) "$(RELSYSDIR)/ebin"
- $(INSTALL_DATA) $(TARGET_FILES) "$(RELSYSDIR)/ebin"
-
-release_docs_spec:
-
diff --git a/lib/percept/src/egd.erl b/lib/percept/src/egd.erl
deleted file mode 100644
index fe52da71f1..0000000000
--- a/lib/percept/src/egd.erl
+++ /dev/null
@@ -1,275 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2008-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-
-%%
-%% @doc egd - Erlang graphical drawer
-%%
-%%
-
--module(egd).
-
--export([create/2, destroy/1, information/1]).
--export([text/5, line/4, color/1, color/2]).
--export([rectangle/4, filledRectangle/4, filledEllipse/4]).
--export([arc/4, arc/5]).
--export([render/1, render/2, render/3]).
-
--export([filledTriangle/5, polygon/3]).
-
--export([save/2]).
-
--include("egd.hrl").
-
-%%==========================================================================
-%% Type definitions
-%%==========================================================================
-
-%% @type egd_image()
-%% @type font()
-%% @type point() = {integer(), integer()}
-%% @type color()
-%% @type render_option() = {render_engine, opaque} | {render_engine, alpha}
-
--type egd_image() :: pid().
--type point() :: {non_neg_integer(), non_neg_integer()}.
--type render_option() :: {'render_engine', 'opaque'} | {'render_engine', 'alpha'}.
--type color() :: {float(), float(), float(), float()}.
-
-%%==========================================================================
-%% Interface functions
-%%==========================================================================
-
-%% @spec create(integer(), integer()) -> egd_image()
-%% @doc Creates an image area and returns its reference.
-
--spec create(Width :: integer(), Height :: integer()) -> egd_image().
-
-create(Width,Height) ->
- spawn_link(fun() -> init(trunc(Width),trunc(Height)) end).
-
-
-%% @spec destroy(egd_image()) -> ok
-%% @doc Destroys the image.
-
--spec destroy(Image :: egd_image()) -> ok.
-
-destroy(Image) ->
- cast(Image, destroy).
-
-
-%% @spec render(egd_image()) -> binary()
-%% @equiv render(Image, png, [{render_engine, opaque}])
-
--spec render(Image :: egd_image()) -> binary().
-
-render(Image) ->
- render(Image, png, [{render_engine, opaque}]).
-
-%% @spec render(egd_image(), png | raw_bitmap) -> binary()
-%% @equiv render(Image, Type, [{render_engine, opaque}])
-
-render(Image, Type) ->
- render(Image, Type, [{render_engine, opaque}]).
-
-%% @spec render(egd_image(), png | raw_bitmap, [render_option()]) -> binary()
-%% @doc Renders a binary from the primitives specified by egd_image(). The
-%% binary can either be a raw bitmap with RGB triplets or a binary in PNG
-%% format.
-
--spec render(
- Image :: egd_image(),
- Type :: 'png' | 'raw_bitmap' | 'eps',
- Options :: [render_option()]) -> binary().
-
-render(Image, Type, Options) ->
- {render_engine, RenderType} = proplists:lookup(render_engine, Options),
- call(Image, {render, Type, RenderType}).
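-
-%% A rough usage sketch (the file name below is only an example):
-%%
-%%   Im  = egd:create(300, 200),
-%%   ok  = egd:line(Im, {10, 10}, {290, 190}, egd:color(black)),
-%%   Png = egd:render(Im, png, [{render_engine, opaque}]),
-%%   ok  = egd:save(Png, "graph.png"),
-%%   ok  = egd:destroy(Im).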
-
-
-%% @spec information(egd_image()) -> ok
-%% @hidden
-%% @doc Writes out information about the image. This is mainly a debug
-%% feature.
-
-information(Pid) ->
- cast(Pid, information).
-
-%% @spec line(egd_image(), point(), point(), color()) -> ok
-%% @doc Creates a line object from P1 to P2 in the image.
-
--spec line(
- Image :: egd_image(),
- P1 :: point(),
- P2 :: point(),
- Color :: color()) -> 'ok'.
-
-line(Image, P1, P2, Color) ->
- cast(Image, {line, P1, P2, Color}).
-
-%% @spec color( Value | Name ) -> color()
-%% where
-%% Value = {byte(), byte(), byte()} | {byte(), byte(), byte(), byte()}
-%% Name = black | silver | gray | white | maroon | red | purple | fuchia | green | lime | olive | yellow | navy | blue | teal | aqua
-%% @doc Creates a color reference.
-
--spec color(Value :: {byte(), byte(), byte()} | {byte(), byte(), byte(), byte()} | atom()) ->
- color().
-
-color(Color) ->
- egd_primitives:color(Color).
-
-%% @spec color(egd_image(), {byte(), byte(), byte()}) -> color()
-%% @doc Creates a color reference.
-%% @hidden
-
-color(_Image, Color) ->
- egd_primitives:color(Color).
-
-%% @spec text(egd_image(), point(), font(), string(), color()) -> ok
-%% @doc Creates a text object.
-
-text(Image, P, Font, Text, Color) ->
- cast(Image, {text, P, Font, Text, Color}).
-
-%% @spec rectangle(egd_image(), point(), point(), color()) -> ok
-%% @doc Creates a rectangle object.
-
-rectangle(Image, P1, P2, Color) ->
- cast(Image, {rectangle, P1, P2, Color}).
-
-%% @spec filledRectangle(egd_image(), point(), point(), color()) -> ok
-%% @doc Creates a filled rectangle object.
-
-filledRectangle(Image, P1, P2, Color) ->
- cast(Image, {filled_rectangle, P1, P2, Color}).
-
-%% @spec filledEllipse(egd_image(), point(), point(), color()) -> ok
-%% @doc Creates a filled ellipse object.
-
-filledEllipse(Image, P1, P2, Color) ->
- cast(Image, {filled_ellipse, P1, P2, Color}).
-
-%% @spec filledTriangle(egd_image(), point(), point(), point(), color()) -> ok
-%% @hidden
-%% @doc Creates a filled triangle object.
-
-filledTriangle(Image, P1, P2, P3, Color) ->
- cast(Image, {filled_triangle, P1, P2, P3, Color}).
-
-%% @spec polygon(egd_image(), [point()], color()) -> ok
-%% @hidden
-%% @doc Creates a filled polygon object.
-
-polygon(Image, Pts, Color) ->
- cast(Image, {polygon, Pts, Color}).
-
-%% @spec arc(egd_image(), point(), point(), color()) -> ok
-%% @hidden
-%% @doc Creates an arc whose radius is derived from the bounding-box corners.
-
-arc(Image, P1, P2, Color) ->
- cast(Image, {arc, P1, P2, Color}).
-
-%% @spec arc(egd_image(), point(), point(), integer(), color()) -> ok
-%% @hidden
-%% @doc Creates an arc.
-
-arc(Image, P1, P2, D, Color) ->
- cast(Image, {arc, P1, P2, D, Color}).
-
-%% @spec save(binary(), string()) -> ok
-%% @doc Saves the binary to file.
-
-save(Binary, Filename) when is_binary(Binary) ->
- ok = file:write_file(Filename, Binary),
- ok.
-% ---------------------------------
-% Aux functions
-% ---------------------------------
-
-cast(Pid, Command) ->
- Pid ! {egd, self(), Command},
- ok.
-
-call(Pid, Command) ->
- Pid ! {egd, self(), Command},
- receive {egd, Pid, Result} -> Result end.
-
-% ---------------------------------
-% Server loop
-% ---------------------------------
-
-init(W,H) ->
- Image = egd_primitives:create(W,H),
- loop(Image).
-
-loop(Image) ->
- receive
- % Quitting
- {egd, _Pid, destroy} -> ok;
-
- % Rendering
- {egd, Pid, {render, BinaryType, RenderType}} ->
- case BinaryType of
- raw_bitmap ->
- Bitmap = egd_render:binary(Image, RenderType),
- Pid ! {egd, self(), Bitmap},
- loop(Image);
- eps ->
- Eps = egd_render:eps(Image),
- Pid ! {egd, self(), Eps},
- loop(Image);
- png ->
- Bitmap = egd_render:binary(Image, RenderType),
- Png = egd_png:binary(
- Image#image.width,
- Image#image.height,
- Bitmap),
- Pid ! {egd, self(), Png},
- loop(Image);
- Unhandled ->
- Pid ! {egd, self(), {error, {format, Unhandled}}},
- loop(Image)
- end;
-
- % Drawing primitives
- {egd, _Pid, {line, P1, P2, C}} ->
- loop(egd_primitives:line(Image, P1, P2, C));
- {egd, _Pid, {text, P, Font, Text, C}} ->
- loop(egd_primitives:text(Image, P, Font, Text, C));
- {egd, _Pid, {filled_ellipse, P1, P2, C}} ->
- loop(egd_primitives:filledEllipse(Image, P1, P2, C));
- {egd, _Pid, {filled_rectangle, P1, P2, C}} ->
- loop(egd_primitives:filledRectangle(Image, P1, P2, C));
- {egd, _Pid, {filled_triangle, P1, P2, P3, C}} ->
- loop(egd_primitives:filledTriangle(Image, P1, P2, P3, C));
- {egd, _Pid, {polygon, Pts, C}} ->
- loop(egd_primitives:polygon(Image, Pts, C));
- {egd, _Pid, {arc, P1, P2, C}} ->
- loop(egd_primitives:arc(Image, P1, P2, C));
- {egd, _Pid, {arc, P1, P2, D, C}} ->
- loop(egd_primitives:arc(Image, P1, P2, D, C));
- {egd, _Pid, {rectangle, P1, P2, C}} ->
- loop(egd_primitives:rectangle(Image, P1, P2, C));
- {egd, _Pid, information} ->
- egd_primitives:info(Image),
- loop(Image);
- _ ->
- loop(Image)
- end.
diff --git a/lib/percept/src/egd.hrl b/lib/percept/src/egd.hrl
deleted file mode 100644
index fc0a7e10ee..0000000000
--- a/lib/percept/src/egd.hrl
+++ /dev/null
@@ -1,45 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2008-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-
--type rgba_float() :: {float(), float(), float(), float()}.
--type rgba_byte() :: {byte(), byte(), byte(), byte()}.
--type rgb() :: {byte(), byte(), byte()}.
-
--record(image_object, {
- type,
- points = [],
- span,
- internals,
- intervals,
- color}). % RGBA in float values
-
--record(image, {
- width,
- height,
- objects = [],
- background = {1.0,1.0,1.0,1.0},
- image}).
-
--define(debug, void).
-
--ifdef(debug).
--define(dbg(X), io:format("DEBUG: ~p:~p~n",[?MODULE, X])).
--else.
--define(dbg(X), ok).
--endif.
diff --git a/lib/percept/src/egd_font.erl b/lib/percept/src/egd_font.erl
deleted file mode 100644
index ef1cc434df..0000000000
--- a/lib/percept/src/egd_font.erl
+++ /dev/null
@@ -1,173 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2008-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-
-%%
-%% @doc egd_font
-%%
-
--module(egd_font).
-
--export([load/1, size/1, glyph/2]).
--include("egd.hrl").
-
-%% Font representation in the ETS table
-%% egd_font_table
-%%
-%% Information:
-%% {Key, Description, Size}
-%% Key :: {Font :: atom(), information}
-%% Description :: any(), Description header from font file
-%% Size :: {W :: integer(), H :: integer()}
-%%
-%% Glyphs:
-%% {Key, Translation, LSs} where
-%% Key :: {Font :: atom(), Code :: integer()}, Code = glyph char code
-%% Translation :: {
-%% W :: integer(), % BBx width
-%% H :: integer(), % BBx height
-%% X0 :: integer(), % X start
-%% Y0 :: integer(), % Y start
-%% Xm :: integer(), % Glyph X move when drawing
-%% }
-%% LSs :: [[{Xl :: integer(), Xr :: integer()}]]
-%% The outer list runs over the glyph height (top to bottom); each inner
-%% list holds the line spans for that row's horizontal pixels.
-%%
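-%% For example, a (hypothetical) entry for character $A in a 6x11 pixel font
-%% could look like:
-%%   {{my_font, $A}, {6, 11, 0, 0, 7}, [ [{1,4}], [{0,5}], ... ]}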
-
-%%==========================================================================
-%% Interface functions
-%%==========================================================================
-
-size(Font) ->
- [{_Key, _Description, Size}] = ets:lookup(egd_font_table,{Font,information}),
- Size.
-
-glyph(Font, Code) ->
- [{_Key, Translation, LSs}] = ets:lookup(egd_font_table,{Font,Code}),
- {Translation, LSs}.
-
-load(Filename) ->
- {ok, Bin} = file:read_file(Filename),
- Font = erlang:binary_to_term(Bin),
- load_font_header(Font).
-
-%%==========================================================================
-%% Internal functions
-%%==========================================================================
-
-%% ETS handler functions
-
-initialize_table() ->
- egd_font_table = ets:new(egd_font_table, [named_table, ordered_set, public]),
- ok.
-
-glyph_insert(Font, Code, Translation, LSs) ->
- Element = {{Font, Code}, Translation, LSs},
- ets:insert(egd_font_table, Element).
-
-font_insert(Font, Description, Dimensions) ->
- Element = {{Font, information}, Description, Dimensions},
- ets:insert(egd_font_table, Element).
-
-%% Font loader functions
-
-is_font_loaded(Font) ->
- try
- case ets:lookup(egd_font_table, {Font, information}) of
- [] -> false;
- _ -> true
- end
- catch
- error:_ ->
- initialize_table(),
- false
- end.
-
-
-load_font_header({_Type, _Version, Font}) ->
- load_font_body(Font).
-
-load_font_body({Key,Desc,W,H,Glyphs,Bitmaps}) ->
- case is_font_loaded(Key) of
- true -> Key;
- false ->
- % insert dimensions
- font_insert(Key, Desc, {W,H}),
- parse_glyphs(Glyphs, Bitmaps, Key),
- Key
- end.
-
-parse_glyphs([], _ , _Key) -> ok;
-parse_glyphs([Glyph|Glyphs], Bs, Key) ->
- {Code, Translation, LSs} = parse_glyph(Glyph, Bs),
- glyph_insert(Key, Code, Translation, LSs),
- parse_glyphs(Glyphs, Bs, Key).
-
-parse_glyph({Code,W,H,X0,Y0,Xm,Offset}, Bitmasks) ->
- BytesPerLine = ((W+7) div 8),
- NumBytes = BytesPerLine*H,
- <<_:Offset/binary,Bitmask:NumBytes/binary,_/binary>> = Bitmasks,
- LSs = render_glyph(W,H,X0,Y0,Xm,Bitmask),
- {Code, {W,H,X0,Y0,Xm}, LSs}.
-
-render_glyph(W, H, X0, Y0, Xm, Bitmask) ->
- render_glyph(W,{0,H},X0,Y0,Xm,Bitmask, []).
-render_glyph(_W, {H,H}, _X0, _Y0, _Xm, _Bitmask, Out) -> Out;
-render_glyph(W, {Hi,H}, X0, Y0,Xm, Bitmask , LSs) ->
- N = ((W+7) div 8),
- O = N*Hi,
- <<_:O/binary, Submask/binary>> = Bitmask,
- LS = render_glyph_horizontal(
- Submask, % line glyph bitmask
- {down, W - 1}, % loop state
- W - 1, % Width
- []), % Linespans
- render_glyph(W,{Hi+1,H},X0,Y0,Xm, Bitmask, [LS|LSs]).
-
-render_glyph_horizontal(Value, {Pr, Px}, 0, Spans) ->
- Cr = bit_spin(Value, 0),
- case {Pr,Cr} of
-        {up , up } -> % closure of interval since it is the last pixel
- [{0, Px}|Spans];
- {up , down} -> % closure of interval
- [{1, Px}|Spans];
- {down, up } -> % beginning of interval
- [{0, 0}|Spans];
- {down, down} -> % no change in interval
- Spans
- end;
-render_glyph_horizontal(Value, {Pr, Px}, Cx, Spans) ->
- Cr = bit_spin(Value, Cx),
- case {Pr,Cr} of
- {up , up } -> % no change in interval
- render_glyph_horizontal(Value, {Cr, Px}, Cx - 1, Spans);
- {up , down} -> % closure of interval
- render_glyph_horizontal(Value, {Cr, Cx}, Cx - 1, [{Cx+1,Px}|Spans]);
- {down, up } -> % beginning of interval
- render_glyph_horizontal(Value, {Cr, Cx}, Cx - 1, Spans);
- {down, down} -> % no change in interval
- render_glyph_horizontal(Value, {Cr, Px}, Cx - 1, Spans)
- end.
-
-bit_spin(Value, Cx) ->
- <<_:Cx, Bit:1, _/bits>> = Value,
- case Bit of
- 1 -> up;
- 0 -> down
- end.
diff --git a/lib/percept/src/egd_png.erl b/lib/percept/src/egd_png.erl
deleted file mode 100644
index fe660513b4..0000000000
--- a/lib/percept/src/egd_png.erl
+++ /dev/null
@@ -1,105 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2008-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-
-
-%% This code was originally written by Dan Gudmundsson for png-handling in
-%% wings3d (e3d__png).
-%%
-%% @doc egd
-%%
-
--module(egd_png).
-
--export([binary/3]).
-
--include("egd.hrl").
-
--define(MAGIC, 137,$P,$N,$G,$\r,$\n,26,$\n).
-
--define(GREYSCALE, 0).
--define(TRUECOLOUR, 2).
--define(INDEXED, 3).
--define(GREYSCALE_A, 4).
--define(TRUECOLOUR_A,6).
-
--define(MAX_WBITS,15).
-
--define(CHUNK, 240).
-
--define(get4p1(Idx),((Idx) bsr 4)).
--define(get4p2(Idx),((Idx) band 16#0F)).
--define(get2p1(Idx),((Idx) bsr 6)).
--define(get2p2(Idx),(((Idx) bsr 4) band 3)).
--define(get2p3(Idx),(((Idx) bsr 2) band 3)).
--define(get2p4(Idx),((Idx) band 3)).
--define(get1p1(Idx),((Idx) bsr 7)).
--define(get1p2(Idx),(((Idx) bsr 6) band 1)).
--define(get1p3(Idx),(((Idx) bsr 5) band 1)).
--define(get1p4(Idx),(((Idx) bsr 4) band 1)).
--define(get1p5(Idx),(((Idx) bsr 3) band 1)).
--define(get1p6(Idx),(((Idx) bsr 2) band 1)).
--define(get1p7(Idx),(((Idx) bsr 1) band 1)).
--define(get1p8(Idx),((Idx) band 1)).
-
-binary(W, H, Bitmap) when is_binary(Bitmap) ->
- Z = zlib:open(),
- Binary = bitmap2png(W, H, Bitmap, Z),
- zlib:close(Z),
- Binary.
-
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-% Begin Tainted
-
-bitmap2png(W, H, Bitmap,Z) ->
- HDR = create_chunk(<<"IHDR",W:32,H:32,8:8,(png_type(r8g8b8)):8,0:8,0:8,0:8>>,Z),
- DATA = create_chunk(["IDAT",compress_image(0,3*W,Bitmap,[])],Z),
- END = create_chunk(<<"IEND">>,Z),
- list_to_binary([?MAGIC,HDR,DATA,END]).
-
-compress_image(I,RowLen, Bin, Acc) ->
- Pos = I*RowLen,
- case Bin of
- <<_:Pos/binary,Row:RowLen/binary,_/binary>> ->
- Filtered = filter_row(Row,RowLen),
- compress_image(I+1,RowLen,Bin,[Filtered|Acc]);
- _ when Pos == size(Bin) ->
- Filtered = list_to_binary(lists:reverse(Acc)),
- Compressed = zlib:compress(Filtered),
- Compressed
- end.
-
-filter_row(Row,_RowLen) ->
- [0,Row].
-
-% dialyzer warnings
-%png_type(g8) -> ?GREYSCALE;
-%png_type(a8) -> ?GREYSCALE;
-%png_type(r8g8b8a8) -> ?TRUECOLOUR_A;
-png_type(r8g8b8) -> ?TRUECOLOUR.
-
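-% A PNG chunk is <<Length:32, Type:4/binary, Data/binary, CRC:32>>. Here the
-% 4-byte type tag ("IHDR", "IDAT" or "IEND") is already at the head of Bin,
-% so the length field is size(Bin) - 4 and the CRC covers type plus data.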
-create_chunk(Bin,Z) when is_list(Bin) ->
- create_chunk(list_to_binary(Bin),Z);
-create_chunk(Bin,Z) when is_binary(Bin) ->
- Sz = size(Bin)-4,
- Crc = zlib:crc32(Z,Bin),
- <<Sz:32,Bin/binary,Crc:32>>.
-
-% End tainted
diff --git a/lib/percept/src/egd_primitives.erl b/lib/percept/src/egd_primitives.erl
deleted file mode 100644
index b64189c552..0000000000
--- a/lib/percept/src/egd_primitives.erl
+++ /dev/null
@@ -1,412 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2008-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-
-%%
-%% @doc egd_primitives
-%%
-
-
--module(egd_primitives).
--export([create/2,
- color/1,
- pixel/3,
- polygon/3,
- line/4,
- line/5,
- arc/4,
- arc/5,
- rectangle/4,
- filledRectangle/4,
- filledEllipse/4,
- filledTriangle/5,
- text/5]).
-
--export([info/1,
- object_info/1,
- rgb_float2byte/1]).
-
--export([arc_to_edges/3,
- convex_hull/1,
- edges/1]).
-
--include("egd.hrl").
-
-%% API info
-info(I) ->
- W = I#image.width, H = I#image.height,
- io:format("Dimensions: ~p x ~p~n", [W,H]),
- io:format("Number of image objects: ~p~n", [length(I#image.objects)]),
- TotalPoints = info_objects(I#image.objects,0),
- io:format("Total points: ~p [~p %]~n", [TotalPoints, 100*TotalPoints/(W*H)]),
- ok.
-
-info_objects([],N) -> N;
-info_objects([O | Os],N) ->
-    Points = length(O#image_object.points),
-    info_objects(Os,N+Points).
-
-object_info(O) ->
- io:format("Object information: ~p~n", [O#image_object.type]),
- io:format("- Number of points: ~p~n", [length(O#image_object.points)]),
- io:format("- Bounding box: ~p~n", [O#image_object.span]),
- io:format("- Color: ~p~n", [O#image_object.color]),
- ok.
-
-%% interface functions
-
-line(#image{}=I, Sp, Ep, Color) ->
-    line(I, Sp, Ep, 1, Color).
-
-line(#image{objects=Os}=I, Sp, Ep, Wd, Color) ->
- I#image{objects=[#image_object{
- internals = Wd,
- type = line,
- points = [Sp, Ep],
- span = span([Sp, Ep]),
- color = Color}|Os]}.
-
-arc(I, {Sx,Sy} = Sp, {Ex,Ey} = Ep, Color) ->
- X = Ex - Sx,
- Y = Ey - Sy,
- R = math:sqrt(X*X + Y*Y)/2,
- arc(I, Sp, Ep, R, Color).
-
-arc(#image{objects=Os}=I, Sp, Ep, D, Color) ->
- SpanPts = lists:flatten([
- [{X + D, Y + D},
- {X + D, Y - D},
- {X - D, Y + D},
- {X - D, Y - D}] || {X,Y} <- [Sp,Ep]]),
-
- I#image{objects=[#image_object{
- internals = D,
- type = arc,
- points = [Sp, Ep],
- span = span(SpanPts),
- color = Color}|Os]}.
-
-pixel(#image{objects=Os}=I, Point, Color) ->
- I#image{objects=[#image_object{
- type = pixel,
- points = [Point],
- span = span([Point]),
- color = Color}|Os]}.
-
-rectangle(#image{objects=Os}=I, Sp, Ep, Color) ->
- I#image{objects=[#image_object{
- type = rectangle,
- points = [Sp, Ep],
- span = span([Sp, Ep]),
- color = Color}|Os]}.
-
-filledRectangle(#image{objects=Os}=I, Sp, Ep, Color) ->
- I#image{objects=[#image_object{
- type = filled_rectangle,
- points = [Sp, Ep],
- span = span([Sp, Ep]),
- color = Color}|Os]}.
-
-filledEllipse(#image{objects=Os}=I, Sp, Ep, Color) ->
- {X0,Y0,X1,Y1} = Span = span([Sp, Ep]),
- Xr = (X1 - X0)/2,
- Yr = (Y1 - Y0)/2,
- Xp = - X0 - Xr,
- Yp = - Y0 - Yr,
- I#image{objects=[#image_object{
- internals = {Xp,Yp, Xr*Xr,Yr*Yr},
- type = filled_ellipse,
- points = [Sp, Ep],
- span = Span,
- color = Color}|Os]}.
-
-filledTriangle(#image{objects=Os}=I, P1, P2, P3, Color) ->
- I#image{objects=[#image_object{
- type = filled_triangle,
- points = [P1,P2,P3],
- span = span([P1,P2,P3]),
- color = Color}|Os]}.
-
-polygon(#image{objects=Os}=I, Points, Color) ->
- I#image{objects=[#image_object{
- type = polygon,
- points = Points,
- span = span(Points),
- color = Color}|Os]}.
-
-text(#image{objects=Os}=I, {Xs,Ys}=Sp, Font, Text, Color) ->
- {FW,FH} = egd_font:size(Font),
- Length = length(Text),
- Ep = {Xs + Length*FW, Ys + FH + 5},
- I#image{objects=[#image_object{
- internals = {Font, Text},
- type = text_horizontal,
- points = [Sp],
- span = span([Sp,Ep]),
- color = Color}|Os]}.
-
-create(W, H) ->
- #image{width = W, height = H}.
-
-color(Color) when is_atom(Color) -> rgba_byte2float(name_to_color(Color, 255));
-color({Color, A}) when is_atom(Color) -> rgba_byte2float(name_to_color(Color, A));
-color({R,G,B}) -> rgba_byte2float({R,G,B, 255});
-color(C) -> rgba_byte2float(C).
-
-name_to_color(Color, A) ->
- case Color of
- %% HTML default colors
- black -> { 0, 0, 0, A};
- silver -> {192, 192, 192, A};
- gray -> {128, 128, 128, A};
-        white   -> {255, 255, 255, A};
-        maroon  -> {128,   0,   0, A};
-        red     -> {255,   0,   0, A};
- purple -> {128, 0, 128, A};
- fuchia -> {255, 0, 255, A};
- green -> { 0, 128, 0, A};
- lime -> { 0, 255, 0, A};
- olive -> {128, 128, 0, A};
- yellow -> {255, 255, 0, A};
- navy -> { 0, 0, 128, A};
- blue -> { 0, 0, 255, A};
-        teal    -> {  0, 128, 128, A};
-        aqua    -> {  0, 255, 255, A};
-
- %% HTML color extensions
- steelblue -> { 70, 130, 180, A};
-        royalblue -> { 65, 105, 225, A};
- cornflowerblue -> {100, 149, 237, A};
- lightsteelblue -> {176, 196, 222, A};
- mediumslateblue -> {123, 104, 238, A};
- slateblue -> {106, 90, 205, A};
- darkslateblue -> { 72, 61, 139, A};
- midnightblue -> { 25, 25, 112, A};
- darkblue -> { 0, 0, 139, A};
- mediumblue -> { 0, 0, 205, A};
- dodgerblue -> { 30, 144, 255, A};
- deepskyblue -> { 0, 191, 255, A};
- lightskyblue -> {135, 206, 250, A};
- skyblue -> {135, 206, 235, A};
- lightblue -> {173, 216, 230, A};
- powderblue -> {176, 224, 230, A};
- azure -> {240, 255, 255, A};
- lightcyan -> {224, 255, 255, A};
- paleturquoise -> {175, 238, 238, A};
- mediumturquoise -> { 72, 209, 204, A};
- lightseagreen -> { 32, 178, 170, A};
- darkcyan -> { 0, 139, 139, A};
- cadetblue -> { 95, 158, 160, A};
- darkturquoise -> { 0, 206, 209, A};
- cyan -> { 0, 255, 255, A};
- turquoise -> { 64, 224, 208, A};
- aquamarine -> {127, 255, 212, A};
- mediumaquamarine -> {102, 205, 170, A};
- darkseagreen -> {143, 188, 143, A};
- mediumseagreen -> { 60, 179, 113, A};
- seagreen -> { 46, 139, 87, A};
- darkgreen -> { 0, 100, 0, A};
- forestgreen -> { 34, 139, 34, A};
- limegreen -> { 50, 205, 50, A};
- chartreuse -> {127, 255, 0, A};
- lawngreen -> {124, 252, 0, A};
- greenyellow -> {173, 255, 47, A};
- yellowgreen -> {154, 205, 50, A};
- palegreen -> {152, 251, 152, A};
- lightgreen -> {144, 238, 144, A};
- springgreen -> { 0, 255, 127, A};
- darkolivegreen -> { 85, 107, 47, A};
- olivedrab -> {107, 142, 35, A};
- darkkhaki -> {189, 183, 107, A};
- darkgoldenrod -> {184, 134, 11, A};
- goldenrod -> {218, 165, 32, A};
- gold -> {255, 215, 0, A};
- khaki -> {240, 230, 140, A};
- palegoldenrod -> {238, 232, 170, A};
- blanchedalmond -> {255, 235, 205, A};
- moccasin -> {255, 228, 181, A};
- wheat -> {245, 222, 179, A};
- navajowhite -> {255, 222, 173, A};
- burlywood -> {222, 184, 135, A};
- tan -> {210, 180, 140, A};
- rosybrown -> {188, 143, 143, A};
- sienna -> {160, 82, 45, A};
- saddlebrown -> {139, 69, 19, A};
- chocolate -> {210, 105, 30, A};
- peru -> {205, 133, 63, A};
- sandybrown -> {244, 164, 96, A};
- darkred -> {139, 0, 0, A};
- brown -> {165, 42, 42, A};
- firebrick -> {178, 34, 34, A};
- indianred -> {205, 92, 92, A};
- lightcoral -> {240, 128, 128, A};
- salmon -> {250, 128, 114, A};
- darksalmon -> {233, 150, 122, A};
- lightsalmon -> {255, 160, 122, A};
- coral -> {255, 127, 80, A};
- tomato -> {255, 99, 71, A};
- darkorange -> {255, 140, 0, A};
- orange -> {255, 165, 0, A};
- orangered -> {255, 69, 0, A};
- crimson -> {220, 20, 60, A};
- deeppink -> {255, 20, 147, A};
- fuchsia -> {255, 0, 255, A};
- magenta -> {255, 0, 255, A};
- hotpink -> {255, 105, 180, A};
- lightpink -> {255, 182, 193, A};
- pink -> {255, 192, 203, A};
- palevioletred -> {219, 112, 147, A};
- mediumvioletred -> {199, 21, 133, A};
- darkmagenta -> {139, 0, 139, A};
- mediumpurple -> {147, 112, 219, A};
- blueviolet -> {138, 43, 226, A};
- indigo -> { 75, 0, 130, A};
- darkviolet -> {148, 0, 211, A};
- darkorchid -> {153, 50, 204, A};
- mediumorchid -> {186, 85, 211, A};
- orchid -> {218, 112, 214, A};
- violet -> {238, 130, 238, A};
- plum -> {221, 160, 221, A};
- thistle -> {216, 191, 216, A};
- lavender -> {230, 230, 250, A};
- ghostwhite -> {248, 248, 255, A};
- aliceblue -> {240, 248, 255, A};
- mintcream -> {245, 255, 250, A};
- honeydew -> {240, 255, 240, A};
- lemonchiffon -> {255, 250, 205, A};
- cornsilk -> {255, 248, 220, A};
- lightyellow -> {255, 255, 224, A};
- ivory -> {255, 255, 240, A};
- floralwhite -> {255, 250, 240, A};
- linen -> {250, 240, 230, A};
- oldlace -> {253, 245, 230, A};
- antiquewhite -> {250, 235, 215, A};
- bisque -> {255, 228, 196, A};
- peachpuff -> {255, 218, 185, A};
- papayawhip -> {255, 239, 213, A};
- beige -> {245, 245, 220, A};
- seashell -> {255, 245, 238, A};
- lavenderblush -> {255, 240, 245, A};
- mistyrose -> {255, 228, 225, A};
- snow -> {255, 250, 250, A};
- whitesmoke -> {245, 245, 245, A};
- gainsboro -> {220, 220, 220, A};
- lightgrey -> {211, 211, 211, A};
- darkgray -> {169, 169, 169, A};
- lightslategray -> {119, 136, 153, A};
- slategray -> {112, 128, 144, A};
- dimgray -> {105, 105, 105, A};
- darkslategray -> { 47, 79, 79, A};
- mediumspringgreen -> { 0, 250, 154, A};
- lightgoldenrodyellow -> {250, 250, 210, A}
- end.
-
-
-%%% Generic transformations
-
-%% arc_to_edges
-%% In:
-%% P1 :: point(),
-%% P2 :: point(),
-%% D :: float(),
-%% Out:
-%% Res :: [edges()]
-
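-%% Recursively subdivides the arc: the chord P0-P1 is bent outwards by D at
-%% its midpoint (perpendicular to the chord) and both halves are subdivided
-%% again with D/4, until the deflection is below half a pixel.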
-arc_to_edges(P0, P1, D) when abs(D) < 0.5 -> [{P0,P1}];
-arc_to_edges({X0,Y0}, {X1,Y1}, D) ->
- Vx = X1 - X0,
- Vy = Y1 - Y0,
-
- Mx = X0 + 0.5 * Vx,
- My = Y0 + 0.5 * Vy,
-
- % Scale V by Rs
- L = math:sqrt(Vx*Vx + Vy*Vy),
- Sx = D*Vx/L,
- Sy = D*Vy/L,
-
- Bx = trunc(Mx - Sy),
- By = trunc(My + Sx),
-
- arc_to_edges({X0,Y0}, {Bx,By}, D/4) ++ arc_to_edges({Bx,By}, {X1,Y1}, D/4).
-
-%% edges
-%% In:
-%% Pts :: [point()]
-%% Out:
-%% Edges :: [{point(),point()}]
-
-edges([]) -> [];
-edges([P0|_] = Pts) -> edges(Pts, P0,[]).
-edges([P1], P0, Out) -> [{P1,P0}|Out];
-edges([P1,P2|Pts],P0,Out) -> edges([P2|Pts],P0,[{P1,P2}|Out]).
-
-%% convex_hull
-%% In:
-%% Ps :: [point()]
-%% Out:
-%% Res :: [point()]
-
-convex_hull(Ps) ->
- P0 = lower_right(Ps),
- [P1|Ps1] = lists:sort(fun
- (P2,P1) ->
- case point_side({P1,P0},P2) of
- left -> true;
- _ -> false
- end
- end, Ps -- [P0]),
- convex_hull(Ps1, [P1,P0]).
-
-convex_hull([], W) -> W;
-convex_hull([P|Pts], [P1,P2|W]) ->
- case point_side({P2,P1},P) of
- left -> convex_hull(Pts, [P,P1,P2|W]);
- _ -> convex_hull([P|Pts], [P2|W])
- end.
-
-lower_right([P|Pts]) -> lower_right(P, Pts).
-lower_right(P, []) -> P;
-lower_right({X0,Y0}, [{_,Y}|Pts]) when Y < Y0 -> lower_right({X0,Y0}, Pts);
-lower_right({X0,Y0}, [{X,Y}|Pts]) when X < X0, Y < Y0 -> lower_right({X0,Y0}, Pts);
-lower_right(_,[P|Pts]) -> lower_right(P, Pts).
-
-point_side({{X0,Y0}, {X1, Y1}}, {X2, Y2}) -> point_side((X1 - X0)*(Y2 - Y0) - (X2 - X0)*(Y1 - Y0)).
-point_side(D) when D > 0 -> left;
-point_side(D) when D < 0 -> right;
-point_side(_) -> on_line.
-
-%% AUX
-
-span([{X0,Y0}|Points]) ->
- span(Points,X0,Y0,X0,Y0).
-span([{X0,Y0}|Points],Xmin,Ymin,Xmax,Ymax) ->
- span(Points,erlang:min(Xmin,X0),
- erlang:min(Ymin,Y0),
- erlang:max(Xmax,X0),
- erlang:max(Ymax,Y0));
-span([],Xmin,Ymin,Xmax,Ymax) ->
- {Xmin,Ymin,Xmax,Ymax}.
-
-
-rgb_float2byte({R,G,B}) -> rgb_float2byte({R,G,B,1.0});
-rgb_float2byte({R,G,B,A}) ->
- {trunc(R*255), trunc(G*255), trunc(B*255), trunc(A*255)}.
-
-rgba_byte2float({R,G,B,A}) ->
- {R/255,G/255,B/255,A/255}.
diff --git a/lib/percept/src/egd_render.erl b/lib/percept/src/egd_render.erl
deleted file mode 100644
index 6c708e3e86..0000000000
--- a/lib/percept/src/egd_render.erl
+++ /dev/null
@@ -1,664 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2008-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-
-%%
-%% @doc egd_render
-%%
-
--module(egd_render).
-
--export([binary/1, binary/2]).
--export([eps/1]).
--compile(inline).
-
--export([line_to_linespans/3]).
-
--include("egd.hrl").
--define('DummyC',0).
-
-binary(Image) ->
- binary(Image, opaque).
-
-binary(Image, Type) ->
- parallel_binary(precompile(Image),Type).
-
-parallel_binary(Image = #image{ height = Height },Type) ->
- case erlang:min(erlang:system_info(schedulers), Height) of
- 1 ->
- % if the height or the number of schedulers is 1
- % do the scanlines in this process.
- W = Image#image.width,
- Bg = Image#image.background,
- Os = Image#image.objects,
- erlang:list_to_binary([scanline(Y, Os, {0,0,W - 1, Bg}, Type)
- || Y <- lists:seq(1, Height)]);
- Np ->
- Pids = start_workers(Np, Type),
- Handler = handle_workers(Height, Pids),
- init_workers(Image, Handler, Pids),
- Res = receive_binaries(Height),
- finish_workers(Pids),
- Res
- end.
-
-start_workers(Np, Type) ->
- start_workers(Np, Type, []).
-
-start_workers( 0, _, Pids) -> Pids;
-start_workers(Np, Type, Pids) when Np > 0 ->
- start_workers(Np - 1, Type, [spawn_link(fun() -> worker(Type) end)|Pids]).
-
-worker(Type) ->
- receive
- {Pid, data, #image{ objects = Os, width = W, background = Bg }} ->
- worker(Os, W, Bg, Type, Pid)
- end.
-
-worker(Objects, Width, Bg, Type, Collector) ->
- receive
- {Pid, scan, {Ys, Ye}} ->
- lists:foreach(fun
- (Y) ->
- Bin = erlang:list_to_binary(scanline(Y, Objects, {0,0,Width - 1, Bg}, Type)),
- Collector ! {scan, Y, Bin}
- end, lists:seq(Ys,Ye)),
- Pid ! {self(), scan_complete},
- worker(Objects, Width, Bg, Type, Collector);
- {Pid, scan, Y} ->
- Bin = erlang:list_to_binary(scanline(Y, Objects, {0,0,Width - 1, Bg}, Type)),
- Collector ! {scan, Y, Bin},
- Pid ! {self(), scan_complete},
- worker(Objects, Width, Bg, Type, Collector);
- {_, done} ->
- ok
- end.
-
-init_workers(_Image, _Handler, []) -> ok;
-init_workers(Image, Handler, [Pid|Pids]) ->
- Pid ! {self(), data, Image},
- Handler ! {Pid, scan_complete},
- init_workers(Image, Handler, Pids).
-
-handle_workers(H, Pids) ->
- spawn_link(fun() -> handle_workers(H, H, length(Pids)) end).
-
-handle_workers(_, 0, _) -> ok;
-handle_workers(H, Hi, Np) when H > 0 ->
- N = trunc(Hi/(2*Np)),
- receive
- {Pid, scan_complete} ->
- if N < 2 ->
- Pid ! {self(), scan, Hi},
- handle_workers(H, Hi - 1, Np);
- true ->
- Pid ! {self(), scan, {Hi - N, Hi}},
- handle_workers(H, Hi - 1 - N, Np)
- end
- end.
-
-finish_workers([]) -> ok;
-finish_workers([Pid|Pids]) ->
- Pid ! {self(), done},
- finish_workers(Pids).
-
-receive_binaries(H) ->
- receive_binaries(H, []).
-
-receive_binaries(0, Bins) -> erlang:list_to_binary(Bins);
-receive_binaries(H, Bins) when H > 0 ->
- receive
- {scan, H, Bin} ->
- receive_binaries(H - 1, [Bin|Bins])
- end.
-
-scanline(Y, Os, {_,_,Width,_}=LSB, Type) ->
- OLSs = parse_objects_on_line(Y-1, Width, Os),
- RLSs = resulting_line_spans([LSB|OLSs],Type),
- [ lists:duplicate(Xr - Xl + 1, <<(trunc(R*255)):8,(trunc(G*255)):8,(trunc(B*255)):8>>) || {_,Xl, Xr, {R,G,B,_}} <- RLSs ].
-
-resulting_line_spans(LSs,Type) ->
- %% Build a list of "transitions" from left to right.
- Trans = line_spans_to_trans(LSs),
- %% Convert list of "transitions" to linespans.
- trans_to_line_spans(Trans,Type).
-
-line_spans_to_trans(LSs) ->
- line_spans_to_trans(LSs,[],0).
-
-line_spans_to_trans([],Db,_) ->
- lists:sort(Db);
-line_spans_to_trans([{_,L,R,C}|LSs],Db,Z) ->
- line_spans_to_trans(LSs,[{{L,Z,start},C},{{R+1,Z,stop},C}|Db],Z+1).
-
-trans_to_line_spans(Trans,Type) ->
- trans_to_line_spans(simplify_trans(Trans,Type,[],{0.0,0.0,0.0,0.0},[])).
-
-trans_to_line_spans(SimpleTrans) ->
- trans_to_line_spans1(SimpleTrans,[]).
-
-trans_to_line_spans1([],Spans) ->
- Spans;
-trans_to_line_spans1([_],Spans) ->
- Spans;
-trans_to_line_spans1([{L1,_},{L2,C2}|SimpleTrans],Spans) ->
- %% We are going backwards now...
- trans_to_line_spans1([{L2,C2}|SimpleTrans],[{?DummyC,L2,L1-1,C2}|Spans]).
-
-simplify_trans([],_,_,_,Acc) ->
- Acc;
-simplify_trans([{{L,_,_},_}|_] = Trans,Type,Layers,OldC,Acc) ->
- {NextTrans,RestTrans} =
- lists:splitwith(fun({{L1,_,_},_}) when L1 == L ->
- true;
- (_) ->
- false
- end, Trans),
- {C,NewLayers} = color(NextTrans,Layers,Type,OldC),
- case OldC of
- C -> %% No change in color, so transition unnecessary.
- simplify_trans(RestTrans,Type,NewLayers,OldC,Acc);
- _ ->
- simplify_trans(RestTrans,Type,NewLayers,C,[{L,C}|Acc])
- end.
-
-color(Trans,Layers,Type,OldC) ->
- case modify_layers(Layers,Trans) of
- Layers ->
- {OldC,Layers};
- NewLayers ->
- {color(NewLayers,Type),NewLayers}
- end.
-
-color([],_) -> {0.0,0.0,0.0,0.0};
-color([{_,C}|_],opaque) -> C;
-color(Layers,alpha) -> color1({0.0,0.0,0.0,0.0},Layers).
-
-color1(Color,[]) -> Color;
-color1(Color,[{_,C}|Layers]) -> color1(alpha_blend(Color,C),Layers).
-
-modify_layers(Layers,[]) -> Layers;
-modify_layers(Layers,[{{_,Z,start},C}|Trans]) ->
- modify_layers(add_layer(Layers, Z, C), Trans);
-modify_layers(Layers,[{{_,Z,stop },C}|Trans]) ->
- modify_layers(remove_layer(Layers, Z, C), Trans).
-
-add_layer([{Z1,_}=H|Layers],Z,C) when Z1 > Z ->
- [H|add_layer(Layers,Z,C)];
-add_layer(Layers,Z,C) ->
- [{Z,C}|Layers].
-
-remove_layer(Layers,Z,C) ->
- Layers -- [{Z,C}].
-
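-%% Standard "over" compositing with the first colour as the top layer:
-%% Beta is the fraction of the lower layer that remains visible, so each
-%% channel is weighted by A1 for the top layer and by Beta for the lower
-%% one.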
-alpha_blend({R1,G1,B1,A1}, {R2,G2,B2,A2}) when is_float(A1), is_float(A2)->
- Beta = A2*(1.0 - A1),
- A = A1 + Beta,
- R = R1*A1 + R2*Beta,
- G = G1*A1 + G2*Beta,
- B = B1*A1 + B2*Beta,
- {R,G,B,A}.
-
-parse_objects_on_line(Y, Width, Objects) ->
- parse_objects_on_line(Y, 1, Width, Objects, []).
-parse_objects_on_line(_Y, _Z, _, [], Out) -> lists:flatten(Out);
-parse_objects_on_line(Y, Z, Width, [O|Os], Out) ->
- case is_object_on_line(O, Y) of
- false ->
- parse_objects_on_line(Y, Z + 1, Width, Os, Out);
- true ->
- OLs = object_line_data(O,Y,Z),
- TOLs = trim_object_line_data(OLs, Width),
- parse_objects_on_line(Y, Z + 1, Width, Os, [TOLs|Out])
- end.
-
-trim_object_line_data(OLs, Width) ->
- trim_object_line_data(OLs, Width, []).
-trim_object_line_data([], _, Out) -> Out;
-
-trim_object_line_data([{_, Xl, _, _}|OLs], Width, Out) when Xl > Width ->
- trim_object_line_data(OLs, Width, Out);
-trim_object_line_data([{_, _, Xr, _}|OLs], Width, Out) when Xr < 0 ->
- trim_object_line_data(OLs, Width, Out);
-trim_object_line_data([{Z, Xl, Xr, C}|OLs], Width, Out) ->
- trim_object_line_data(OLs, Width, [{Z, erlang:max(0,Xl), erlang:min(Xr,Width), C}|Out]).
-
-% object_line_data
-% In:
-% Object :: image_object()
-% Y :: index of height
-% Z :: index of depth
-% Out:
-% OLs = [{Z, Xl, Xr, Color}]
-%	Z = index of depth
-% Xl = left X index
-% Xr = right X index
-% Purpose:
-%	Calculate the length (start and finish index) of an object's horizontal
-%	line, given the height index.
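-%	For example, for a filled ellipse the half-width at height Y is
-%	Xo = math:sqrt(1 - Yo*Yo/Yr2)*Xr, so the object contributes the single
-%	span {Z, round(X0 - Xo + Xr), round(X0 + Xo + Xr), C}.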
-
-object_line_data(#image_object{type=rectangle,
- span={X0,Y0,X1,Y1}, color=C}, Y, Z) ->
- if
- Y0 =:= Y ; Y1 =:= Y ->
- [{Z, X0, X1, C}];
- true ->
- [{Z, X0, X0, C},
- {Z, X1, X1, C}]
- end;
-
-object_line_data(#image_object{type=filled_rectangle,
- span={X0, _, X1, _}, color=C}, _Y, Z) ->
- [{Z, X0, X1, C}];
-
-object_line_data(#image_object{type=filled_ellipse,
- internals={Xr,Yr,Yr2}, span={X0,Y0,X1,Y1}, color=C}, Y, Z) ->
- if
- X1 - X0 =:= 0; Y1 - Y0 =:= 0 ->
- [{Z, X0, X1, C}];
- true ->
- Yo = trunc(Y - Y0 - Yr),
- Yo2 = Yo*Yo,
- Xo = math:sqrt((1 - Yo2/Yr2))*Xr,
- [{Z, round(X0 - Xo + Xr), round(X0 + Xo + Xr), C}]
- end;
-
-object_line_data(#image_object{type=filled_triangle,
- intervals=Is, color=C}, Y, Z) ->
- case lists:keyfind(Y, 1, Is) of
- {Y, Xl, Xr} -> [{Z, Xl, Xr, C}];
- false -> []
- end;
-
-object_line_data(#image_object{type=line,
- intervals=M, color={R,G,B,_}}, Y, Z) ->
- case M of
- #{Y := Ls} -> [{Z, Xl, Xr, {R,G,B,1.0-C/255}}||{Xl,Xr,C} <- Ls];
- _ -> []
- end;
-
-object_line_data(#image_object{type=polygon,
- color=C, intervals=Is}, Y, Z) ->
- [{Z, Xl, Xr, C} || {Yp, Xl, Xr} <- Is, Yp =:= Y];
-
-object_line_data(#image_object{type=text_horizontal,
- color=C, intervals=Is}, Y, Z) ->
- [{Z, Xl, Xr, C} || {Yg, Xl, Xr} <- Is, Yg =:= Y];
-
-object_line_data(#image_object{type=pixel,
- span={X0,_,X1,_}, color=C}, _, Z) ->
- [{Z, X0, X1, C}].
-
-is_object_on_line(#image_object{span={_,Y0,_,Y1}}, Y) ->
- if Y < Y0; Y > Y1 -> false;
- true -> true
- end.
-
-%%% primitives to line_spans
-
-%% compile objects to linespans
-
-precompile(#image{objects = Os}=I) ->
- I#image{objects = precompile_objects(Os)}.
-
-precompile_objects([]) -> [];
-precompile_objects([#image_object{type=line, internals=W, points=[P0,P1]}=O|Os]) ->
- [O#image_object{intervals = linespans_to_map(line_to_linespans(P0,P1,W))}|precompile_objects(Os)];
-precompile_objects([#image_object{type=filled_triangle, points=[P0,P1,P2]}=O|Os]) ->
- [O#image_object{intervals = triangle_ls(P0,P1,P2)}|precompile_objects(Os)];
-precompile_objects([#image_object{type=polygon, points=Pts}=O|Os]) ->
- [O#image_object{intervals = polygon_ls(Pts)}|precompile_objects(Os)];
-precompile_objects([#image_object{type=filled_ellipse, span={X0,Y0,X1,Y1}}=O|Os]) ->
- Xr = (X1 - X0)/2,
- Yr = (Y1 - Y0)/2,
- Yr2 = Yr*Yr,
- [O#image_object{internals={Xr,Yr,Yr2}}|precompile_objects(Os)];
-precompile_objects([#image_object{type=arc, points=[P0,P1], internals=D}=O|Os]) ->
- Es = egd_primitives:arc_to_edges(P0, P1, D),
- Ls = lists:foldl(fun ({Ep0,Ep1},M) ->
- linespans_to_map(line_to_linespans(Ep0,Ep1,1),M)
- end, #{}, Es),
- [O#image_object{type=line, intervals=Ls}|precompile_objects(Os)];
-precompile_objects([#image_object{type=text_horizontal,
- points=[P0], internals={Font,Text}}=O|Os]) ->
- [O#image_object{intervals=text_horizontal_ls(P0,Font,Text)}|precompile_objects(Os)];
-precompile_objects([O|Os]) ->
- [O|precompile_objects(Os)].
-
-% triangle
-
-triangle_ls(P1,P2,P3) ->
- % Find top point (or left most top point),
- % From that point, two lines will be drawn to the
- % other points.
- % For each Y step,
- % bresenham_line_interval for each of the two lines
- % Find the left most and the right most for those lines
-	% When an end point is reached, a new line is drawn from it to the
-	% remaining point and the same procedure as above is repeated.
- [Sp1, Sp2, Sp3] = tri_pt_ysort([P1,P2,P3]),
- triangle_ls_lp(tri_ls_ysort(line_to_linespans(Sp1,Sp2,1)), Sp2,
- tri_ls_ysort(line_to_linespans(Sp1,Sp3,1)), Sp3, []).
-
-% There will be Y mismatches between the two lists since bresenham is not perfect.
-% It can be remedied by checking intervals, but this could be costly and
-% may not be necessary, depending on how exact we need the points to be.
-% The lists should differ by at most one line, and the endpoints should be fine.
-
-triangle_ls_lp([],_,[],_,Out) -> Out;
-triangle_ls_lp(LSs1, P1, [], P2, Out) ->
- SLSs = tri_ls_ysort(line_to_linespans(P2,P1,1)),
- N2 = length(SLSs),
- N1 = length(LSs1),
- if
- N1 > N2 ->
- [_|ILSs] = LSs1,
- triangle_ls_lp(ILSs, SLSs, Out);
- N2 > N1 ->
- [_|ILSs] = SLSs,
- triangle_ls_lp(LSs1, ILSs, Out);
- true ->
- triangle_ls_lp(LSs1, SLSs, Out)
- end;
-triangle_ls_lp([], P1, LSs2, P2, Out) ->
- SLSs = tri_ls_ysort(line_to_linespans(P1,P2,1)),
- N1 = length(SLSs),
- N2 = length(LSs2),
- if
- N1 > N2 ->
- [_|ILSs] = SLSs,
- triangle_ls_lp(ILSs, LSs2, Out);
- N2 > N1 ->
- [_|ILSs] = LSs2,
- triangle_ls_lp(SLSs, ILSs, Out);
- true ->
- triangle_ls_lp(SLSs, LSs2, Out)
- end;
-triangle_ls_lp([LS1|LSs1],P1,[LS2|LSs2],P2, Out) ->
- {Y, Xl1, Xr1,_Ca1} = LS1,
- {_, Xl2, Xr2,_Ca2} = LS2,
- Xr = lists:max([Xl1,Xr1,Xl2,Xr2]),
- Xl = lists:min([Xl1,Xr1,Xl2,Xr2]),
- triangle_ls_lp(LSs1,P1,LSs2,P2,[{Y,Xl,Xr}|Out]).
-
-triangle_ls_lp([],[],Out) -> Out;
-triangle_ls_lp([],_,Out) -> Out;
-triangle_ls_lp(_,[],Out) -> Out;
-triangle_ls_lp([LS1|LSs1], [LS2|LSs2], Out) ->
- {Y, Xl1, Xr1, _Ca1} = LS1,
- {_, Xl2, Xr2, _Ca2} = LS2,
- Xr = lists:max([Xl1,Xr1,Xl2,Xr2]),
- Xl = lists:min([Xl1,Xr1,Xl2,Xr2]),
- triangle_ls_lp(LSs1,LSs2,[{Y,Xl,Xr}|Out]).
-
-tri_pt_ysort(Pts) ->
- % {X,Y}
- lists:sort(
- fun ({_,Y1},{_,Y2}) ->
- if Y1 > Y2 -> false; true -> true end
- end, Pts).
-
-tri_ls_ysort(LSs) ->
- % {Y, Xl, Xr, Ca}
- lists:sort(
- fun ({Y1,_,_,_},{Y2,_,_,_}) ->
- if Y1 > Y2 -> false; true -> true end
- end, LSs).
-
-% polygon_ls
-% In:
-% Pts :: [{X,Y}]
-% Out:
-% LSs :: [{Y,Xl,Xr}]
-% Purpose:
-% Make polygon line spans
-% Algorithm:
-% 1. Find the left most (lm) point
-% 2. Find the two points adjacent to that point
-%	   The triplet will make a triangle
-%	3. Ensure no points lie within the triangle
-% 4a.No points within triangle,
-% make triangle,
-% remove lm point
-% 1.
-%	4b.Point(s) within triangle,
-%		use the interior point as the new third vertex and retry from 3.
-
-
-polygon_ls(Pts) ->
- % Make triangles
- Tris = polygon_tri(Pts),
- % interval triangles
- lists:flatten(polygon_tri_ls(Tris, [])).
-
-polygon_tri_ls([], Out) -> Out;
-polygon_tri_ls([{P1,P2,P3}|Tris], Out) ->
- polygon_tri_ls(Tris, [triangle_ls(P1,P2,P3)|Out]).
-
-polygon_tri(Pts) ->
- polygon_tri(polygon_lm_pt(Pts), []).
-
-
-polygon_tri([P1,P2,P3],Tris) -> [{P1,P2,P3}|Tris];
-polygon_tri([P2,P1,P3|Pts], Tris) ->
- case polygon_tri_test(P1,P2,P3,Pts) of
- false -> polygon_tri(polygon_lm_pt([P2,P3|Pts]), [{P1,P2,P3}|Tris]);
- [LmPt|Ptsn] -> polygon_tri([P2,P1,LmPt,P3|Ptsn], Tris)
- end.
-
-polygon_tri_test(P1,P2,P3, Pts) ->
- polygon_tri_test(P1,P2,P3, Pts, []).
-
-polygon_tri_test(_,_,_, [], _) -> false;
-polygon_tri_test(P1,P2,P3,[Pt|Pts], Ptsr) ->
- case point_inside_triangle(Pt, P1,P2,P3) of
- false -> polygon_tri_test(P1,P2,P3, Pts, [Pt|Ptsr]);
- true -> [Pt|Pts] ++ lists:reverse(Ptsr)
- end.
-
-% polygon_lm_pt
-% In:
-% Pts :: [{X,Y}]
-% Out
-%	LmPts = [{Xmin,Y},{X1,Y1},...]  (the input rotated so the leftmost point is first)
-% Purpose:
-%	The order of the list is important:
-%	rotate the elements until the point with the smallest X is first.
-%	This is not extremely fast.
-
-polygon_lm_pt(Pts) ->
- Xs = [X||{X,_}<-Pts],
- polygon_lm_pt(Pts, lists:min(Xs), []).
-
-polygon_lm_pt([Pt0,{X,_}=Ptm | Pts], Xmin, Ptsr) when X > Xmin ->
- polygon_lm_pt([Ptm|Pts], Xmin, [Pt0|Ptsr]);
-polygon_lm_pt(Pts, _, Ptsr) ->
- Pts ++ lists:reverse(Ptsr).
-
-
-% return true if P is inside triangle (p1,p2,p3),
-% otherwise false.
-
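-% points_same_side/4 tests whether P1 and P2 lie on the same side of the
-% line through L1 and L2 by comparing the signs of the cross products of
-% (P - L1) with (L2 - L1); their product is >= 0 exactly when the signs
-% agree (or a point lies on the line).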
-points_same_side({P1x,P1y}, {P2x,P2y}, {L1x,L1y}, {L2x,L2y}) ->
-    ((P1x - L1x)*(L2y - L1y) - (L2x - L1x)*(P1y - L1y)) *
-    ((P2x - L1x)*(L2y - L1y) - (L2x - L1x)*(P2y - L1y)) >= 0.
-
-point_inside_triangle(P, P1, P2, P3) ->
- points_same_side(P, P1, P2, P3) and
- points_same_side(P, P2, P1, P3) and
- points_same_side(P, P3, P1, P2).
-
-%% [{Y, Xl, Xr, C}] -> #{Y => [{Xl,Xr,C}]}
-%% Reorganize linespans into a map with Y as key.
-
-linespans_to_map(Ls) ->
- linespans_to_map(Ls,#{}).
-linespans_to_map([{Y,Xl,Xr,C}|Ls], M) ->
- case M of
- #{Y := Spans} -> linespans_to_map(Ls, M#{Y := [{Xl,Xr,C}|Spans]});
- _ -> linespans_to_map(Ls, M#{Y => [{Xl,Xr,C}]})
- end;
-linespans_to_map([], M) ->
- M.
-
-
-%% line_to_linespans
-%% Anti-aliased thick line
-%% Do it CPS style
-%% In:
-%%	P1 :: point()
-%%	P2 :: point()
-%%	Wd :: line width
-%% Out:
-%%	[{Y,Xl,Xr,C}]
-%%
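-%% This appears to be a thick-line Bresenham walk: E is the error term and
-%% Ed the Euclidean length of the line, so abs(Err)/Ed approximates a
-%% pixel's distance from the ideal line.  Each emitted span carries
-%% C = max(0, 255*(abs(Err)/Ed - Wd + 1)); object_line_data/3 later turns
-%% C into an alpha value as 1.0 - C/255, so C = 0 is fully opaque and
-%% larger values fade the edge pixels out.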
-line_to_linespans({X0,Y0},{X1,Y1},Wd) ->
- Dx = abs(X1-X0),
- Dy = abs(Y1-Y0),
- Sx = if X0 < X1 -> 1; true -> -1 end,
- Sy = if Y0 < Y1 -> 1; true -> -1 end,
- E0 = Dx - Dy,
- Ed = if Dx + Dy =:= 0 -> 1; true -> math:sqrt(Dx*Dx + Dy*Dy) end,
- line_to_ls(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E0,Ed,(Wd+1)/2,[]).
-
-line_to_ls(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls0) ->
- C = max(0, 255*(abs(E - Dx+Dy)/Ed - Wd + 1)),
- Ls1 = [{Y0,X0,X0,C}|Ls0],
- line_to_ls_sx(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls1,E).
-
-line_to_ls_sx(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls,E2) when 2*E2 > -Dx ->
- line_to_ls_sx_do(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls,E2+Dy,Y0);
-line_to_ls_sx(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls,E2) ->
- line_to_ls_sy(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls,E2,X0).
-
-line_to_ls_sx_do(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls0,E2,Y) when E2 < Ed*Wd andalso
- (Y1 =/= Y orelse Dx > Dy) ->
- Y2 = Y + Sy,
- C = max(0,255*(abs(E2)/Ed-Wd+1)),
- Ls = [{Y2,X0,X0,C}|Ls0],
- line_to_ls_sx_do(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls,E2+Dx,Y2);
-line_to_ls_sx_do(X0,_Y0,X1,_Y1,_Dx,_Dy,_Sx,_Sy,_E,_Ed,_Wd,Ls,_E2,_Y) when X0 =:= X1 ->
- Ls;
-line_to_ls_sx_do(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls,_E2,_Y) ->
- line_to_ls_sy(X0+Sx,Y0,X1,Y1,Dx,Dy,Sx,Sy,E-Dy,Ed,Wd,Ls,E,X0).
-
-line_to_ls_sy(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls0,E2,X) when 2*E2 =< Dy ->
- line_to_ls_sy_do(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls0,Dx-E2,X);
-line_to_ls_sy(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls0,_E2,_X) ->
- line_to_ls(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls0).
-
-line_to_ls_sy_do(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls0,E2,X) when E2 < Ed*Wd andalso
- (X1 =/= X orelse Dx < Dy) ->
- X2 = X + Sx,
- C = max(0,255*(abs(E2)/Ed-Wd+1)),
- Ls = [{Y0,X2,X2,C}|Ls0],
- line_to_ls_sy_do(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls,E2+Dy,X2);
-line_to_ls_sy_do(_X0,Y0,_X1,Y1,_Dx,_Dy,_Sx,_Sy,_E,_Ed,_Wd,Ls,_E2,_X) when Y0 =:= Y1 ->
- Ls;
-line_to_ls_sy_do(X0,Y0,X1,Y1,Dx,Dy,Sx,Sy,E,Ed,Wd,Ls0,_E2,_X) ->
- line_to_ls(X0,Y0+Sy,X1,Y1,Dx,Dy,Sx,Sy,E+Dx,Ed,Wd,Ls0).
-
-% Text
-
-text_horizontal_ls(Point, Font, Chars) ->
- {_Fw,Fh} = egd_font:size(Font),
- text_intervals(Point, Fh, Font, Chars, []).
-
-% This is stupid. The starting point is the top left (Ptl) but the font
-% offsets are relative to the bottom left origin,
-% {Xtl,Ytl} -------------------------
-% | |
-% | Glyph BoundingBox |
-% | -------- |
-% | |Bitmap| Gh |
-% FH |-Gx0-|Data | |
-% | -------- |
-% | | |
-% | Gy0 |
-% | | |
-% Glyph (0,0)------------------------- Gxm (Glyph X move)
-% FW
-% Therefore, we need Yo, which is Yo = FH - Gy0 - Gh,
-% Font height minus Glyph Y offset minus Glyph bitmap data boundingbox
-% height.
-
-text_intervals( _, _, _, [], Out) -> lists:flatten(Out);
-text_intervals({Xtl,Ytl}, Fh, Font, [Code|Chars], Out) ->
- {{_Gw, Gh, Gx0, Gy0, Gxm}, LSs} = egd_font:glyph(Font, Code),
- % Set offset points from translation matrix to point in TeInVe.
- Yo = Fh - Gh + Gy0,
- GLSs = text_intervals_vertical({Xtl+Gx0,Ytl+Yo},LSs, []),
- text_intervals({Xtl+Gxm,Ytl}, Fh, Font, Chars, [GLSs|Out]).
-
-text_intervals_vertical( _, [], Out) -> Out;
-text_intervals_vertical({Xtl, Ytl}, [LS|LSs], Out) ->
- H = lists:foldl(
- fun ({Xl,Xr}, RLSs) ->
- [{Ytl, Xl + Xtl, Xr + Xtl}|RLSs]
- end, [], LS),
- text_intervals_vertical({Xtl, Ytl+1}, LSs, [H|Out]).
-
-
-%%% E. PostScript implementation
-
-eps(#image{ objects = Os, width = W, height = H}) ->
- list_to_binary([eps_header(W,H),eps_objects(H,Os),eps_footer()]).
-
-eps_objects(H,Os) -> eps_objects(H,Os, []).
-eps_objects(_,[], Out) -> lists:flatten(Out);
-eps_objects(H,[O|Os], Out) -> eps_objects(H,Os, [eps_object(H,O)|Out]).
-
-eps_object(H,#image_object{ type = text_horizontal, internals = {_Font,Text}, points = [{X,Y}], color={R,G,B,_}}) ->
- s("/Times-Roman findfont\n14 scalefont\nsetfont\n~.4f ~.4f ~.4f setrgbcolor\nnewpath\n~p ~p moveto\n(~s) show~n",
- [R,G,B,X,H-(Y + 10), Text]);
-eps_object(H,#image_object{ type = filled_ellipse, points = [{X1,Y1p},{X2,Y2p}], color={R,G,B,_}}) ->
- Y1 = H - Y1p,
- Y2 = H - Y2p,
- Xr = trunc((X2-X1)/2),
- Yr = trunc((Y2-Y1)/2),
- Cx = X1 + Xr,
- Cy = Y1 + Yr,
- s("~.4f ~.4f ~.4f setrgbcolor\nnewpath\n~p ~p ~p ~p 0 360 ellipse fill\n",
- [R,G,B,Cx,Cy,Xr,Yr]);
-eps_object(H,#image_object{ type = arc, points = [P0, P1], internals = D, color={R,G,B,_}}) ->
- Es = egd_primitives:arc_to_edges(P0, P1, D),
- [s("~.4f ~.4f ~.4f setrgbcolor\n", [R,G,B])|lists:foldl(fun
- ({{X1,Y1},{X2,Y2}}, Eps) ->
- [s("newpath\n~p ~p moveto\n~p ~p lineto\n1 setlinewidth\nstroke\n", [X1,H-Y1,X2,H-Y2])|Eps]
- end, [], Es)];
-
-eps_object(H,#image_object{ type = line, points = [{X1,Y1}, {X2,Y2}], color={R,G,B,_}}) ->
- s("~.4f ~.4f ~.4f setrgbcolor\nnewpath\n~p ~p moveto\n~p ~p lineto\n1 setlinewidth\nstroke\n",
- [R,G,B,X1,H-Y1,X2,H-Y2]);
-eps_object(H,#image_object{ type = rectangle, points = [{X1,Y1}, {X2,Y2}], color={R,G,B,_}}) ->
- s("~.4f ~.4f ~.4f setrgbcolor\nnewpath\n~p ~p moveto\n~p ~p lineto\n~p ~p lineto\n~p ~p lineto\n~p ~p lineto\n1 setlinewidth\nstroke\n",
- [R,G,B,X1,H-Y1,X2,H-Y1,X2,H-Y2,X1,H-Y2,X1,H-Y1]);
-eps_object(H,#image_object{ type = filled_rectangle, points = [{X1,Y1}, {X2,Y2}], color={R,G,B,_}}) ->
- s("~.4f ~.4f ~.4f setrgbcolor\nnewpath\n~p ~p moveto\n~p ~p lineto\n~p ~p lineto\n~p ~p lineto\n~p ~p lineto\nclosepath\nfill\n",
- [R,G,B,X1,H-Y1,X2,H-Y1,X2,H-Y2,X1,H-Y2,X1,H-Y1]);
-eps_object(_,_) -> "".
-
-s(Format, Terms) -> lists:flatten(io_lib:format(Format, Terms)).
-
-eps_header(W,H) ->
- s("%!PS-Adobe-3.0 EPSF-3.0\n%%Creator: Created by egd\n%%BoundingBox: 0 0 ~p ~p\n%%LanguageLevel: 2\n%%Pages: 1\n%%DocumentData: Clean7Bit\n",[W,H]) ++
- "%%BeginProlog\n/ellipse {7 dict begin\n/endangle exch def\n/startangle exch def\n/yradius exch def\n/xradius exch def\n/yC exch def\n/xC exch def\n"
- "/savematrix matrix currentmatrix def\nxC yC translate\nxradius yradius scale\n0 0 1 startangle endangle arc\nsavematrix setmatrix\nend\n} def\n"
- "%%EndProlog\n".
-
-eps_footer() ->
- "%%EOF\n".
diff --git a/lib/percept/src/percept.app.src b/lib/percept/src/percept.app.src
deleted file mode 100644
index ab0d9a4d90..0000000000
--- a/lib/percept/src/percept.app.src
+++ /dev/null
@@ -1,45 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2007-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
-
-{application,percept, [
- {description, "PERCEPT Erlang Concurrency Profiling Tool"},
- {vsn, "%VSN%"},
- {modules, [
- egd,
- egd_font,
- egd_png,
- egd_primitives,
- egd_render,
- percept,
- percept_analyzer,
- percept_db,
- percept_graph,
- percept_html,
- percept_image
- ]},
- {registered, [percept_db,percept_port]},
- {applications, [kernel,stdlib]},
- {env,[]},
- {runtime_dependencies, ["stdlib-2.0","runtime_tools-1.8.14","kernel-3.0",
- "inets-5.10","erts-9.0"]}
-]}.
-
-
-%% vim: syntax=erlang
diff --git a/lib/percept/src/percept.appup.src b/lib/percept/src/percept.appup.src
deleted file mode 100644
index 3ccdf8db2b..0000000000
--- a/lib/percept/src/percept.appup.src
+++ /dev/null
@@ -1,22 +0,0 @@
-%% -*- erlang -*-
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2007-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-{"%VSN%",
- [{<<".*">>,[{restart_application, percept}]}],
- [{<<".*">>,[{restart_application, percept}]}]
-}.
diff --git a/lib/percept/src/percept.erl b/lib/percept/src/percept.erl
deleted file mode 100644
index 25c6ae19b1..0000000000
--- a/lib/percept/src/percept.erl
+++ /dev/null
@@ -1,337 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2007-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
-
-%%
-%% @doc Percept - Erlang Concurrency Profiling Tool
-%%
-%% This module provides the user interface for the application.
-%%
-
--module(percept).
--behaviour(application).
--export([profile/1,
- profile/2,
- profile/3,
- stop_profile/0,
- start_webserver/0,
- start_webserver/1,
- stop_webserver/0,
- stop_webserver/1,
- analyze/1,
- % Application behaviour
- start/2,
- stop/1]).
-
-
--include("percept.hrl").
-
-%%==========================================================================
-%% Type definitions
-%%==========================================================================
-
-%% @type percept_option() = procs | ports | exclusive
-
--type percept_option() :: 'procs' | 'ports' | 'exclusive' | 'scheduler'.
-
-%%==========================================================================
-%% Application callback functions
-%%==========================================================================
-
-%% @spec start(Type, Args) -> {started, Hostname, Port} | {error, Reason}
-%% @doc none
-%% @hidden
-
-start(_Type, _Args) ->
- %% start web browser service
- start_webserver(0).
-
-%% @spec stop(State) -> ok
-%% @doc none
-%% @hidden
-
-stop(_State) ->
- %% stop web browser service
- stop_webserver(0).
-
-%%==========================================================================
-%% Interface functions
-%%==========================================================================
-
-%% @spec profile(Filename::string()) -> {ok, Port} | {already_started, Port}
-%% @see percept_profile
-
-%% profiling
-
--spec profile(Filename :: file:filename()) ->
- {'ok', port()} | {'already_started', port()}.
-
-profile(Filename) ->
- percept_profile:start(Filename, [procs]).
-
-%% @spec profile(Filename::string(), [percept_option()]) -> {ok, Port} | {already_started, Port}
-%% @see percept_profile
-
--spec profile(Filename :: file:filename(),
- Options :: [percept_option()]) ->
- {'ok', port()} | {'already_started', port()}.
-
-profile(Filename, Options) ->
- percept_profile:start(Filename, Options).
-
-%% @spec profile(Filename::string(), MFA::mfa(), [percept_option()]) -> ok | {already_started, Port} | {error, not_started}
-%% @see percept_profile
-
--spec profile(Filename :: file:filename(),
- Entry :: {atom(), atom(), list()},
- Options :: [percept_option()]) ->
- 'ok' | {'already_started', port()} | {'error', 'not_started'}.
-
-profile(Filename, MFA, Options) ->
- percept_profile:start(Filename, MFA, Options).
-
--spec stop_profile() -> 'ok' | {'error', 'not_started'}.
-
-%% @spec stop_profile() -> ok | {'error', 'not_started'}
-%% @see percept_profile
-
-stop_profile() ->
- percept_profile:stop().
-
-%% @spec analyze(string()) -> ok | {error, Reason}
-%% @doc Analyze file.
-
--spec analyze(Filename :: file:filename()) ->
- 'ok' | {'error', any()}.
-
-analyze(Filename) ->
- case percept_db:start() of
- {started, DB} ->
- parse_and_insert(Filename,DB);
- {restarted, DB} ->
- parse_and_insert(Filename,DB)
- end.
-
-%% @spec start_webserver() -> {started, Hostname, Port} | {error, Reason}
-%% Hostname = string()
-%% Port = integer()
-%% Reason = term()
-%% @doc Starts webserver.
-
--spec start_webserver() ->
- {'started', string(), pos_integer()} | {'error', any()}.
-
-start_webserver() ->
- start_webserver(0).
-
-%% @spec start_webserver(integer()) -> {started, Hostname, AssignedPort} | {error, Reason}
-%% Hostname = string()
-%% AssignedPort = integer()
-%% Reason = term()
-%% @doc Starts webserver. If port number is 0, an available port number will
-%% be assigned by inets.
-
--spec start_webserver(Port :: non_neg_integer()) ->
- {'started', string(), pos_integer()} | {'error', any()}.
-
-start_webserver(Port) when is_integer(Port) ->
- ok = ensure_loaded(percept),
- case whereis(percept_httpd) of
- undefined ->
- {ok, Config} = get_webserver_config("percept", Port),
- ok = application:ensure_started(inets),
- case inets:start(httpd, Config) of
- {ok, Pid} ->
- AssignedPort = find_service_port_from_pid(inets:services_info(), Pid),
- {ok, Host} = inet:gethostname(),
- %% workaround until inets can get me a service from a name.
- Mem = spawn(fun() -> service_memory({Pid,AssignedPort,Host}) end),
- register(percept_httpd, Mem),
- {started, Host, AssignedPort};
- {error, Reason} ->
- {error, {inets, Reason}}
- end;
- _ ->
- {error, already_started}
- end.
-
-%% @spec stop_webserver() -> ok | {error, not_started}
-%% @doc Stops webserver.
-
-stop_webserver() ->
- case whereis(percept_httpd) of
- undefined ->
- {error, not_started};
- Pid ->
- do_stop([], Pid)
- end.
-
-do_stop([], Pid)->
- Pid ! {self(), get_port},
- Port = receive P -> P end,
- do_stop(Port, Pid);
-do_stop(Port, [])->
- case whereis(percept_httpd) of
- undefined ->
- {error, not_started};
- Pid ->
- do_stop(Port, Pid)
- end;
-do_stop(Port, Pid)->
- case find_service_pid_from_port(inets:services_info(), Port) of
- undefined ->
- {error, not_started};
- Pid2 ->
- Pid ! quit,
- inets:stop(httpd, Pid2)
- end.
-
-%% @spec stop_webserver(integer()) -> ok | {error, not_started}
-%% @doc Stops webserver of the given port.
-%% @hidden
-
-stop_webserver(Port) ->
- do_stop(Port,[]).
-
-%%==========================================================================
-%% Auxiliary functions
-%%==========================================================================
-
-%% parse_and_insert
-
-parse_and_insert(Filename, DB) ->
- io:format("Parsing: ~p ~n", [Filename]),
- T0 = erlang:monotonic_time(millisecond),
- Pid = dbg:trace_client(file, Filename, mk_trace_parser(self())),
- Ref = erlang:monitor(process, Pid),
- parse_and_insert_loop(Filename, Pid, Ref, DB, T0).
-
-parse_and_insert_loop(Filename, Pid, Ref, DB, T0) ->
- receive
- {'DOWN',Ref,process, Pid, noproc} ->
- io:format("Incorrect file or malformed trace file: ~p~n", [Filename]),
- {error, file};
- {parse_complete, {Pid, Count}} ->
- receive {'DOWN', Ref, process, Pid, normal} -> ok after 0 -> ok end,
- DB ! {action, consolidate},
- T1 = erlang:monotonic_time(millisecond),
- io:format("Parsed ~w entries in ~w ms.~n", [Count, T1 - T0]),
- io:format(" ~p created processes.~n", [length(percept_db:select({information, procs}))]),
- io:format(" ~p opened ports.~n", [length(percept_db:select({information, ports}))]),
- ok;
- {'DOWN',Ref, process, Pid, normal} -> parse_and_insert_loop(Filename, Pid, Ref, DB, T0);
- {'DOWN',Ref, process, Pid, Reason} -> {error, Reason}
- end.
-
-mk_trace_parser(Pid) ->
- {fun trace_parser/2, {0, Pid}}.
-
-trace_parser(end_of_trace, {Count, Pid}) ->
- Pid ! {parse_complete, {self(),Count}},
- receive
- {ack, Pid} ->
- ok
- end;
-trace_parser(Trace, {Count, Pid}) ->
- percept_db:insert(Trace),
- {Count + 1, Pid}.
-
-find_service_pid_from_port([], _) ->
- undefined;
-find_service_pid_from_port([{_, Pid, Options} | Services], Port) ->
- case lists:keyfind(port, 1, Options) of
- false ->
- find_service_pid_from_port(Services, Port);
- {port, Port} ->
- Pid
- end.
-
-find_service_port_from_pid([], _) ->
- undefined;
-find_service_port_from_pid([{_, Pid, Options} | _], Pid) ->
- case lists:keyfind(port, 1, Options) of
- false ->
- undefined;
- {port, Port} ->
- Port
- end;
-find_service_port_from_pid([{_, _, _} | Services], Pid) ->
- find_service_port_from_pid(Services, Pid).
-
-%% service memory
-
-service_memory({Pid, Port, Host}) ->
- receive
- quit ->
- ok;
- {Reply, get_port} ->
- Reply ! Port,
- service_memory({Pid, Port, Host});
- {Reply, get_host} ->
- Reply ! Host,
- service_memory({Pid, Port, Host});
- {Reply, get_pid} ->
- Reply ! Pid,
- service_memory({Pid, Port, Host})
- end.
-
-% Create config data for the webserver
-
-get_webserver_config(Servername, Port) when is_list(Servername), is_integer(Port) ->
- Path = code:priv_dir(percept),
- Root = filename:join([Path, "server_root"]),
- MimeTypesFile = filename:join([Root,"conf","mime.types"]),
- {ok, MimeTypes} = httpd_conf:load_mime_types(MimeTypesFile),
- Config = [
- % Roots
- {server_root, Root},
- {document_root,filename:join([Root, "htdocs"])},
-
- % Aliases
- {eval_script_alias,{"/eval",[io]}},
- {erl_script_alias,{"/cgi-bin",[percept_graph,percept_html,io]}},
- {script_alias,{"/cgi-bin/", filename:join([Root, "cgi-bin"])}},
- {alias,{"/javascript/",filename:join([Root, "scripts"]) ++ "/"}},
- {alias,{"/images/", filename:join([Root, "images"]) ++ "/"}},
- {alias,{"/css/", filename:join([Root, "css"]) ++ "/"}},
-
- % Configs
- {default_type,"text/plain"},
- {directory_index,["index.html"]},
- {mime_types, MimeTypes},
- {modules,[mod_alias,
- mod_esi,
- mod_actions,
- mod_cgi,
- mod_dir,
- mod_get,
- mod_head
- ]},
- {com_type,ip_comm},
- {server_name, Servername},
- {bind_address, any},
- {port, Port}],
- {ok, Config}.
-
-ensure_loaded(App) ->
- case application:load(App) of
- ok -> ok;
- {error,{already_loaded,App}} -> ok;
- Error -> Error
- end.
diff --git a/lib/percept/src/percept.hrl b/lib/percept/src/percept.hrl
deleted file mode 100644
index 58926cd1b4..0000000000
--- a/lib/percept/src/percept.hrl
+++ /dev/null
@@ -1,53 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2007-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
-
--define(seconds(EndTs,StartTs), timer:now_diff(EndTs, StartTs)/1000000).
-
-%%% ------------------- %%%
-%%% Type definitions %%%
-%%% ------------------- %%%
-
--type timestamp() :: {non_neg_integer(), non_neg_integer(), non_neg_integer()}.
--type true_mfa() :: {atom(), atom(), byte() | list()}.
--type state() :: 'active' | 'inactive'.
--type scheduler_id() :: {'scheduler_id', non_neg_integer()}.
-
-%%% ------------------- %%%
-%%% Records %%%
-%%% ------------------- %%%
-
--record(activity, {
- timestamp ,%:: timestamp() ,
- id ,%:: pid() | port() | scheduler_id(),
- state = undefined ,%:: state() | 'undefined',
- where = undefined ,%:: true_mfa() | 'undefined',
- runnable_count = 0 %:: non_neg_integer()
- }).
-
--record(information, {
- id ,%:: pid() | port(),
- name = undefined ,%:: atom() | string() | 'undefined',
- entry = undefined ,%:: true_mfa() | 'undefined',
- start = undefined ,%:: timestamp() | 'undefined',
- stop = undefined ,%:: timestamp() | 'undefined',
- parent = undefined ,%:: pid() | 'undefined',
- children = [] %:: [pid()]
- }).
-
diff --git a/lib/percept/src/percept_analyzer.erl b/lib/percept/src/percept_analyzer.erl
deleted file mode 100644
index f38d026905..0000000000
--- a/lib/percept/src/percept_analyzer.erl
+++ /dev/null
@@ -1,368 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2007-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-
-%% @doc Utility functions to operate on percept data. These functions should
-%% be considered experimental. Behaviour may change in future releases.
-
--module(percept_analyzer).
--export([
- minmax/1,
- waiting_activities/1,
- activities2count/2,
- activities2count/3,
- activities2count2/2,
- analyze_activities/2,
- runnable_count/1,
- runnable_count/2,
- seconds2ts/2,
- minmax_activities/2,
- mean/1
- ]).
-
--include("percept.hrl").
-
-%%==========================================================================
-%%
-%% Interface functions
-%%
-%%==========================================================================
-
-
-%% @spec minmax([{X, Y}]) -> {MinX, MinY, MaxX, MaxY}
-%% X = number()
-%% Y = number()
-%% MinX = number()
-%% MinY = number()
-%% MaxX = number()
-%% MaxY = number()
-%% @doc Returns the min and max of a set of 2-dimensional numbers.
-
-minmax(Data) ->
- Xs = [ X || {X,_Y} <- Data],
- Ys = [ Y || {_X, Y} <- Data],
- {lists:min(Xs), lists:min(Ys), lists:max(Xs), lists:max(Ys)}.
-
-%% @spec mean([number()]) -> {Mean, StdDev, N}
-%% Mean = float()
-%% StdDev = float()
-%% N = integer()
-%% @doc Calculates the mean and the standard deviation of a set of
-%% numbers.
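-%% The standard deviation is the sample standard deviation,
-%% math:sqrt((SumSquare - Sum*Sum/N)/(N - 1)).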
-
-mean([]) -> {0, 0, 0};
-mean([Value]) -> {Value, 0, 1};
-mean(List) -> mean(List, {0, 0, 0}).
-
-mean([], {Sum, SumSquare, N}) ->
- Mean = Sum / N,
- StdDev = math:sqrt((SumSquare - Sum*Sum/N)/(N - 1)),
- {Mean, StdDev, N};
-mean([Value | List], {Sum, SumSquare, N}) ->
- mean(List, {Sum + Value, SumSquare + Value*Value, N + 1}).
-
-
-
-activities2count2(Acts, StartTs) ->
- Start = inactive_start_states(Acts),
- activities2count2(Acts, StartTs, Start, []).
-
-activities2count2([], _, _, Out) -> lists:reverse(Out);
-activities2count2([#activity{ id = Id, timestamp = Ts, state = active} | Acts], StartTs, {Proc,Port}, Out) when is_pid(Id) ->
- activities2count2(Acts, StartTs, {Proc + 1, Port}, [{?seconds(Ts, StartTs), Proc + 1, Port}|Out]);
-activities2count2([#activity{ id = Id, timestamp = Ts, state = inactive} | Acts], StartTs, {Proc,Port}, Out) when is_pid(Id) ->
- activities2count2(Acts, StartTs, {Proc - 1, Port}, [{?seconds(Ts, StartTs), Proc - 1, Port}|Out]);
-activities2count2([#activity{ id = Id, timestamp = Ts, state = active} | Acts], StartTs, {Proc,Port}, Out) when is_port(Id) ->
- activities2count2(Acts, StartTs, {Proc, Port + 1}, [{?seconds(Ts, StartTs), Proc, Port + 1}|Out]);
-activities2count2([#activity{ id = Id, timestamp = Ts, state = inactive} | Acts], StartTs, {Proc,Port}, Out) when is_port(Id) ->
- activities2count2(Acts, StartTs, {Proc, Port - 1}, [{?seconds(Ts, StartTs), Proc, Port - 1}|Out]).
-
-
-inactive_start_states(Acts) ->
- D = activity_start_states(Acts, dict:new()),
- dict:fold(fun
- (K, inactive, {Procs, Ports}) when is_pid(K) -> {Procs + 1, Ports};
- (K, inactive, {Procs, Ports}) when is_port(K) -> {Procs, Ports + 1};
- (_, _, {Procs, Ports}) -> {Procs, Ports}
- end, {0,0}, D).
-activity_start_states([], D) -> D;
-activity_start_states([#activity{id = Id, state = State}|Acts], D) ->
- case dict:is_key(Id, D) of
- true -> activity_start_states(Acts, D);
- false -> activity_start_states(Acts, dict:store(Id, State, D))
- end.
-
-
-
-
-%% @spec activities2count(#activity{}, timestamp()) -> Result
-%% Result = [{Time, ProcessCount, PortCount}]
-%% Time = float()
-%% ProcessCount = integer()
-%% PortCount = integer()
-%% @doc Calculate the resulting active processes and ports during
-%% the activity interval.
-%% Also checks active/inactive consistency.
-%% A task will always begin with an active state and end with an inactive state.
-
-activities2count(Acts, StartTs) when is_list(Acts) -> activities2count(Acts, StartTs, separated).
-
-activities2count(Acts, StartTs, Type) when is_list(Acts) -> activities2count_loop(Acts, {StartTs, {0,0}}, Type, []).
-
-activities2count_loop([], _, _, Out) -> lists:reverse(Out);
-activities2count_loop(
- [#activity{ timestamp = Ts, id = Id, runnable_count = Rc} | Acts],
- {StartTs, {Procs, Ports}}, separated, Out) ->
-
- Time = ?seconds(Ts, StartTs),
- case Id of
- Id when is_port(Id) ->
- Entry = {Time, Procs, Rc},
- activities2count_loop(Acts, {StartTs, {Procs, Rc}}, separated, [Entry | Out]);
- Id when is_pid(Id) ->
- Entry = {Time, Rc, Ports},
- activities2count_loop(Acts, {StartTs, {Rc, Ports}}, separated, [Entry | Out]);
- _ ->
- activities2count_loop(Acts, {StartTs,{Procs, Ports}}, separated, Out)
- end;
-activities2count_loop(
- [#activity{ timestamp = Ts, id = Id, runnable_count = Rc} | Acts],
- {StartTs, {Procs, Ports}}, summated, Out) ->
-
- Time = ?seconds(Ts, StartTs),
- case Id of
- Id when is_port(Id) ->
- Entry = {Time, Procs + Rc},
- activities2count_loop(Acts, {StartTs, {Procs, Rc}}, summated, [Entry | Out]);
- Id when is_pid(Id) ->
- Entry = {Time, Rc + Ports},
- activities2count_loop(Acts, {StartTs, {Rc, Ports}}, summated, [Entry | Out])
- end.
-
-%% @spec waiting_activities([#activity{}]) -> FunctionList
-%% FunctionList = [{Seconds, Mfa, {Mean, StdDev, N}}]
-%% Seconds = float()
-%% Mfa = mfa()
-%% Mean = float()
-%% StdDev = float()
-%% N = integer()
-%% @doc Calculates the time, both average and total, that a process has spent
-%% in a receive state at a specific function. However, if there are multiple
-%% receives in a function, it cannot differentiate between them.
-
-waiting_activities(Activities) ->
- ListedMfas = waiting_activities_mfa_list(Activities, []),
- Unsorted = lists:foldl(
- fun (Mfa, MfaList) ->
- {Total, WaitingTimes} = get({waiting_mfa, Mfa}),
-
- % cleanup
- erlang:erase({waiting_mfa, Mfa}),
-
- % statistics of receive waiting places
- Stats = mean(WaitingTimes),
-
- [{Total, Mfa, Stats} | MfaList]
- end, [], ListedMfas),
- lists:sort(fun ({A,_,_},{B,_,_}) ->
- if
- A > B -> true;
- true -> false
- end
- end, Unsorted).
-
-
-%% Generate lists of receive waiting times per mfa
-%% Out:
-%% ListedMfas = [mfa()]
-%% Intrinsic:
-%%	get({waiting_mfa, mfa()}) ->
-%%		{Total, [WaitingTime]}
-%% WaitingTime = float()
-
-waiting_activities_mfa_list([], ListedMfas) -> ListedMfas;
-waiting_activities_mfa_list([Activity|Activities], ListedMfas) ->
- #activity{id = Pid, state = Act, timestamp = Time, where = MFA} = Activity,
- case Act of
- active ->
- waiting_activities_mfa_list(Activities, ListedMfas);
- inactive ->
-	    % We want to know how long the wait in a receive is;
-	    % it is given by the next activity
- case Activities of
- [] ->
- [Info] = percept_db:select(information, Pid),
- case Info#information.stop of
- undefined ->
- % get profile end time
- Waited = ?seconds(
- percept_db:select({system,stop_ts}),
- Time);
- Time2 ->
- Waited = ?seconds(Time2, Time)
- end,
- case get({waiting_mfa, MFA}) of
- undefined ->
- put({waiting_mfa, MFA}, {Waited, [Waited]}),
- [MFA | ListedMfas];
- {Total, TimedMfa} ->
- put({waiting_mfa, MFA}, {Total + Waited, [Waited | TimedMfa]}),
- ListedMfas
- end;
- [#activity{timestamp=Time2, id = Pid, state = active} | _ ] ->
- % Calculate waiting time
- Waited = ?seconds(Time2, Time),
- % Get previous entry
-
- case get({waiting_mfa, MFA}) of
- undefined ->
- % add entry to list
- put({waiting_mfa, MFA}, {Waited, [Waited]}),
- waiting_activities_mfa_list(Activities, [MFA|ListedMfas]);
- {Total, TimedMfa} ->
- put({waiting_mfa, MFA}, {Total + Waited, [Waited | TimedMfa]}),
- waiting_activities_mfa_list(Activities, ListedMfas)
- end;
- _ -> error
- end
- end.
-
-%% seconds2ts(Seconds, StartTs) -> TS
-%% In:
-%% Seconds = float()
-%% StartTs = timestamp()
-%% Out:
-%% TS = timestamp()
-
-%% @spec seconds2ts(float(), StartTs::{integer(),integer(),integer()}) -> timestamp()
-%% @doc Calculates a timestamp given a duration in seconds and a starting timestamp.
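-%% For example, seconds2ts(0.5, {0,1,0}) returns {0,1,500000}.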
-
-seconds2ts(Seconds, {Ms, S, Us}) ->
- % Calculate mega seconds integer
- MsInteger = trunc(Seconds) div 1000000 ,
-
-	% Calculate the remainder for seconds
- SInteger = trunc(Seconds),
-
-	% Calculate the remainder for micro seconds
- UsInteger = trunc((Seconds - SInteger) * 1000000),
-
- % Wrap overflows
-
- UsOut = (UsInteger + Us) rem 1000000,
- SOut = ((SInteger + S) + (UsInteger + Us) div 1000000) rem 1000000,
- MsOut = (MsInteger+ Ms) + ((SInteger + S) + (UsInteger + Us) div 1000000) div 1000000,
-
- {MsOut, SOut, UsOut}.
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%
-% Analyze interval for concurrency
-%
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-%% @spec analyze_activities(integer(), [#activity{}]) -> [{integer(),#activity{}}]
-%% @hidden
-
-analyze_activities(Threshold, Activities) ->
- RunnableCount = runnable_count(Activities, 0),
- analyze_runnable_activities(Threshold, RunnableCount).
-
-
-%% runnable_count(Activities, StartValue) -> RunnableCount
-%% In:
-%% Activities = [activity()]
-%% StartValue = integer()
-%% Out:
-%% RunnableCount = [{integer(), activity()}]
-%% Purpose:
-%% Calculate the runnable count of a given interval of generic
-%% activities.
-
-%% @spec runnable_count([#activity{}]) -> [{integer(),#activity{}}]
-%% @hidden
-
-runnable_count(Activities) ->
- Threshold = runnable_count_threshold(Activities),
- runnable_count(Activities, Threshold, []).
-
-runnable_count_threshold(Activities) ->
- CountedActs = runnable_count(Activities, 0),
- Counts = [C || {C, _} <- CountedActs],
- Min = lists:min(Counts),
- 0 - Min.
-%% @spec runnable_count([#activity{}],integer()) -> [{integer(),#activity{}}]
-%% @hidden
-
-runnable_count(Activities, StartCount) when is_integer(StartCount) ->
- runnable_count(Activities, StartCount, []).
-runnable_count([], _ , Out) ->
- lists:reverse(Out);
-runnable_count([A | As], PrevCount, Out) ->
- case A#activity.state of
- active ->
- runnable_count(As, PrevCount + 1, [{PrevCount + 1, A} | Out]);
- inactive ->
- runnable_count(As, PrevCount - 1, [{PrevCount - 1, A} | Out])
- end.
-
-%% In:
-%% Threshold = integer(),
-%% RunnableActivities = [{Rc, activity()}]
-%% Rc = integer()
-
-analyze_runnable_activities(Threshold, RunnableActivities) ->
- analyze_runnable_activities(Threshold, RunnableActivities, []).
-
-analyze_runnable_activities( _z, [], Out) ->
- lists:reverse(Out);
-analyze_runnable_activities(Threshold, [{Rc, Act} | RunnableActs], Out) ->
- if
- Rc =< Threshold ->
- analyze_runnable_activities(Threshold, RunnableActs, [{Rc,Act} | Out]);
- true ->
- analyze_runnable_activities(Threshold, RunnableActs, Out)
- end.
-
-%% minmax_activity(Activities, Count) -> {Min, Max}
-%% In:
-%% Activities = [activity()]
-%% InitialCount = non_neg_integer()
-%% Out:
-%% {Min, Max}
-%% Min = non_neg_integer()
-%% Max = non_neg_integer()
-%% Purpose:
-%% Minimal and maximal activity during an activity interval.
-%% Initial activity count needs to be supplied.
-
-%% @spec minmax_activities([#activity{}], integer()) -> {integer(), integer()}
-%% @doc Calculates the minimum and maximum of runnable activities (processes
-%% and ports) during the interval referred to by the activity list.
-
-minmax_activities(Activities, Count) ->
- minmax_activities(Activities, Count, {Count, Count}).
-minmax_activities([], _, Out) ->
- Out;
-minmax_activities([A|Acts], Count, {Min, Max}) ->
- case A#activity.state of
- active ->
- minmax_activities(Acts, Count + 1, {Min, lists:max([Count + 1, Max])});
- inactive ->
- minmax_activities(Acts, Count - 1, {lists:min([Count - 1, Min]), Max})
- end.
diff --git a/lib/percept/src/percept_db.erl b/lib/percept/src/percept_db.erl
deleted file mode 100644
index 6cbe3ce022..0000000000
--- a/lib/percept/src/percept_db.erl
+++ /dev/null
@@ -1,780 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2007-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-
-%%
-%% @doc Percept database.
-%%
-%%
-
--module(percept_db).
-
--export([start/0,
- stop/0,
- insert/1,
- select/2,
- select/1,
- consolidate/0]).
-
--include("percept.hrl").
--define(STOP_TIMEOUT, 1000).
-%%==========================================================================
-%% Type definitions
-%%==========================================================================
-
-%% @type activity_option() =
-%% {ts_min, timestamp()} |
-%% {ts_max, timestamp()} |
-%% {ts_exact, bool()} |
-%% {mfa, {atom(), atom(), byte()}} |
-%% {state, active | inactive} |
-%% {id, all | procs | ports | pid() | port()}
-
-%% @type scheduler_option() =
-%% {ts_min, timestamp()} |
-%% {ts_max, timestamp()} |
-%% {ts_exact, bool()} |
-%% {id, scheduler_id()}
-
-%% @type system_option() = start_ts | stop_ts
-
-%% @type information_option() =
-%% all | procs | ports | pid() | port()
-
-
-
-
-%%==========================================================================
-%% Interface functions
-%%==========================================================================
-
-%% @spec start() -> ok | {started, Pid} | {restarted, Pid}
-%% Pid = pid()
-%% @doc Starts or restarts the percept database.
-
--spec start() -> {'started', pid()} | {'restarted', pid()}.
-
-start() ->
- case erlang:whereis(percept_db) of
- undefined ->
- {started, do_start()};
- PerceptDB ->
- {restarted, restart(PerceptDB)}
- end.
-
-%% @spec restart(pid()) -> pid()
-%% @private
-%% @doc restarts the percept database.
-
--spec restart(pid())-> pid().
-
-restart(PerceptDB)->
- stop_sync(PerceptDB),
- do_start().
-
-%% @spec do_start() -> pid()
-%% @private
-%% @doc starts the percept database.
-
--spec do_start()-> pid().
-
-do_start()->
- Pid = spawn(fun() -> init_percept_db() end),
- erlang:register(percept_db, Pid),
- Pid.
-
-%% @spec stop() -> not_started | {stopped, Pid}
-%% Pid = pid()
-%% @doc Stops the percept database.
-
--spec stop() -> 'not_started' | {'stopped', pid()}.
-
-stop() ->
- case erlang:whereis(percept_db) of
- undefined ->
- not_started;
- Pid ->
- Pid ! {action, stop},
- {stopped, Pid}
- end.
-
-%% @spec stop_sync(pid()) -> true
-%% @private
-%% @doc Stops the percept database, with a synchronous call.
-
--spec stop_sync(pid()) -> true.
-
-stop_sync(Pid) ->
- MonitorRef = erlang:monitor(process, Pid),
- _ = stop(),
- receive
- {'DOWN', MonitorRef, _Type, Pid, _Info}->
- true
- after ?STOP_TIMEOUT->
- erlang:demonitor(MonitorRef, [flush]),
- exit(Pid, kill)
- end.
-
-%% @spec insert(tuple()) -> ok
-%% @doc Inserts a trace or profile message to the database.
-
-insert(Trace) ->
- percept_db ! {insert, Trace},
- ok.
-
-
-%% @spec select({atom(), Options}) -> Result
-%% @doc Synchronous call. Selects information based on a query.
-%%
-%% <p>Queries:</p>
-%% <pre>
-%% {system, Option}
-%% Option = system_option()
-%% Result = timestamp()
-%% {information, Options}
-%% Options = [information_option()]
-%% Result = [#information{}]
-%% {scheduler, Options}
-%%	Options = [scheduler_option()]
-%% Result = [#activity{}]
-%% {activity, Options}
-%% Options = [activity_option()]
-%% Result = [#activity{}]
-%% </pre>
-%% <p>
-%% Note: selections on Id are always OR'ed together; all other options are AND'ed.
-%% </p>
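-%% <p>For example, <code>select({activity, [{ts_min, StartTs}, {id, procs}]})</code>
-%% selects the process activities from StartTs onwards.</p>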
-
-select(Query) ->
- percept_db ! {select, self(), Query},
- receive {result, Match} -> Match end.
-
-%% @spec select(atom(), list()) -> Result
-%% @equiv select({Table,Options})
-
-select(Table, Options) ->
- percept_db ! {select, self(), {Table, Options}},
- receive {result, Match} -> Match end.
-
-%% @spec consolidate() -> Result
-%% @doc Checks timestamp and state-flow inconsistencies in
-%% the database.
-
-consolidate() ->
- percept_db ! {action, consolidate},
- ok.
-
-%%==========================================================================
-%% Database loop
-%%==========================================================================
-
-init_percept_db() ->
- % Proc and Port information
- pdb_info = ets:new(pdb_info, [named_table, private, {keypos, #information.id}, set]),
-
- % Scheduler runnability
- pdb_scheduler = ets:new(pdb_scheduler, [named_table, private, {keypos, #activity.timestamp}, ordered_set]),
-
- % Process and Port runnability
- pdb_activity = ets:new(pdb_activity, [named_table, private, {keypos, #activity.timestamp}, ordered_set]),
-
- % System status
- pdb_system = ets:new(pdb_system, [named_table, private, {keypos, 1}, set]),
-
- % System warnings
- pdb_warnings = ets:new(pdb_warnings, [named_table, private, {keypos, 1}, ordered_set]),
- put(debug, 0),
- loop_percept_db().
-
-loop_percept_db() ->
- receive
- {insert, Trace} ->
- insert_trace(clean_trace(Trace)),
- loop_percept_db();
- {select, Pid, Query} ->
- Pid ! {result, select_query(Query)},
- loop_percept_db();
- {action, stop} ->
- stopped;
- {action, consolidate} ->
- consolidate_db(),
- loop_percept_db();
- {operate, Pid, {Table, {Fun, Start}}} ->
- Result = ets:foldl(Fun, Start, Table),
- Pid ! {result, Result},
- loop_percept_db();
- Unhandled ->
- io:format("loop_percept_db, unhandled query: ~p~n", [Unhandled]),
- loop_percept_db()
- end.
-
-%%==========================================================================
-%% Auxiliary functions
-%%==========================================================================
-
-%% cleans trace messages from external pids
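-%% e.g. a pid from a remote node, <123.45.6>, is rewritten to <0.45.6>
-%% so that it can be recreated locally with erlang:list_to_pid/1.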
-
-clean_trace(Trace) when is_tuple(Trace) -> list_to_tuple(clean_trace(tuple_to_list(Trace)));
-clean_trace(Trace) when is_list(Trace) -> clean_list(Trace, []);
-clean_trace(Trace) when is_pid(Trace) ->
- PidStr = pid_to_list(Trace),
- [_,P2,P3p] = string:tokens(PidStr,"."),
- P3 = lists:sublist(P3p, 1, length(P3p) - 1),
- erlang:list_to_pid("<0." ++ P2 ++ "." ++ P3 ++ ">");
-clean_trace(Trace) -> Trace.
-
-clean_list([], Out) -> lists:reverse(Out);
-clean_list([Element|Trace], Out) ->
- clean_list(Trace, [clean_trace(Element)|Out]).
-
-
-insert_trace(Trace) ->
- case Trace of
- {profile_start, Ts} ->
- update_system_start_ts(Ts),
- ok;
- {profile_stop, Ts} ->
- update_system_stop_ts(Ts),
- ok;
- %%% erlang:system_profile, option: runnable_procs
- %%% ---------------------------------------------
- {profile, Id, State, Mfa, TS} when is_pid(Id) ->
- % Update runnable count in activity and db
-
- case check_activity_consistency(Id, State) of
- invalid_state ->
- ignored;
- ok ->
- Rc = get_runnable_count(procs, State),
- % Update registered procs
- % insert proc activity
- update_activity(#activity{
- id = Id,
- state = State,
- timestamp = TS,
- runnable_count = Rc,
- where = Mfa}),
- ok
- end;
- %%% erlang:system_profile, option: runnable_ports
- %%% ---------------------------------------------
- {profile, Id, State, Mfa, TS} when is_port(Id) ->
- case check_activity_consistency(Id, State) of
- invalid_state ->
- ignored;
- ok ->
- % Update runnable count in activity and db
- Rc = get_runnable_count(ports, State),
-
- % Update registered ports
- % insert port activity
- update_activity(#activity{
- id = Id,
- state = State,
- timestamp = TS,
- runnable_count = Rc,
- where = Mfa}),
-
- ok
- end;
- %%% erlang:system_profile, option: scheduler
- {profile, scheduler, Id, State, Scheds, Ts} ->
- % insert scheduler activity
- update_scheduler(#activity{
- id = {scheduler, Id},
- state = State,
- timestamp = Ts,
- where = Scheds}),
- ok;
-
- %%% erlang:trace, option: procs
- %%% ---------------------------
- {trace_ts, Parent, spawn, Pid, Mfa, TS} ->
- InformativeMfa = mfa2informative(Mfa),
- % Update id_information
- update_information(#information{id = Pid, start = TS, parent = Parent, entry = InformativeMfa}),
- update_information_child(Parent, Pid),
- ok;
- {trace_ts, Pid, exit, _Reason, TS} ->
- % Update registered procs
-
- % Update id_information
- update_information(#information{id = Pid, stop = TS}),
-
- ok;
- {trace_ts, Pid, register, Name, _Ts} when is_pid(Pid) ->
- % Update id_information
- update_information(#information{id = Pid, name = Name}),
- ok;
- {trace_ts, _Pid, unregister, _Name, _Ts} ->
- % Not implemented
- ok;
- {trace_ts, Pid, getting_unlinked, _Id, _Ts} when is_pid(Pid) ->
- % Update id_information
- ok;
- {trace_ts, Pid, getting_linked, _Id, _Ts} when is_pid(Pid)->
- % Update id_information
- ok;
- {trace_ts, Pid, link, _Id, _Ts} when is_pid(Pid)->
- % Update id_information
- ok;
- {trace_ts, Pid, unlink, _Id, _Ts} when is_pid(Pid) ->
- % Update id_information
- ok;
-
- %%% erlang:trace, option: ports
- %%% ----------------------------
- {trace_ts, Caller, open, Port, Driver, TS} ->
- % Update id_information
- update_information(#information{
- id = Port, entry = Driver, start = TS, parent = Caller}),
- ok;
- {trace_ts, Port, closed, _Reason, Ts} ->
- % Update id_information
- update_information(#information{id = Port, stop = Ts}),
- ok;
-
- Unhandled ->
- io:format("insert_trace, unhandled: ~p~n", [Unhandled])
- end.
-
-mfa2informative({erlang, apply, [M, F, Args]}) -> mfa2informative({M, F,Args});
-mfa2informative({erlang, apply, [Fun, Args]}) ->
- FunInfo = erlang:fun_info(Fun),
- M = case proplists:get_value(module, FunInfo, undefined) of
- [] -> undefined_fun_module;
- undefined -> undefined_fun_module;
- Module -> Module
- end,
- F = case proplists:get_value(name, FunInfo, undefined) of
- [] -> undefined_fun_function;
- undefined -> undefined_fun_function;
- Function -> Function
- end,
- mfa2informative({M, F, Args});
-mfa2informative(Mfa) -> Mfa.
-
-%% consolidate_db() -> bool()
-%% Purpose:
-%% Check start/stop time
-%% Activity consistency
-
-consolidate_db() ->
- io:format("Consolidating...~n"),
- % Check start/stop timestamps
- case select_query({system, start_ts}) of
- undefined ->
- Min = lists:min(list_all_ts()),
- update_system_start_ts(Min);
- _ -> ok
- end,
- case select_query({system, stop_ts}) of
- undefined ->
- Max = lists:max(list_all_ts()),
- update_system_stop_ts(Max);
- _ -> ok
- end,
- consolidate_runnability(),
- ok.
-
-consolidate_runnability() ->
- put({runnable, procs}, undefined),
- put({runnable, ports}, undefined),
- consolidate_runnability_loop(ets:first(pdb_activity)).
-
-consolidate_runnability_loop('$end_of_table') -> ok;
-consolidate_runnability_loop(Key) ->
- case ets:lookup(pdb_activity, Key) of
- [#activity{id = Id, state = State } = A] when is_pid(Id) ->
- Rc = get_runnable_count(procs, State),
- ets:insert(pdb_activity, A#activity{ runnable_count = Rc});
- [#activity{id = Id, state = State } = A] when is_port(Id) ->
- Rc = get_runnable_count(ports, State),
- ets:insert(pdb_activity, A#activity{ runnable_count = Rc});
- _ -> throw(consolidate)
- end,
- consolidate_runnability_loop(ets:next(pdb_activity, Key)).
-
-list_all_ts() ->
- ATs = [Act#activity.timestamp || Act <- select_query({activity, []})],
- STs = [Act#activity.timestamp || Act <- select_query({scheduler, []})],
- ITs = lists:flatten([
- [I#information.start,
- I#information.stop] ||
- I <- select_query({information, all})]),
- %% Filter out all undefined (non ts)
- [Elem || Elem = {_,_,_} <- ATs ++ STs ++ ITs].
-
-%% get_runnable_count(Type, State) -> RunnableCount
-%% In:
-%% Type = procs | ports
-%% State = active | inactive
-%% Out:
-%% RunnableCount = integer()
-%% Purpose:
-%% Keep track of the number of runnable ports and processes
-%% during the profile duration.
-
-get_runnable_count(Type, State) ->
- case {get({runnable, Type}), State} of
- {undefined, active} ->
- put({runnable, Type}, 1),
- 1;
- {N, active} ->
- put({runnable, Type}, N + 1),
- N + 1;
- {N, inactive} ->
- put({runnable, Type}, N - 1),
- N - 1;
- Unhandled ->
- io:format("get_runnable_count, unhandled ~p~n", [Unhandled]),
- Unhandled
- end.
-
-check_activity_consistency(Id, State) ->
- case get({previous_state, Id}) of
- State ->
- io:format("check_activity_consistency, state flow invalid.~n"),
- invalid_state;
- undefined when State == inactive ->
- invalid_state;
- _ ->
- put({previous_state, Id}, State),
- ok
- end.
-%%%
-%%% select_query
-%%% In:
-%%% Query = {Table, Option}
-%%% Table = system | activity | scheduler | information
-
-
-select_query(Query) ->
- case Query of
- {system, _ } ->
- select_query_system(Query);
- {activity, _ } ->
- select_query_activity(Query);
- {scheduler, _} ->
- select_query_scheduler(Query);
- {information, _ } ->
- select_query_information(Query);
- Unhandled ->
- io:format("select_query, unhandled: ~p~n", [Unhandled]),
- []
- end.
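-
-%% A minimal query sketch (not part of the original module), assuming the
-%% pdb_* tables have already been populated from a processed trace.
-select_query_sketch() ->
-    StartTs = select_query({system, start_ts}),
-    Procs   = select_query({information, procs}),
-    Acts    = select_query({activity, [{ts_min, StartTs}]}),
-    {StartTs, length(Procs), length(Acts)}.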
-
-%%% select_query_information
-
-select_query_information(Query) ->
- case Query of
- {information, all} ->
- ets:select(pdb_info, [{
- #information{ _ = '_'},
- [],
- ['$_']
- }]);
- {information, procs} ->
- ets:select(pdb_info, [{
- #information{ id = '$1', _ = '_'},
- [{is_pid, '$1'}],
- ['$_']
- }]);
- {information, ports} ->
- ets:select(pdb_info, [{
- #information{ id = '$1', _ = '_'},
- [{is_port, '$1'}],
- ['$_']
- }]);
- {information, Id} when is_port(Id) ; is_pid(Id) ->
- ets:select(pdb_info, [{
- #information{ id = Id, _ = '_'},
- [],
- ['$_']
- }]);
- Unhandled ->
- io:format("select_query_information, unhandled: ~p~n", [Unhandled]),
- []
- end.
-
-%%% select_query_scheduler
-
-select_query_scheduler(Query) ->
- case Query of
- {scheduler, Options} when is_list(Options) ->
- Head = #activity{
- timestamp = '$1',
- id = '$2',
- state = '$3',
- where = '$4',
- _ = '_'},
- Body = ['$_'],
-	    % We don't need the ids here
- {Constraints, _ } = activity_ms_and(Head, Options, [], []),
- ets:select(pdb_scheduler, [{Head, Constraints, Body}]);
- Unhandled ->
- io:format("select_query_scheduler, unhandled: ~p~n", [Unhandled]),
- []
- end.
-
-%%% select_query_system
-
-select_query_system(Query) ->
- case Query of
- {system, start_ts} ->
- case ets:lookup(pdb_system, {system, start_ts}) of
- [] -> undefined;
- [{{system, start_ts}, StartTS}] -> StartTS
- end;
- {system, stop_ts} ->
- case ets:lookup(pdb_system, {system, stop_ts}) of
- [] -> undefined;
- [{{system, stop_ts}, StopTS}] -> StopTS
- end;
- Unhandled ->
- io:format("select_query_system, unhandled: ~p~n", [Unhandled]),
- []
- end.
-
-%%% select_query_activity
-
-select_query_activity(Query) ->
- case Query of
- {activity, Options} when is_list(Options) ->
- case lists:member({ts_exact, true},Options) of
- true ->
- case catch select_query_activity_exact_ts(Options) of
- {'EXIT', Reason} ->
- io:format(" - select_query_activity [ catch! ]: ~p~n", [Reason]),
- [];
- Match ->
- Match
- end;
- false ->
- MS = activity_ms(Options),
- case catch ets:select(pdb_activity, MS) of
- {'EXIT', Reason} ->
- io:format(" - select_query_activity [ catch! ]: ~p~n", [Reason]),
- [];
- Match ->
- Match
- end
- end;
- Unhandled ->
- io:format("select_query_activity, unhandled: ~p~n", [Unhandled]),
- []
- end.
-
-select_query_activity_exact_ts(Options) ->
- case { proplists:get_value(ts_min, Options, undefined), proplists:get_value(ts_max, Options, undefined) } of
- {undefined, undefined} -> [];
- {undefined, _ } -> [];
- {_ , undefined} -> [];
- {TsMin , TsMax } ->
- % Remove unwanted options
- Opts = lists_filter([ts_exact], Options),
- Ms = activity_ms(Opts),
- case ets:select(pdb_activity, Ms) of
- % no entries within interval
- [] ->
- Opts2 = lists_filter([ts_max, ts_min], Opts) ++ [{ts_min, TsMax}],
- Ms2 = activity_ms(Opts2),
- case ets:select(pdb_activity, Ms2, 1) of
- '$end_of_table' -> [];
- {[E], _} ->
- [PrevAct] = ets:lookup(pdb_activity, ets:prev(pdb_activity, E#activity.timestamp)),
- [PrevAct#activity{ timestamp = TsMin} , E]
- end;
- Acts ->
- [Head| _] = Acts,
- if
- Head#activity.timestamp == TsMin -> Acts;
- true ->
- PrevTs = ets:prev(pdb_activity, Head#activity.timestamp),
- case ets:lookup(pdb_activity, PrevTs) of
- [] -> Acts;
- [PrevAct] -> [PrevAct#activity{timestamp = TsMin}|Acts]
- end
- end
- end
- end.
-
-lists_filter([], Options) -> Options;
-lists_filter([D|Ds], Options) ->
- lists_filter(Ds, lists:filter(
- fun ({Pred, _}) ->
- if
- Pred == D -> false;
- true -> true
- end
- end, Options)).
-
-% Options:
-% {ts_min, timestamp()}
-% {ts_max, timestamp()}
-% {mfa, mfa()}
-% {state, active | inactive}
-% {id, all | procs | ports | pid() | port()}
-%
-% All options are combined with AND, except id options, which are combined with OR.
-% For example: [{ts_min, TS1}, {ts_max, TS2}, {id, PID1}, {id, PORT1}] would be
-% ({ts_min, TS1} and {ts_max, TS2} and {id, PID1}) or
-% ({ts_min, TS1} and {ts_max, TS2} and {id, PORT1}).
-
-activity_ms(Opts) ->
-    % Match head over #activity{timestamp, id, state, where}
- Head = #activity{
- timestamp = '$1',
- id = '$2',
- state = '$3',
- where = '$4',
- _ = '_'},
-
- {Conditions, IDs} = activity_ms_and(Head, Opts, [], []),
- Body = ['$_'],
-
- lists:foldl(
- fun (Option, MS) ->
- case Option of
- {id, ports} ->
- [{Head, [{is_port, Head#activity.id} | Conditions], Body} | MS];
- {id, procs} ->
- [{Head,[{is_pid, Head#activity.id} | Conditions], Body} | MS];
- {id, ID} when is_pid(ID) ; is_port(ID) ->
- [{Head,[{'==', Head#activity.id, ID} | Conditions], Body} | MS];
- {id, all} ->
- [{Head, Conditions,Body} | MS];
- _ ->
- io:format("activity_ms id dropped ~p~n", [Option]),
- MS
- end
- end, [], IDs).
-
-activity_ms_and(_, [], Constraints, []) ->
- {Constraints, [{id, all}]};
-activity_ms_and(_, [], Constraints, IDs) ->
- {Constraints, IDs};
-activity_ms_and(Head, [Opt|Opts], Constraints, IDs) ->
- case Opt of
- {ts_min, Min} ->
- activity_ms_and(Head, Opts,
- [{'>=', Head#activity.timestamp, {Min}} | Constraints], IDs);
- {ts_max, Max} ->
- activity_ms_and(Head, Opts,
- [{'=<', Head#activity.timestamp, {Max}} | Constraints], IDs);
- {id, ID} ->
- activity_ms_and(Head, Opts,
- Constraints, [{id, ID} | IDs]);
- {state, State} ->
- activity_ms_and(Head, Opts,
- [{'==', Head#activity.state, State} | Constraints], IDs);
- {mfa, Mfa} ->
- activity_ms_and(Head, Opts,
- [{'==', Head#activity.where, {Mfa}}| Constraints], IDs);
- _ ->
- io:format("activity_ms_and option dropped ~p~n", [Opt]),
- activity_ms_and(Head, Opts, Constraints, IDs)
- end.
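-
-%% A minimal sketch (not part of the original module) of the AND/OR
-%% semantics described above: one match-spec clause is produced per id
-%% option, each carrying the shared timestamp constraints. Assumes Pid is
-%% a pid and Port is a port.
-activity_ms_sketch(Pid, Port, TsMin, TsMax) ->
-    MS = activity_ms([{ts_min, TsMin}, {ts_max, TsMax}, {id, Pid}, {id, Port}]),
-    2 = length(MS),
-    MS.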
-
-% Information = information()
-
-%%%
-%%% update_information
-%%%
-
-
-update_information(#information{id = Id} = NewInfo) ->
- case ets:lookup(pdb_info, Id) of
- [] ->
- ets:insert(pdb_info, NewInfo),
- ok;
- [Info] ->
-	    % Convert NewInfo and Info to lists, then replace old values with
-	    % new values that are neither undefined nor the empty list.
-
- {_, Result} = lists:foldl(
- fun (InfoElem, {[NewInfoElem | Tail], Out}) ->
- case NewInfoElem of
- undefined ->
- {Tail, [InfoElem | Out]};
- [] ->
- {Tail, [InfoElem | Out]};
- NewInfoElem ->
- {Tail, [NewInfoElem | Out]}
- end
- end, {tuple_to_list(NewInfo), []}, tuple_to_list(Info)),
- ets:insert(pdb_info, list_to_tuple(lists:reverse(Result))),
- ok
- end.
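-
-%% A minimal sketch (not part of the original module) of the merge
-%% behaviour: fields left undefined (or empty) in a later call keep their
-%% previously stored values. Assumes the pdb_info table exists.
-update_information_sketch(Pid, Parent, Entry, TS) ->
-    update_information(#information{id = Pid, parent = Parent}),
-    update_information(#information{id = Pid, entry = Entry, start = TS}),
-    [#information{parent = Parent, entry = Entry, start = TS}] =
-        ets:lookup(pdb_info, Pid),
-    ok.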
-
-update_information_child(Id, Child) ->
- case ets:lookup(pdb_info, Id) of
- [] ->
- ets:insert(pdb_info,#information{
- id = Id,
- children = [Child]}),
- ok;
- [I] ->
- ets:insert(pdb_info,I#information{children = [Child | I#information.children]}),
- ok
- end.
-
-%%%
-%%% update_activity
-%%%
-update_scheduler(Activity) ->
- ets:insert(pdb_scheduler, Activity).
-
-update_activity(Activity) ->
- ets:insert(pdb_activity, Activity).
-
-%%%
-%%% update_system_ts
-%%%
-
-update_system_start_ts(TS) ->
- case ets:lookup(pdb_system, {system, start_ts}) of
- [] ->
- ets:insert(pdb_system, {{system, start_ts}, TS});
- [{{system, start_ts}, StartTS}] ->
- DT = ?seconds(StartTS, TS),
- if
- DT > 0.0 -> ets:insert(pdb_system, {{system, start_ts}, TS});
- true -> ok
- end;
- Unhandled ->
- io:format("update_system_start_ts, unhandled ~p ~n", [Unhandled])
- end.
-
-update_system_stop_ts(TS) ->
- case ets:lookup(pdb_system, {system, stop_ts}) of
- [] ->
- ets:insert(pdb_system, {{system, stop_ts}, TS});
- [{{system, stop_ts}, StopTS}] ->
- DT = ?seconds(StopTS, TS),
- if
- DT < 0.0 -> ets:insert(pdb_system, {{system, stop_ts}, TS});
- true -> ok
- end;
- Unhandled ->
- io:format("update_system_stop_ts, unhandled ~p ~n", [Unhandled])
- end.
diff --git a/lib/percept/src/percept_graph.erl b/lib/percept/src/percept_graph.erl
deleted file mode 100644
index e5bbaca2b4..0000000000
--- a/lib/percept/src/percept_graph.erl
+++ /dev/null
@@ -1,134 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2007-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-
-%% @doc Interface for CGI requests for the graphs used by percept. The module exports functions that are implementations of ESI callbacks used by the httpd server. See http://www.erlang.org//doc/apps/inets/index.html.
-
--module(percept_graph).
--export([proc_lifetime/3, graph/3, scheduler_graph/3, activity/3, percentage/3]).
-
--include("percept.hrl").
--include_lib("kernel/include/file.hrl").
-
-%% API
-
-%% graph
-%% @spec graph(SessionID, Env, Input) -> term()
-%% @doc An ESI callback implementation used by the httpd server.
-%%
-
-graph(SessionID, Env, Input) ->
- ok = mod_esi:deliver(SessionID, header()),
- ok = mod_esi:deliver(SessionID, binary_to_list(graph(Env, Input))).
-
-%% activity
-%% @spec activity(SessionID, Env, Input) -> term()
-%% @doc An ESI callback implementation used by the httpd server.
-
-activity(SessionID, Env, Input) ->
- ok = mod_esi:deliver(SessionID, header()),
- ok = mod_esi:deliver(SessionID, binary_to_list(activity_bar(Env, Input))).
-
-proc_lifetime(SessionID, Env, Input) ->
- ok = mod_esi:deliver(SessionID, header()),
- ok = mod_esi:deliver(SessionID, binary_to_list(proc_lifetime(Env, Input))).
-
-percentage(SessionID, Env, Input) ->
- ok = mod_esi:deliver(SessionID, header()),
- ok = mod_esi:deliver(SessionID, binary_to_list(percentage(Env,Input))).
-
-scheduler_graph(SessionID, Env, Input) ->
- ok = mod_esi:deliver(SessionID, header()),
- ok = mod_esi:deliver(SessionID, binary_to_list(scheduler_graph(Env, Input))).
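-
-%% A minimal sketch (not part of the original module) of the kind of query
-%% string the httpd server passes in Input, and how it is parsed. The
-%% values are placeholders; assumes inets is available.
-input_sketch() ->
-    Input = "range_min=0.0000&range_max=1.0000&width=1200&height=600",
-    {"range_min", "0.0000"} = lists:keyfind("range_min", 1, httpd:parse_query(Input)),
-    Input.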
-
-graph(_Env, Input) ->
- Query = httpd:parse_query(Input),
- RangeMin = percept_html:get_option_value("range_min", Query),
- RangeMax = percept_html:get_option_value("range_max", Query),
- Pids = percept_html:get_option_value("pids", Query),
- Width = percept_html:get_option_value("width", Query),
- Height = percept_html:get_option_value("height", Query),
-
- % Convert Pids to id option list
- IDs = [ {id, ID} || ID <- Pids],
-
- % seconds2ts
- StartTs = percept_db:select({system, start_ts}),
- TsMin = percept_analyzer:seconds2ts(RangeMin, StartTs),
- TsMax = percept_analyzer:seconds2ts(RangeMax, StartTs),
-
- Options = [{ts_min, TsMin},{ts_max, TsMax} | IDs],
-
- Acts = percept_db:select({activity, Options}),
- Counts = case IDs of
- [] -> percept_analyzer:activities2count(Acts, StartTs);
- _ -> percept_analyzer:activities2count2(Acts, StartTs)
- end,
-
- percept_image:graph(Width, Height,Counts).
-
-scheduler_graph(_Env, Input) ->
- Query = httpd:parse_query(Input),
- RangeMin = percept_html:get_option_value("range_min", Query),
- RangeMax = percept_html:get_option_value("range_max", Query),
- Width = percept_html:get_option_value("width", Query),
- Height = percept_html:get_option_value("height", Query),
-
- StartTs = percept_db:select({system, start_ts}),
- TsMin = percept_analyzer:seconds2ts(RangeMin, StartTs),
- TsMax = percept_analyzer:seconds2ts(RangeMax, StartTs),
-
-
- Acts = percept_db:select({scheduler, [{ts_min, TsMin}, {ts_max,TsMax}]}),
-
- Counts = [{?seconds(Ts, StartTs), Scheds, 0} || #activity{where = Scheds, timestamp = Ts} <- Acts],
-
- percept_image:graph(Width, Height, Counts).
-
-activity_bar(_Env, Input) ->
- Query = httpd:parse_query(Input),
- Pid = percept_html:get_option_value("pid", Query),
- Min = percept_html:get_option_value("range_min", Query),
- Max = percept_html:get_option_value("range_max", Query),
- Width = percept_html:get_option_value("width", Query),
- Height = percept_html:get_option_value("height", Query),
-
- Data = percept_db:select({activity, [{id, Pid}]}),
- StartTs = percept_db:select({system, start_ts}),
- Activities = [{?seconds(Ts, StartTs), State} || #activity{timestamp = Ts, state = State} <- Data],
-
- percept_image:activities(Width, Height, {Min,Max},Activities).
-
-proc_lifetime(_Env, Input) ->
- Query = httpd:parse_query(Input),
- ProfileTime = percept_html:get_option_value("profiletime", Query),
- Start = percept_html:get_option_value("start", Query),
- End = percept_html:get_option_value("end", Query),
- Width = percept_html:get_option_value("width", Query),
- Height = percept_html:get_option_value("height", Query),
- percept_image:proc_lifetime(round(Width), round(Height), float(Start), float(End), float(ProfileTime)).
-
-percentage(_Env, Input) ->
- Query = httpd:parse_query(Input),
- Width = percept_html:get_option_value("width", Query),
- Height = percept_html:get_option_value("height", Query),
- Percentage = percept_html:get_option_value("percentage", Query),
- percept_image:percentage(round(Width), round(Height), float(Percentage)).
-
-header() ->
- "Content-Type: image/png\r\n\r\n".
diff --git a/lib/percept/src/percept_html.erl b/lib/percept/src/percept_html.erl
deleted file mode 100644
index a675227584..0000000000
--- a/lib/percept/src/percept_html.erl
+++ /dev/null
@@ -1,707 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2007-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-
--module(percept_html).
--export([page/3,
- codelocation_page/3,
- databases_page/3,
- load_database_page/3,
- processes_page/3,
- concurrency_page/3,
- process_info_page/3]).
-
--export([value2pid/1,
- pid2value/1,
- get_option_value/2,
- join_strings_with/2]).
-
--include("percept.hrl").
--include_lib("kernel/include/file.hrl").
-
-
-%% API
-
-page(SessionID, Env, Input) ->
- ok = mod_esi:deliver(SessionID, header()),
- ok = mod_esi:deliver(SessionID, menu()),
- ok = mod_esi:deliver(SessionID, overview_content(Env, Input)),
- ok = mod_esi:deliver(SessionID, footer()).
-
-processes_page(SessionID, _, _) ->
- ok = mod_esi:deliver(SessionID, header()),
- ok = mod_esi:deliver(SessionID, menu()),
- ok = mod_esi:deliver(SessionID, processes_content()),
- ok = mod_esi:deliver(SessionID, footer()).
-
-concurrency_page(SessionID, Env, Input) ->
- ok = mod_esi:deliver(SessionID, header()),
- ok = mod_esi:deliver(SessionID, menu()),
- ok = mod_esi:deliver(SessionID, concurrency_content(Env, Input)),
- ok = mod_esi:deliver(SessionID, footer()).
-
-databases_page(SessionID, _, _) ->
- ok = mod_esi:deliver(SessionID, header()),
- ok = mod_esi:deliver(SessionID, menu()),
- ok = mod_esi:deliver(SessionID, databases_content()),
- ok = mod_esi:deliver(SessionID, footer()).
-
-codelocation_page(SessionID, Env, Input) ->
- ok = mod_esi:deliver(SessionID, header()),
- ok = mod_esi:deliver(SessionID, menu()),
- ok = mod_esi:deliver(SessionID, codelocation_content(Env, Input)),
- ok = mod_esi:deliver(SessionID, footer()).
-
-process_info_page(SessionID, Env, Input) ->
- ok = mod_esi:deliver(SessionID, header()),
- ok = mod_esi:deliver(SessionID, menu()),
- ok = mod_esi:deliver(SessionID, process_info_content(Env, Input)),
- ok = mod_esi:deliver(SessionID, footer()).
-
-load_database_page(SessionID, Env, Input) ->
- ok = mod_esi:deliver(SessionID, header()),
-
- % Very dynamic page, handled differently
- load_database_content(SessionID, Env, Input),
- ok = mod_esi:deliver(SessionID, footer()).
-
-
-%%% --------------------------- %%%
-%%% Content pages %%%
-%%% --------------------------- %%%
-
-overview_content(_Env, Input) ->
- Query = httpd:parse_query(Input),
- Min = get_option_value("range_min", Query),
- Max = get_option_value("range_max", Query),
- Width = 1200,
- Height = 600,
- TotalProfileTime = ?seconds( percept_db:select({system, stop_ts}),
- percept_db:select({system, start_ts})),
- RegisteredProcs = length(percept_db:select({information, procs})),
- RegisteredPorts = length(percept_db:select({information, ports})),
-
- InformationTable =
- "<table>" ++
- table_line(["Profile time:", TotalProfileTime]) ++
- table_line(["Processes:", RegisteredProcs]) ++
- table_line(["Ports:", RegisteredPorts]) ++
- table_line(["Min. range:", Min]) ++
- table_line(["Max. range:", Max]) ++
- "</table>",
-
- Header = "
- <div id=\"content\">
- <div>" ++ InformationTable ++ "</div>\n
- <form name=form_area method=POST action=/cgi-bin/percept_html/page>
- <input name=data_min type=hidden value=" ++ term2html(float(Min)) ++ ">
- <input name=data_max type=hidden value=" ++ term2html(float(Max)) ++ ">\n",
-
-
- RangeTable =
- "<table>"++
- table_line([
- "Min:",
- "<input name=range_min value=" ++ term2html(float(Min)) ++">",
- "<select name=\"graph_select\" onChange=\"select_image()\">
- <option disabled=true value=\""++ url_graph(Width, Height, Min, Max, []) ++"\" />Ports
- <option disabled=true value=\""++ url_graph(Width, Height, Min, Max, []) ++"\" />Processes
- <option value=\""++ url_graph(Width, Height, Min, Max, []) ++"\" />Ports & Processes
- </select>",
- "<input type=submit value=Update>"
- ]) ++
- table_line([
- "Max:",
- "<input name=range_max value=" ++ term2html(float(Max)) ++">",
- "",
- "<a href=/cgi-bin/percept_html/codelocation_page?range_min=" ++
- term2html(Min) ++ "&range_max=" ++ term2html(Max) ++ ">Code location</a>"
- ]) ++
- "</table>",
-
-
- MainTable =
- "<table>" ++
- table_line([div_tag_graph()]) ++
- table_line([RangeTable]) ++
- "</table>",
-
- Footer = "</div></form>",
-
- Header ++ MainTable ++ Footer.
-
-div_tag_graph() ->
- %background:url('/images/loader.gif') no-repeat center;
- "<div id=\"percept_graph\"
- onMouseDown=\"select_down(event)\"
- onMouseMove=\"select_move(event)\"
- onMouseUp=\"select_up(event)\"
-
- style=\"
- background-size: 100%;
- background-origin: content;
- width: 100%;
- position:relative;
- \">
-
- <div id=\"percept_areaselect\"
- style=\"background-color:#ef0909;
- position:relative;
- visibility:hidden;
- border-left: 1px solid #101010;
- border-right: 1px solid #101010;
- z-index:2;
- width:40px;
- height:40px;\"></div></div>".
-
--spec url_graph(
-	Width :: non_neg_integer(),
- Height :: non_neg_integer(),
- Min :: float(),
- Max :: float(),
- Pids :: [pid()]) -> string().
-
-url_graph(W, H, Min, Max, []) ->
- "/cgi-bin/percept_graph/graph?range_min=" ++ term2html(float(Min))
- ++ "&range_max=" ++ term2html(float(Max))
- ++ "&width=" ++ term2html(float(W))
- ++ "&height=" ++ term2html(float(H)).
-
-%%% process_info_content
-
-process_info_content(_Env, Input) ->
- Query = httpd:parse_query(Input),
- Pid = get_option_value("pid", Query),
-
-
- [I] = percept_db:select({information, Pid}),
- ArgumentString = case I#information.entry of
- {_, _, Arguments} -> lists:flatten( [term2html(Arg) ++ "<br>" || Arg <- Arguments]);
- _ -> ""
- end,
-
- TimeTable = html_table([
- [{th, ""},
- {th, "Timestamp"},
- {th, "Profile Time"}],
- [{td, "Start"},
- term2html(I#information.start),
- term2html(procstarttime(I#information.start))],
- [{td, "Stop"},
- term2html(I#information.stop),
- term2html(procstoptime(I#information.stop))]
- ]),
-
- InfoTable = html_table([
- [{th, "Pid"}, term2html(I#information.id)],
- [{th, "Name"}, term2html(I#information.name)],
- [{th, "Entrypoint"}, mfa2html(I#information.entry)],
- [{th, "Arguments"}, ArgumentString],
- [{th, "Timetable"}, TimeTable],
- [{th, "Parent"}, pid2html(I#information.parent)],
- [{th, "Children"}, lists:flatten(lists:map(fun(Child) -> pid2html(Child) ++ " " end, I#information.children))]
- ]),
-
- PidActivities = percept_db:select({activity, [{id, Pid}]}),
- WaitingMfas = percept_analyzer:waiting_activities(PidActivities),
-
- TotalWaitTime = lists:sum( [T || {T, _, _} <- WaitingMfas] ),
-
- MfaTable = html_table([
- [{th, "percentage"},
- {th, "total"},
- {th, "mean"},
- {th, "stddev"},
- {th, "#recv"},
- {th, "module:function/arity"}]] ++ [
- [{td, image_string(percentage, [{width, 100}, {height, 10}, {percentage, Time/TotalWaitTime}])},
- {td, term2html(Time)},
- {td, term2html(Mean)},
- {td, term2html(StdDev)},
- {td, term2html(N)},
- {td, mfa2html(MFA)} ] || {Time, MFA, {Mean, StdDev, N}} <- WaitingMfas]),
-
- "<div id=\"content\">" ++
- InfoTable ++ "<br>" ++
- MfaTable ++
- "</div>".
-
-%%% concurrency content
-concurrency_content(_Env, Input) ->
- %% Get query
- Query = httpd:parse_query(Input),
-
- %% Collect selected pids and generate id tags
- Pids = [value2pid(PidValue) || {PidValue, Case} <- Query, Case == "on", PidValue /= "select_all"],
- IDs = [{id, Pid} || Pid <- Pids],
-
- % FIXME: A lot of extra work here, redo
-
- %% Analyze activities and calculate area bounds
- Activities = percept_db:select({activity, IDs}),
- StartTs = percept_db:select({system, start_ts}),
- Counts = [{Time, Y1 + Y2} || {Time, Y1, Y2} <- percept_analyzer:activities2count2(Activities, StartTs)],
- {T0,_,T1,_} = percept_analyzer:minmax(Counts),
-
- % FIXME: End
-
- PidValues = [pid2value(Pid) || Pid <- Pids],
-
- %% Generate activity bar requests
- ActivityBarTable = lists:foldl(
- fun(Pid, Out) ->
- ValueString = pid2value(Pid),
- Out ++
- table_line([
- pid2html(Pid),
- "<img onload=\"size_image(this, '" ++
- image_string_head("activity", [{"pid", ValueString}, {range_min, T0},{range_max, T1},{height, 10}], []) ++
- "')\" src=/images/white.png border=0 />"
- ])
- end, [], Pids),
-
- %% Make pids request string
- PidsRequest = join_strings_with(PidValues, ":"),
-
- "<div id=\"content\">
- <table cellspacing=0 cellpadding=0 border=0>" ++
- table_line([
- "",
- "<img onload=\"size_image(this, '" ++
- image_string_head("graph", [{"pids", PidsRequest},{range_min, T0}, {range_max, T1}, {height, 400}], []) ++
- "')\" src=/images/white.png border=0 />"
- ]) ++
- ActivityBarTable ++
- "</table></div>\n".
-
-processes_content() ->
- Ports = percept_db:select({information, ports}),
- UnsortedProcesses = percept_db:select({information, procs}),
- SystemStartTS = percept_db:select({system, start_ts}),
- SystemStopTS = percept_db:select({system, stop_ts}),
- ProfileTime = ?seconds( SystemStopTS,
- SystemStartTS),
- Processes = lists:sort(
- fun (A, B) ->
- if
- A#information.id > B#information.id -> true;
- true -> false
- end
- end, UnsortedProcesses),
-
- ProcsHtml = lists:foldl(
- fun (I, Out) ->
- StartTime = procstarttime(I#information.start),
- EndTime = procstoptime(I#information.stop),
- Prepare =
- table_line([
- "<input type=checkbox name=" ++ pid2value(I#information.id) ++ ">",
- pid2html(I#information.id),
- image_string(proc_lifetime, [
- {profiletime, ProfileTime},
- {start, StartTime},
- {"end", term2html(float(EndTime))},
- {width, 100},
- {height, 10}]),
- mfa2html(I#information.entry),
- term2html(I#information.name),
- pid2html(I#information.parent)
- ]),
- [Prepare|Out]
- end, [], Processes),
-
- PortsHtml = lists:foldl(
- fun (I, Out) ->
- StartTime = procstarttime(I#information.start),
- EndTime = procstoptime(I#information.stop),
- Prepare =
- table_line([
- "",
- pid2html(I#information.id),
- image_string(proc_lifetime, [
- {profiletime, ProfileTime},
- {start, StartTime},
- {"end", term2html(float(EndTime))},
- {width, 100},
- {height, 10}]),
- mfa2html(I#information.entry),
- term2html(I#information.name),
- pid2html(I#information.parent)
- ]),
- [Prepare|Out]
- end, [], Ports),
-
- Selector = "<table>" ++
- table_line([
- "<input onClick='selectall()' type=checkbox name=select_all>Select all"]) ++
- table_line([
- "<input type=submit value=Compare>"]) ++
- "</table>",
-
- if
- length(ProcsHtml) > 0 ->
- ProcsHtmlResult =
- "<tr><td><b>Processes</b></td></tr>
- <tr><td>
- <table width=700 cellspacing=0 border=0>
- <tr>
- <td align=middle width=40><b>Select</b></td>
- <td align=middle width=40><b>Pid</b></td>
- <td><b>Lifetime</b></td>
- <td><b>Entrypoint</b></td>
- <td><b>Name</b></td>
- <td><b>Parent</b></td>
- </tr>" ++
- lists:flatten(ProcsHtml) ++
- "</table>
- </td></tr>";
- true ->
- ProcsHtmlResult = ""
- end,
- if
- length(PortsHtml) > 0 ->
- PortsHtmlResult = "
- <tr><td><b>Ports</b></td></tr>
- <tr><td>
- <table width=700 cellspacing=0 border=0>
- <tr>
- <td align=middle width=40><b>Select</b></td>
- <td align=left width=40><b>Pid</b></td>
- <td><b>Lifetime</b></td>
- <td><b>Entrypoint</b></td>
- <td><b>Name</b></td>
- <td><b>Parent</b></td>
- </tr>" ++
- lists:flatten(PortsHtml) ++
- "</table>
- </td></tr>";
- true ->
- PortsHtmlResult = ""
- end,
-
- Right = "<div>"
- ++ Selector ++
- "</div>\n",
-
- Middle = "<div id=\"content\">
- <table>" ++
- ProcsHtmlResult ++
- PortsHtmlResult ++
- "</table>" ++
- Right ++
- "</div>\n",
-
- "<form name=process_select method=POST action=/cgi-bin/percept_html/concurrency_page>" ++
- Middle ++
- "</form>".
-
-procstarttime(TS) ->
- case TS of
- undefined -> 0.0;
- TS -> ?seconds(TS,percept_db:select({system, start_ts}))
- end.
-
-procstoptime(TS) ->
- case TS of
- undefined -> ?seconds( percept_db:select({system, stop_ts}),
- percept_db:select({system, start_ts}));
- TS -> ?seconds(TS, percept_db:select({system, start_ts}))
- end.
-
-databases_content() ->
- "<div id=\"content\">
- <form name=load_percept_file method=post action=/cgi-bin/percept_html/load_database_page>
- <center>
- <table>
- <tr><td>Enter file to analyse:</td><td><input type=hidden name=path /></td></tr>
- <tr><td><input type=file name=file size=40 /></td><td><input type=submit value=Load onClick=\"path.value = file.value;\" /></td></tr>
- </table>
- </center>
- </form>
- </div>".
-
-load_database_content(SessionId, _Env, Input) ->
- Query = httpd:parse_query(Input),
- {_,{_,Path}} = lists:keysearch("file", 1, Query),
- {_,{_,File}} = lists:keysearch("path", 1, Query),
- Filename = filename:join(Path, File),
- % Check path/file/filename
-
- ok = mod_esi:deliver(SessionId, "<div id=\"content\">"),
- case file:read_file_info(Filename) of
- {ok, _} ->
- Content = "<center>
- Parsing: " ++ Filename ++ "<br>
- </center>",
- ok = mod_esi:deliver(SessionId, Content),
- case percept:analyze(Filename) of
- {error, Reason} ->
- ok = mod_esi:deliver(SessionId, error_msg("Analyze" ++ term2html(Reason)));
- _ ->
- Complete = "<center><a href=\"/cgi-bin/percept_html/page\">View</a></center>",
- ok = mod_esi:deliver(SessionId, Complete)
- end;
- {error, Reason} ->
- ok = mod_esi:deliver(SessionId, error_msg("File" ++ term2html(Reason)))
- end,
- ok = mod_esi:deliver(SessionId, "</div>").
-
-codelocation_content(_Env, Input) ->
- Query = httpd:parse_query(Input),
- Min = get_option_value("range_min", Query),
- Max = get_option_value("range_max", Query),
- StartTs = percept_db:select({system, start_ts}),
- TsMin = percept_analyzer:seconds2ts(Min, StartTs),
- TsMax = percept_analyzer:seconds2ts(Max, StartTs),
- Acts = percept_db:select({activity, [{ts_min, TsMin}, {ts_max, TsMax}]}),
-
- Secs = [timer:now_diff(A#activity.timestamp,StartTs)/1000 || A <- Acts],
- Delta = cl_deltas(Secs),
- Zip = lists:zip(Acts, Delta),
- Table = html_table([
- [{th, "delta [ms]"},
- {th, "time [ms]"},
- {th, " pid "},
- {th, "activity"},
- {th, "module:function/arity"},
- {th, "#runnables"}]] ++ [
- [{td, term2html(D)},
- {td, term2html(timer:now_diff(A#activity.timestamp,StartTs)/1000)},
- {td, pid2html(A#activity.id)},
- {td, term2html(A#activity.state)},
- {td, mfa2html(A#activity.where)},
- {td, term2html(A#activity.runnable_count)}] || {A, D} <- Zip ]),
-
- "<div id=\"content\">" ++
- Table ++
- "</div>".
-
-cl_deltas([]) -> [];
-cl_deltas(List) -> cl_deltas(List, [0.0]).
-cl_deltas([_], Out) -> lists:reverse(Out);
-cl_deltas([A,B|Ls], Out) -> cl_deltas([B|Ls], [B - A | Out]).
-
-%%% --------------------------- %%%
-%%% Utility functions %%%
-%%% --------------------------- %%%
-
-%% Should be in string stdlib?
-
-join_strings(Strings) ->
- lists:flatten(Strings).
-
--spec join_strings_with(Strings :: [string()], Separator :: string()) -> string().
-
-join_strings_with([S1, S2 | R], S) ->
- join_strings_with([join_strings_with(S1,S2,S) | R], S);
-join_strings_with([S], _) ->
- S.
-join_strings_with(S1, S2, S) ->
- join_strings([S1,S,S2]).
-
-%%% Generic erlang2html
-
--spec html_table(Rows :: [[string() | {'td' | 'th', string()}]]) -> string().
-
-html_table(Rows) -> "<table>" ++ html_table_row(Rows) ++ "</table>".
-
-html_table_row(Rows) -> html_table_row(Rows, odd).
-html_table_row([], _) -> "";
-html_table_row([Row|Rows], odd ) -> "<tr class=\"odd\">" ++ html_table_data(Row) ++ "</tr>" ++ html_table_row(Rows, even);
-html_table_row([Row|Rows], even) -> "<tr class=\"even\">" ++ html_table_data(Row) ++ "</tr>" ++ html_table_row(Rows, odd ).
-
-html_table_data([]) -> "";
-html_table_data([{td, Data}|Row]) -> "<td>" ++ Data ++ "</td>" ++ html_table_data(Row);
-html_table_data([{th, Data}|Row]) -> "<th>" ++ Data ++ "</th>" ++ html_table_data(Row);
-html_table_data([Data|Row]) -> "<td>" ++ Data ++ "</td>" ++ html_table_data(Row).
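-
-%% A minimal sketch (not part of the original module): a header row plus
-%% one data row rendered with the helpers above. The cell contents are
-%% placeholders.
-html_table_sketch() ->
-    html_table([
-        [{th, "Pid"}, {th, "Name"}],
-        [{td, "<0.31.0>"}, {td, "percept_db"}]
-    ]).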
-
-
-
-
--spec table_line(Table :: [any()]) -> string().
-
-table_line(List) -> table_line(List, ["<tr>"]).
-table_line([], Out) -> lists:flatten(lists:reverse(["</tr>\n"|Out]));
-table_line([Element | Elements], Out) when is_list(Element) ->
- table_line(Elements, ["<td>" ++ Element ++ "</td>" |Out]);
-table_line([Element | Elements], Out) ->
- table_line(Elements, ["<td>" ++ term2html(Element) ++ "</td>"|Out]).
-
--spec term2html(any()) -> string().
-
-term2html(Term) when is_float(Term) -> lists:flatten(io_lib:format("~.4f", [Term]));
-term2html(Term) -> lists:flatten(io_lib:format("~p", [Term])).
-
--spec mfa2html(MFA :: {atom(), atom(), list() | integer()}) -> string().
-
-mfa2html({Module, Function, Arguments}) when is_list(Arguments) ->
- lists:flatten(io_lib:format("~p:~p/~p", [Module, Function, length(Arguments)]));
-mfa2html({Module, Function, Arity}) when is_integer(Arity) ->
- lists:flatten(io_lib:format("~p:~p/~p", [Module, Function, Arity]));
-mfa2html(_) ->
- "undefined".
-
--spec pid2html(Pid :: pid() | port()) -> string().
-
-pid2html(Pid) when is_pid(Pid) ->
- PidString = term2html(Pid),
- PidValue = pid2value(Pid),
- "<a href=\"/cgi-bin/percept_html/process_info_page?pid="++PidValue++"\">"++PidString++"</a>";
-pid2html(Pid) when is_port(Pid) ->
- term2html(Pid);
-pid2html(_) ->
- "undefined".
-
--spec image_string(Request :: string()) -> string().
-
-image_string(Request) ->
- "<img border=0 src=\"/cgi-bin/percept_graph/" ++
- Request ++
- " \">".
-
--spec image_string(atom() | string(), list()) -> string().
-
-image_string(Request, Options) when is_atom(Request), is_list(Options) ->
- image_string(image_string_head(erlang:atom_to_list(Request), Options, []));
-image_string(Request, Options) when is_list(Options) ->
- image_string(image_string_head(Request, Options, [])).
-
-image_string_head(Request, [{Type, Value} | Opts], Out) when is_atom(Type), is_number(Value) ->
- Opt = join_strings(["?",term2html(Type),"=",term2html(Value)]),
- image_string_tail(Request, Opts, [Opt|Out]);
-image_string_head(Request, [{Type, Value} | Opts], Out) ->
- Opt = join_strings(["?",Type,"=",Value]),
- image_string_tail(Request, Opts, [Opt|Out]).
-
-image_string_tail(Request, [], Out) ->
- join_strings([Request | lists:reverse(Out)]);
-image_string_tail(Request, [{Type, Value} | Opts], Out) when is_atom(Type), is_number(Value) ->
- Opt = join_strings(["&",term2html(Type),"=",term2html(Value)]),
- image_string_tail(Request, Opts, [Opt|Out]);
-image_string_tail(Request, [{Type, Value} | Opts], Out) ->
- Opt = join_strings(["&",Type,"=",Value]),
- image_string_tail(Request, Opts, [Opt|Out]).
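-
-%% A minimal sketch (not part of the original module): building an <img>
-%% tag whose src is a percept_graph request assembled from an option list.
-%% The option values are placeholders.
-image_string_sketch() ->
-    image_string(percentage, [{width, 100}, {height, 10}, {percentage, 0.5}]).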
-
-
-%%% percept conversions
-
--spec pid2value(Pid :: pid()) -> string().
-
-pid2value(Pid) ->
- String = lists:flatten(io_lib:format("~p", [Pid])),
- lists:sublist(String, 2, erlang:length(String)-2).
-
--spec value2pid(Value :: string()) -> pid().
-
-value2pid(Value) ->
- String = lists:flatten("<" ++ Value ++ ">"),
- erlang:list_to_pid(String).
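-
-%% A minimal round-trip sketch (not part of the original module): pids
-%% travel in URLs as "X.Y.Z" strings and are converted back with
-%% value2pid/1.
-pid_value_sketch() ->
-    Pid = self(),
-    Value = pid2value(Pid),
-    Pid = value2pid(Value),
-    Value.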
-
-
-%%% get value
-
--spec get_option_value(Option :: string(), Options :: [{string(),any()}]) ->
- {'error', any()} | boolean() | pid() | [pid()] | number().
-
-get_option_value(Option, Options) ->
- case catch get_option_value0(Option, Options) of
- {'EXIT', Reason} -> {error, Reason};
- Value -> Value
- end.
-
-get_option_value0(Option, Options) ->
- case lists:keysearch(Option, 1, Options) of
- false -> get_default_option_value(Option);
- {value, {Option, _Value}} when Option == "fillcolor" -> true;
- {value, {Option, Value}} when Option == "pid" -> value2pid(Value);
- {value, {Option, Value}} when Option == "pids" ->
- [value2pid(PidValue) || PidValue <- string:tokens(Value,":")];
- {value, {Option, Value}} -> get_number_value(Value);
- _ -> {error, undefined}
- end.
-
-get_default_option_value(Option) ->
- case Option of
- "fillcolor" -> false;
- "range_min" -> float(0.0);
- "pids" -> [];
- "range_max" ->
- Acts = percept_db:select({activity, []}),
- #activity{ timestamp = Start } = hd(Acts),
- #activity{ timestamp = Stop } = hd(lists:reverse(Acts)),
- ?seconds(Stop,Start);
- "width" -> 700;
- "height" -> 400;
- _ -> {error, {undefined_default_option, Option}}
- end.
-
--spec get_number_value(string()) -> number() | {'error', 'illegal_number'}.
-
-get_number_value(Value) ->
- % Try float
- case string:to_float(Value) of
- {error, no_float} ->
- % Try integer
- case string:to_integer(Value) of
- {error, _} -> {error, illegal_number};
- {Integer, _} -> Integer
- end;
- {error, _} -> {error, illegal_number};
- {Float, _} -> Float
- end.
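-
-%% A minimal sketch (not part of the original module) of option lookup
-%% with defaults, as used by the content pages. The query list is a
-%% placeholder; "height" is missing and falls back to its default.
-get_option_value_sketch() ->
-    Query = [{"range_min", "1.5"}, {"width", "800"}],
-    1.5 = get_option_value("range_min", Query),
-    800 = get_option_value("width", Query),
-    400 = get_option_value("height", Query),
-    ok.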
-
-%%% --------------------------- %%%
-%%% html prime functions %%%
-%%% --------------------------- %%%
-
-header() -> header([]).
-header(HeaderData) ->
- "Content-Type: text/html\r\n\r\n" ++
- "<html>
- <head>
- <meta http-equiv=\"Content-Type\" content=\"text/html; charset=iso-8859-1\">
- <title>percept</title>
- <link href=\"/css/percept.css\" rel=\"stylesheet\" type=\"text/css\">
- <script type=\"text/javascript\" src=\"/javascript/percept_error_handler.js\"></script>
- <script type=\"text/javascript\" src=\"/javascript/percept_select_all.js\"></script>
- <script type=\"text/javascript\" src=\"/javascript/percept_area_select.js\"></script>
- " ++ HeaderData ++"
- </head>
- <body onLoad=\"load_image()\">
- <div id=\"header\"><a href=/index.html>percept</a></div>\n".
-
-footer() ->
- "</body>
- </html>\n".
-
-menu() ->
- "<div id=\"menu\" class=\"menu_tabs\">
- <ul>
- <li><a href=/cgi-bin/percept_html/databases_page>databases</a></li>
- <li><a href=/cgi-bin/percept_html/processes_page>processes</a></li>
- <li><a href=/cgi-bin/percept_html/page>overview</a></li>
- </ul></div>\n".
-
--spec error_msg(Error :: string()) -> string().
-
-error_msg(Error) ->
- "<table width=300>
- <tr height=5><td></td> <td></td></tr>
- <tr><td width=150 align=right><b>Error: </b></td> <td align=left>"++ Error ++ "</td></tr>
- <tr height=5><td></td> <td></td></tr>
- </table>\n".
diff --git a/lib/percept/src/percept_image.erl b/lib/percept/src/percept_image.erl
deleted file mode 100644
index e819938027..0000000000
--- a/lib/percept/src/percept_image.erl
+++ /dev/null
@@ -1,316 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2007-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-
--module(percept_image).
--export([ proc_lifetime/5,
- percentage/3,
- graph/3,
- graph/4,
- activities/3,
- activities/4]).
--record(graph_area, {x = 0, y = 0, width, height}).
--compile(inline).
-
-%%% -------------------------------------
-%%% 	GRAPH
-%%% -------------------------------------
-
-%% graph(Width, Height, Range, Data)
-
-graph(Width, Height, {RXmin, RYmin, RXmax, RYmax}, Data) ->
- Data2 = [{X, Y1 + Y2} || {X, Y1, Y2} <- Data],
- MinMax = percept_analyzer:minmax(Data2),
- {Xmin, Ymin, Xmax, Ymax} = MinMax,
- graf1(Width, Height,{ lists:min([RXmin, Xmin]),
- lists:min([RYmin, Ymin]),
- lists:max([RXmax, Xmax]),
- lists:max([RYmax, Ymax])}, Data).
-
-%% graph(Width, Height, Data) = Image
-%% In:
-%% Width = integer(),
-%% Height = integer(),
-%% Data = [{Time, Procs, Ports}]
-%% Time = float()
-%% Procs = integer()
-%% Ports = integer()
-%% Out:
-%% Image = binary()
-
-graph(Width, Height, Data) ->
- Data2 = [{X, Y1 + Y2} || {X, Y1, Y2} <- Data],
- Bounds = percept_analyzer:minmax(Data2),
- graf1(Width, Height, Bounds, Data).
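-
-%% A minimal sketch (not part of the original module): rendering a small
-%% runnability graph from three placeholder samples and writing it to a
-%% placeholder path. Assumes egd and the percept application (for
-%% percept_analyzer and the bundled font) are available.
-graph_sketch() ->
-    Data = [{0.0, 1, 0}, {0.5, 3, 1}, {1.0, 2, 0}],
-    Png = graph(400, 300, Data),
-    ok = file:write_file("/tmp/percept_graph_sketch.png", Png),
-    Png.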
-
-graf1(Width, Height, {Xmin, Ymin, Xmax, Ymax}, Data) ->
- % Calculate areas
- HO = 20,
- GrafArea = #graph_area{x = HO, y = 4, width = Width - 2*HO, height = Height - 17},
- XticksArea = #graph_area{x = HO, y = Height - 13, width = Width - 2*HO, height = 13},
- YticksArea = #graph_area{x = 1, y = 4, width = HO, height = Height - 17},
-
- %% Initiate Image
-
- Image = egd:create(Width, Height),
-
- %% Set colors
-
- Black = egd:color(Image, {0, 0, 0}),
- ProcColor = egd:color(Image, {0, 255, 0}),
- PortColor = egd:color(Image, {255, 0, 0}),
-
-	%% Draw graph, xticks and yticks
- draw_graf(Image, Data, {Black, ProcColor, PortColor}, GrafArea, {Xmin, Ymin, Xmax, Ymax}),
- draw_xticks(Image, Black, XticksArea, {Xmin, Xmax}, Data),
- draw_yticks(Image, Black, YticksArea, {Ymin, Ymax}),
-
-	%% Destroy the image and return the rendered binary
- Binary = egd:render(Image, png),
- egd:destroy(Image),
- Binary.
-
-%% draw_graf(Image, Data, Colors, GraphArea, DataBounds)
-%%	Image, the egd image reference
-%%	Data, list of three-tuples {X, Y1, Y2}
-%%	Colors, {ForegroundColor, ProcFillColor, PortFillColor}
-%%	DataBounds, {Xmin, Ymin, Xmax, Ymax}
-
-draw_graf(Im, Data, Colors, GA = #graph_area{x = X0, y = Y0, width = Width, height = Height}, {Xmin, _Ymin, Xmax, Ymax}) ->
- Dx = (Width)/(Xmax - Xmin),
- Dy = (Height)/(Ymax),
- Plotdata = [{trunc(X0 + X*Dx - Xmin*Dx), trunc(Y0 + Height - Y1*Dy), trunc(Y0 + Height - (Y1 + Y2)*Dy)} || {X, Y1, Y2} <- Data],
- draw_graf(Im, Plotdata, Colors, GA).
-
-draw_graf(Im, [{X1, Yproc1, Yport1}, {X2, Yproc2, Yport2}|Data], C, GA) when X2 - X1 < 1 ->
- draw_graf(Im, [{X1, [{Yproc2, Yport2},{Yproc1, Yport1}]}|Data], C, GA);
-
-draw_graf(Im, [{X1, Ys1}, {X2, Yproc2, Yport2}|Data], C, GA) when X2 - X1 < 1, is_list(Ys1) ->
- draw_graf(Im, [{X1, [{Yproc2, Yport2}|Ys1]}|Data], C, GA);
-
-draw_graf(Im, [{X1, Yproc1, Yport1}, {X2, Yproc2, Yport2}|Data], C = {B, PrC, PoC}, GA = #graph_area{y = Y0, height = H}) ->
- GyZero = trunc(Y0 + H),
- egd:filledRectangle(Im, {X1, GyZero}, {X2, Yproc1}, PrC),
- egd:filledRectangle(Im, {X1, Yproc1}, {X2, Yport1}, PoC),
- egd:line(Im, {X1, Yport1}, {X2, Yport1}, B), % top line
-	egd:line(Im, {X1, Yport2}, {X1, Yport1}, B), % left line
- egd:line(Im, {X2, Yport1}, {X2, Yport2}, B), % right line
- draw_graf(Im, [{X2, Yproc2, Yport2}|Data], C, GA);
-
-draw_graf(Im, [{X1, Ys1 = [{Yproc1,Yport1}|_]}, {X2, Yproc2, Yport2}|Data], C = {B, PrC, PoC}, GA = #graph_area{y = Y0, height = H}) ->
- GyZero = trunc(Y0 + H),
- Yprocs = [Yp || {Yp, _} <- Ys1],
- Yports = [Yp || {_, Yp} <- Ys1],
-
- YprMin = lists:min(Yprocs),
- YprMax = lists:max(Yprocs),
- YpoMax = lists:max(Yports),
- egd:filledRectangle(Im, {X1, GyZero}, {X2, Yproc1}, PrC),
- egd:filledRectangle(Im, {X1, Yproc1}, {X2, Yport1}, PoC),
- egd:filledRectangle(Im, {X1, Yport1}, {X2, Yport1}, B), % top line
- egd:filledRectangle(Im, {X2, Yport1}, {X2, Yport2}, B), % right line
-
- egd:filledRectangle(Im, {X1, GyZero}, {X1, YprMin}, PrC), % left proc green line
- egd:filledRectangle(Im, {X1, YprMax}, {X1, YpoMax}, PoC), % left port line
- egd:filledRectangle(Im, {X1, YprMax}, {X1, YprMin}, B),
-
- draw_graf(Im, [{X2, Yproc2, Yport2}|Data], C, GA);
-draw_graf(_, _, _, _) -> ok.
-
-draw_xticks(Image, Color, XticksArea, {Xmin, Xmax}, Data) ->
- #graph_area{x = X0, y = Y0, width = Width} = XticksArea,
-
- DX = Width/(Xmax - Xmin),
- Offset = X0 - Xmin*DX,
- Y = trunc(Y0),
- Font = load_font(),
- {FontW, _FontH} = egd_font:size(Font),
- egd:filledRectangle(Image, {trunc(X0), Y}, {trunc(X0 + Width), Y}, Color),
- lists:foldl(
- fun ({X,_,_}, PX) ->
- X1 = trunc(Offset + X*DX),
-
-	    % Optimization:
-	    % if the offset has passed half of the previous text,
-	    % start checking this text
-
- if
- X1 > PX ->
- Text = lists:flatten(io_lib:format("~.3f", [float(X)])),
- TextLength = length(Text),
- TextWidth = TextLength*FontW,
- Spacing = 2,
- if
- X1 > PX + round(TextWidth/2) + Spacing ->
- egd:line(Image, {X1, Y - 3}, {X1, Y + 3}, Color),
- text(Image, {X1 - round(TextWidth/2), Y + 2}, Font, Text, Color),
- X1 + round(TextWidth/2) + Spacing;
- true ->
- PX
- end;
- true ->
- PX
- end
- end, 0, Data).
-
-draw_yticks(Im, Color, TickArea, {_,Ymax}) ->
- #graph_area{x = X0, y = Y0, width = Width, height = Height} = TickArea,
- Font = load_font(),
- X = trunc(X0 + Width),
- Dy = (Height)/(Ymax),
- Yts = if
- Height/(Ymax*12) < 1.0 -> round(1 + Ymax*15/Height);
- true -> 1
- end,
- egd:filledRectangle(Im, {X, trunc(0 + Y0)}, {X, trunc(Y0 + Height)}, Color),
- draw_yticks0(Im, Font, Color, 0, Yts, Ymax, {X, Height, Dy}).
-
-draw_yticks0(Im, Font, Color, Yi, Yts, Ymax, Area) when Yi < Ymax ->
- {X, Height, Dy} = Area,
- Y = round(Height - (Yi*Dy) + 3),
-
- egd:filledRectangle(Im, {X - 3, Y}, {X + 3, Y}, Color),
- Text = lists:flatten(io_lib:format("~p", [Yi])),
- text(Im, {0, Y - 4}, Font, Text, Color),
- draw_yticks0(Im, Font, Color, Yi + Yts, Yts, Ymax, Area);
-draw_yticks0(_, _, _, _, _, _, _) -> ok.
-
-%%% -------------------------------------
-%%% ACTIVITIES
-%%% -------------------------------------
-
-%% activities(Width, Height, Range, Activities) -> Binary
-%% In:
-%% Width = integer()
-%% Height = integer()
-%% Range = {float(), float()}
-%% Activities = [{float(), active | inactive}]
-%% Out:
-%% Binary = binary()
-
-activities(Width, Height, {UXmin, UXmax}, Activities) ->
- Xs = [ X || {X,_} <- Activities],
- Xmin = lists:min(Xs),
- Xmax = lists:max(Xs),
- activities0(Width, Height, {lists:min([Xmin, UXmin]), lists:max([UXmax, Xmax])}, Activities).
-
-activities(Width, Height, Activities) ->
- Xs = [ X || {X,_} <- Activities],
- Xmin = lists:min(Xs),
- Xmax = lists:max(Xs),
- activities0(Width, Height, {Xmin, Xmax}, Activities).
-
-activities0(Width, Height, {Xmin, Xmax}, Activities) ->
- Image = egd:create(Width, Height),
- Grey = egd:color(Image, {200, 200, 200}),
- HO = 20,
- ActivityArea = #graph_area{x = HO, y = 0, width = Width - 2*HO, height = Height},
- egd:filledRectangle(Image, {0, 0}, {Width, Height}, Grey),
- draw_activity(Image, {Xmin, Xmax}, ActivityArea, Activities),
- Binary = egd:render(Image, png),
- egd:destroy(Image),
- Binary.
-
-draw_activity(Image, {Xmin, Xmax}, Area = #graph_area{ width = Width }, Acts) ->
- White = egd:color({255, 255, 255}),
- Green = egd:color({0,250, 0}),
- Black = egd:color({0, 0, 0}),
-
- Dx = Width/(Xmax - Xmin),
-
- draw_activity(Image, {Xmin, Xmax}, Area, {White, Green, Black}, Dx, Acts).
-
-draw_activity(_, _, _, _, _, [_]) -> ok;
-draw_activity(Image, {Xmin, Xmax}, Area = #graph_area{ height = Height, x = X0 }, {Cw, Cg, Cb}, Dx, [{Xa1, State}, {Xa2, Act2} | Acts]) ->
- X1 = erlang:trunc(X0 + Dx*Xa1 - Xmin*Dx),
- X2 = erlang:trunc(X0 + Dx*Xa2 - Xmin*Dx),
-
- case State of
- inactive ->
- egd:filledRectangle(Image, {X1, 0}, {X2, Height - 1}, Cw),
- egd:rectangle(Image, {X1, 0}, {X2, Height - 1}, Cb);
- active ->
- egd:filledRectangle(Image, {X1, 0}, {X2, Height - 1}, Cg),
- egd:rectangle(Image, {X1, 0}, {X2, Height - 1}, Cb)
- end,
- draw_activity(Image, {Xmin, Xmax}, Area, {Cw, Cg, Cb}, Dx, [{Xa2, Act2} | Acts]).
-
-
-
-%%% -------------------------------------
-%%% Process lifetime
-%%% Used by processes page
-%%% -------------------------------------
-
-proc_lifetime(Width, Height, Start, End, ProfileTime) ->
- Im = egd:create(round(Width), round(Height)),
- Black = egd:color(Im, {0, 0, 0}),
- Green = egd:color(Im, {0, 255, 0}),
-
- % Ratio and coordinates
-
- DX = (Width-1)/ProfileTime,
- X1 = round(DX*Start),
- X2 = round(DX*End),
-
- % Paint
- egd:filledRectangle(Im, {X1, 0}, {X2, Height - 1}, Green),
- egd:rectangle(Im, {X1, 0}, {X2, Height - 1}, Black),
-
- Binary = egd:render(Im, png),
- egd:destroy(Im),
- Binary.
-
-%%% -------------------------------------
-%%% Percentage
-%%% Used by process_info page
-%%% Percentage should be 0.0 -> 1.0
-%%% -------------------------------------
-percentage(Width, Height, Percentage) ->
- Im = egd:create(round(Width), round(Height)),
- Font = load_font(),
- Black = egd:color(Im, {0, 0, 0}),
- Green = egd:color(Im, {0, 255, 0}),
-
- % Ratio and coordinates
-
- X = round(Width - 1 - Percentage*(Width - 1)),
-
- % Paint
- egd:filledRectangle(Im, {X, 0}, {Width - 1, Height - 1}, Green),
- {FontW, _} = egd_font:size(Font),
- String = lists:flatten(io_lib:format("~.10B %", [round(100*Percentage)])),
-
- text( Im,
- {round(Width/2 - (FontW*length(String)/2)), 0},
- Font,
- String,
- Black),
- egd:rectangle(Im, {X, 0}, {Width - 1, Height - 1}, Black),
-
- Binary = egd:render(Im, png),
- egd:destroy(Im),
- Binary.
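-
-%% A minimal sketch (not part of the original module): a 100x10 bar
-%% showing 35 %, written to a placeholder path. Assumes egd and the
-%% percept bundled font are available.
-percentage_sketch() ->
-    Png = percentage(100, 10, 0.35),
-    ok = file:write_file("/tmp/percept_percentage_sketch.png", Png),
-    Png.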
-
-
-load_font() ->
- Filename = filename:join([code:priv_dir(percept),"fonts", "6x11_latin1.wingsfont"]),
- egd_font:load(Filename).
-
-text(Image, {X,Y}, Font, Text, Color) ->
- egd:text(Image, {X,Y-2}, Font, Text, Color).
diff --git a/lib/percept/test/Makefile b/lib/percept/test/Makefile
deleted file mode 100644
index 87fde49410..0000000000
--- a/lib/percept/test/Makefile
+++ /dev/null
@@ -1,91 +0,0 @@
-#
-# %CopyrightBegin%
-#
-# Copyright Ericsson AB 2007-2016. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# %CopyrightEnd%
-#
-
-include $(ERL_TOP)/make/target.mk
-
-include $(ERL_TOP)/make/$(TARGET)/otp.mk
-
-# ----------------------------------------------------
-# Target Specs
-# ----------------------------------------------------
-
-MODULES= \
- ipc_tree \
- percept_SUITE \
- egd_SUITE
-
-EBIN = .
-
-HRL_FILES=
-
-ERL_FILES= $(MODULES:%=%.erl)
-
-TARGET_FILES = $(MODULES:%=$(EBIN)/%.$(EMULATOR))
-
-SOURCE = $(ERL_FILES) $(HRL_FILES)
-
-EMAKEFILE=Emakefile
-
-# ----------------------------------------------------
-# Release directory specification
-# ----------------------------------------------------
-RELSYSDIR = $(RELEASE_PATH)/percept_test
-
-# ----------------------------------------------------
-# FLAGS
-# ----------------------------------------------------
-ERL_MAKE_FLAGS +=
-ERL_COMPILE_FLAGS += -I$(ERL_TOP)/lib/percept/include
-
-# ----------------------------------------------------
-# Targets
-# ----------------------------------------------------
-
-make_emakefile:
- $(ERL_TOP)/make/make_emakefile $(ERL_COMPILE_FLAGS) -o$(EBIN) $(MODULES)\
- > $(EMAKEFILE)
-
-tests debug opt: make_emakefile
- erl $(ERL_MAKE_FLAGS) -make
-
-clean:
- rm -f $(EMAKEFILE)
- rm -f $(TARGET_FILES)
- rm -f core *~
-
-docs:
-
-
-# ----------------------------------------------------
-# Release Target
-# ----------------------------------------------------
-include $(ERL_TOP)/make/otp_release_targets.mk
-
-release_spec: opt
-
-release_tests_spec: make_emakefile
- $(INSTALL_DIR) "$(RELSYSDIR)"
- $(INSTALL_DATA) percept.spec percept.cover $(EMAKEFILE) $(SOURCE) "$(RELSYSDIR)"
- chmod -R u+w "$(RELSYSDIR)"
- @tar cf - *_SUITE_data | (cd "$(RELSYSDIR)"; tar xf -)
-
-release_docs_spec:
-
-
diff --git a/lib/percept/test/egd_SUITE.erl b/lib/percept/test/egd_SUITE.erl
deleted file mode 100644
index 401695dddd..0000000000
--- a/lib/percept/test/egd_SUITE.erl
+++ /dev/null
@@ -1,389 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2007-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
-
--module(egd_SUITE).
--include_lib("common_test/include/ct.hrl").
-
-%% Test server specific exports
--export([all/0, suite/0]).
--export([init_per_suite/1, end_per_suite/1]).
--export([init_per_testcase/2, end_per_testcase/2]).
-
-%% Test cases
--export([image_create_and_destroy/1,
- image_shape/1,
- image_primitives/1,
- image_colors/1,
- image_font/1,
- image_fans/1,
- image_png_compliant/1]).
-
-suite() ->
- [{ct_hooks,[ts_install_cth]},
- {timetrap, {minutes, 1}}].
-
-all() ->
- [image_create_and_destroy, image_shape,
- image_primitives, image_colors, image_font,
- image_fans,
- image_png_compliant].
-
-
-init_per_suite(Config) when is_list(Config) ->
- rand:seed(exsplus),
- Config.
-
-end_per_suite(Config) when is_list(Config) ->
- Config.
-
-init_per_testcase(_Case, Config) ->
- [{max_size, 800}|Config].
-
-end_per_testcase(_Case, _Config) ->
- ok.
-
-%%----------------------------------------------------------------------
-%% Tests
-%%----------------------------------------------------------------------
-
-%% Image creation and destroy test.
-image_create_and_destroy(Config) when is_list(Config) ->
- {W,H} = get_size(proplists:get_value(max_size, Config)),
- Image = egd:create(W, H),
- ok = egd:destroy(Image),
- ok.
-
-%% Image color test.
-image_colors(Config) when is_list(Config) ->
- {W,H} = get_size(proplists:get_value(max_size, Config)),
- Dir = proplists:get_value(priv_dir, Config),
- Image = egd:create(W, H),
- put(image_size, {W,H}),
-
- RGB = get_rgb(),
- Black = egd:color({0,0,0}),
- Red = egd:color({255,0,0}),
- Green = egd:color({0,255,0}),
- Blue = egd:color({0,0,255}),
- Random = egd:color(Image, RGB),
-
- ok = egd:line(Image, get_point(), get_point(), Random),
- ok = egd:line(Image, get_point(), get_point(), Red),
- ok = egd:line(Image, get_point(), get_point(), Green),
- ok = egd:line(Image, get_point(), get_point(), Black),
- ok = egd:line(Image, get_point(), get_point(), Blue),
-
- HtmlDefaultNames = [black,silver,gray,white,maroon,red,
- purple,fuchia,green,lime,olive,yellow,navy,blue,teal,
- aqua],
-
- lists:foreach(fun (ColorName) ->
- Color = egd:color(ColorName),
- ok = egd:line(Image, get_point(), get_point(), Color)
- end, HtmlDefaultNames),
-
- Png1 = <<_/binary>> = egd:render(Image,png,[{render_engine, alpha}]),
- File1 = filename:join(Dir,"image_colors_alpha.png"),
- ok = egd:save(Png1,File1),
- ct:log("<p>Image alpha:</p><img src=\"~s\" />~n", [File1]),
- Png2 = <<_/binary>> = egd:render(Image,png,[{render_engine, opaque}]),
- File2 = filename:join(Dir,"image_colors_opaque.png"),
- ok = egd:save(Png2,File2),
- ct:log("<p>Image opaque:</p><img src=\"~s\" />~n", [File2]),
-
- ok = egd:destroy(Image),
- erase(image_size),
- ok.
-
-%% Image shape API test.
-image_shape(Config) when is_list(Config) ->
- {W,H} = get_size(proplists:get_value(max_size, Config)),
- Dir = proplists:get_value(priv_dir, Config),
- put(image_size, {W,H}),
- Im = egd:create(W, H),
-
- Fgc = egd:color({255,0,0}),
-
- ok = egd:line(Im, get_point(), get_point(), Fgc),
- ok = egd:rectangle(Im, get_point(), get_point(), Fgc),
- ok = egd:filledEllipse(Im, get_point(), get_point(), Fgc),
- ok = egd:arc(Im, get_point(), get_point(), Fgc),
- ok = egd:arc(Im, get_point(), get_point(), 100, Fgc),
-
- Pt1 = get_point(),
- Pt2 = get_point(),
-
- ok = egd:filledRectangle(Im, Pt1, Pt2, Fgc),
-
- Bitmap = egd:render(Im, raw_bitmap),
-
- ok = bitmap_point_has_color(Bitmap, {W,H}, Pt2, Fgc),
- ok = bitmap_point_has_color(Bitmap, {W,H}, Pt1, Fgc),
-
- Bin = <<_/binary>> = egd:render(Im, raw_bitmap, [{render_engine, alpha}]),
- Png = egd_png:binary(W,H,Bin),
- File = filename:join(Dir,"image_shape.png"),
- ok = egd:save(Png,File),
- ct:log("<p>Image:</p><img src=\"~s\" />~n", [File]),
-
- ok = egd:destroy(Im),
-
- erase(image_size),
- ok.
-
-%% Image shape API test.
-image_primitives(Config) when is_list(Config) ->
- {W,H} = get_size(proplists:get_value(max_size, Config)),
- Dir = proplists:get_value(priv_dir, Config),
- put(image_size, {W,H}),
-
- Im0 = egd_primitives:create(W, H),
- Fgc = egd:color({25,25,255}),
- Bgc = egd:color({0,250,25}),
-
- Im1 = lists:foldl(fun ({Function, Arguments}, Im) ->
- erlang:apply(egd_primitives, Function, [Im|Arguments])
- end, Im0,
- [{Fs, [get_point(), get_point(), Bgc]} || Fs <- [line, rectangle, filledEllipse, arc]] ++
- [{pixel, [get_point(), Bgc]},
- {filledTriangle, [get_point(), get_point(), get_point(), Bgc]}]),
-
- Pt1 = get_point(),
- Pt2 = get_point(),
-
- Im2 = egd_primitives:filledRectangle(Im1, Pt1, Pt2, Fgc),
-
- Bitmap = egd_render:binary(Im2, opaque),
-
- ok = bitmap_point_has_color(Bitmap, {W,H}, Pt2, Fgc),
- ok = bitmap_point_has_color(Bitmap, {W,H}, Pt1, Fgc),
-
- Bin = <<_/binary>> = egd_render:binary(Im2, alpha),
- Png = egd_png:binary(W,H,Bin),
- File = filename:join(Dir,"image_primitives.png"),
- ok = egd:save(Png,File),
- ct:log("<p>Image:</p><img src=\"~s\" />~n", [File]),
-
- erase(image_size),
- ok.
-
-%% Image font test.
-image_font(Config) when is_list(Config) ->
- {W,H} = get_size(proplists:get_value(max_size, Config)),
- Dir = proplists:get_value(priv_dir, Config),
- put(image_size, {W,H}),
- Im = egd:create(W, H),
- Fgc = egd:color({0,130,0}),
-
- Filename = filename:join([code:priv_dir(percept),"fonts","6x11_latin1.wingsfont"]),
- Font = egd_font:load(Filename),
-
- % simple text
- ok = egd:text(Im, get_point(), Font, "Hello World", Fgc),
- <<_/binary>> = egd:render(Im, png),
-
- GlyphStr1 = " !\"#$%&'()*+,-./", % Codes 32 -> 47
- NumericStr = "0123456789", % Codes 48 -> 57
- GlyphStr2 = ":;<=>?@", % Codes 58 -> 64
- AlphaBigStr = "ABCDEFGHIJKLMNOPQRSTUVWXYZ", % Codes 65 -> 90
- GlyphStr3 = "[\\]^_`", % Codes 91 -> 96
- AlphaSmStr = "abcdefghijklmnopqrstuvwxyz", % Codes 97 -> 122
- GlyphStr4 = "{|}~", % Codes 123 -> 126
-
- ok = egd:text(Im, get_point(), Font, GlyphStr1, Fgc),
- Png1 = <<_/binary>> = egd:render(Im, png),
- File1 = filename:join(Dir,"text1.png"),
- ok = egd:save(Png1,File1),
- ct:log("<p>Image:</p><img src=\"~s\" />~n", [File1]),
-
- ok = egd:text(Im, get_point(), Font, NumericStr, Fgc),
- Png2 = <<_/binary>> = egd:render(Im, png),
- File2 = filename:join(Dir,"text2.png"),
- ok = egd:save(Png2,File2),
- ct:log("<p>Image:</p><img src=\"~s\" />~n", [File2]),
-
- ok = egd:text(Im, get_point(), Font, GlyphStr2, Fgc),
- Png3 = <<_/binary>> = egd:render(Im, png),
- File3 = filename:join(Dir,"text3.png"),
- ok = egd:save(Png3,File3),
- ct:log("<p>Image:</p><img src=\"~s\" />~n", [File3]),
-
- ok = egd:text(Im, get_point(), Font, AlphaBigStr, Fgc),
- Png4 = <<_/binary>> = egd:render(Im, png),
- File4 = filename:join(Dir,"text4.png"),
- ok = egd:save(Png4,File4),
- ct:log("<p>Image:</p><img src=\"~s\" />~n", [File4]),
-
- ok = egd:text(Im, get_point(), Font, GlyphStr3, Fgc),
- Png5 = <<_/binary>> = egd:render(Im, png),
- File5 = filename:join(Dir,"text5.png"),
- ok = egd:save(Png5,File5),
- ct:log("<p>Image:</p><img src=\"~s\" />~n", [File5]),
-
- ok = egd:text(Im, get_point(), Font, AlphaSmStr, Fgc),
- Png6 = <<_/binary>> = egd:render(Im, png),
- File6 = filename:join(Dir,"text6.png"),
- ok = egd:save(Png6,File6),
- ct:log("<p>Image:</p><img src=\"~s\" />~n", [File6]),
-
- ok = egd:text(Im, get_point(), Font, GlyphStr4, Fgc),
- Png7 = <<_/binary>> = egd:render(Im, png),
- File7 = filename:join(Dir,"text7.png"),
- ok = egd:save(Png7,File7),
- ct:log("<p>Image:</p><img src=\"~s\" />~n", [File7]),
-
- ok = egd:destroy(Im),
- erase(image_size),
- ok.
-
-%% Image png compliant test.
-image_png_compliant(Config) when is_list(Config) ->
- {W,H} = get_size(proplists:get_value(max_size, Config)),
- put(image_size, {W,H}),
- Im = egd:create(W, H),
- Fgc = egd:color({0,0,0}),
- ok = egd:filledRectangle(Im, get_point(), get_point(), Fgc),
-
- Bin = egd:render(Im, png),
- true = binary_is_png_compliant(Bin),
-
- ok = egd:destroy(Im),
- erase(image_size),
- ok.
-
-image_fans(Config) when is_list(Config) ->
- W = 1024,
- H = 800,
- Dir = proplists:get_value(priv_dir, Config),
-
- Fun = fun({F,Args},Im) ->
- erlang:apply(egd_primitives,F,[Im|Args])
- end,
-
- %% fan1
- Ops1 = gen_vertical_fan(1,{0,400},egd:color(red),1024,800,-15),
- Ops2 = gen_horizontal_fan(1,{512,800},egd:color(green),1024,0,-15),
-
- Im0 = egd_primitives:create(W,H),
- Im1 = lists:foldl(Fun, Im0, Ops1 ++ Ops2),
- Bin1 = egd_render:binary(Im1, opaque),
- Png1 = egd_png:binary(W,H,Bin1),
-
- File1 = filename:join(Dir,"fan1_opaque.png"),
- ok = egd:save(Png1,File1),
- ct:log("<p>Image opaque width 1:</p><img src=\"~s\" />~n", [File1]),
-
- Bin2 = egd_render:binary(Im1, alpha),
- Png2 = egd_png:binary(W,H,Bin2),
-
- File2 = filename:join(Dir,"fan1_alpha.png"),
- ok = egd:save(Png2,File2),
- ct:log("<p>Image alpha width 1:</p><img src=\"~s\" />~n", [File2]),
-
-
- %% fan2
- Ops3 = gen_vertical_fan(7,{0,400},egd:color(red),1024,800,-15),
- Ops4 = gen_horizontal_fan(7,{512,800},egd:color(green),1024,0,-15),
-
- Im2 = lists:foldl(Fun, Im0, Ops3 ++ Ops4),
- Bin3 = egd_render:binary(Im2, opaque),
- Png3 = egd_png:binary(W,H,Bin3),
-
- File3 = filename:join(Dir,"fan2_opaque.png"),
- ok = egd:save(Png3,File3),
- ct:log("<p>Image opaque width 7:</p><img src=\"~s\" />~n", [File3]),
-
- Bin4 = egd_render:binary(Im2, alpha),
- Png4 = egd_png:binary(W,H,Bin4),
-
- File4 = filename:join(Dir,"fan2_alpha.png"),
- ok = egd:save(Png4,File4),
- ct:log("<p>Image alpha width 7:</p><img src=\"~s\" />~n", [File4]),
- ok.
-
-gen_vertical_fan(Wd,Pt,C,X,Y,Step) when Y > 0 ->
- [{line,[Pt,{X,Y},Wd,C]}|gen_vertical_fan(Wd,Pt,C,X,Y + Step,Step)];
-gen_vertical_fan(_,_,_,_,_,_) -> [].
-
-gen_horizontal_fan(Wd,Pt,C,X,Y,Step) when X > 0 ->
- [{line,[Pt,{X,Y},Wd,C]}|gen_horizontal_fan(Wd,Pt,C,X + Step,Y,Step)];
-gen_horizontal_fan(_,_,_,_,_,_) -> [].
-
-
-%%----------------------------------------------------------------------
-%% Auxiliary tests
-%%----------------------------------------------------------------------
-
-bitmap_point_has_color(Bitmap, {W,_}, {X,Y}, C) ->
- {CR,CG,CB,_} = egd_primitives:rgb_float2byte(C),
- N = W*Y*3 + X*3,
- << _:N/binary, R,G,B, _/binary>> = Bitmap,
- case {R,G,B} of
- {CR,CG,CB} -> ok;
- Other ->
- io:format("bitmap_point_has_color: error color was ~p, should be ~p~n", [Other, {CR,CG,CB}]),
- {error, {Other,{CR,CG,CB}}}
- end.
-
-binary_is_png_compliant(PngBin) ->
- {Bin, _} = split_binary(PngBin, 10),
- List = binary_to_list(Bin),
- case lists:sublist(List, 2,3) of
- "PNG" -> true;
- Other ->
- io:format("img -> ~p~n", [Other]),
- false
- end.
-
-%%----------------------------------------------------------------------
-%% Auxiliary
-%%----------------------------------------------------------------------
-
-
-get_rgb() ->
- R = random(255),
- G = random(255),
- B = random(255),
- {R,G,B}.
-
-get_angle() ->
- random(359).
-
-get_point() ->
- get_point(get(image_size)).
-get_point({W,H}) ->
- X = random(W - 1),
- Y = random(H - 1),
- {X,Y}.
-
-get_size(Max) ->
- W = trunc(random(Max/2) + Max/2 + 1),
- H = trunc(random(Max/2) + Max/2 + 1),
- io:format("Image size will be ~p x ~p~n", [W,H]),
- {W,H}.
-
-get_points(N) ->
- get_points(N, []).
-get_points(0, Out) ->
- Out;
-get_points(N, Out) ->
- get_points(N - 1, [get_point() | Out]).
-
-random(N) -> trunc(rand:uniform(trunc(N + 1)) - 1).
diff --git a/lib/percept/test/ipc_tree.erl b/lib/percept/test/ipc_tree.erl
deleted file mode 100644
index 29da20e83f..0000000000
--- a/lib/percept/test/ipc_tree.erl
+++ /dev/null
@@ -1,49 +0,0 @@
-%% ``Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
-%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
-%% AB. All Rights Reserved.''
-%%
-%% $Id$
-%%
-
--module(ipc_tree).
--export([go/1, init/2]).
-
-go(N) ->
- start(N, self()),
- receive stop -> ok end.
-
-start(Depth, ParentPid) ->
- spawn(?MODULE, init, [Depth, ParentPid]).
-
-init(0, ParentPid) ->
- workload(5000),
- ParentPid ! stop,
- ok;
-init(Depth, ParentPid) ->
- Pid1 = spawn(?MODULE, init, [Depth - 1, self()]),
- Pid2 = spawn(?MODULE, init, [Depth - 1, self()]),
- main([Pid1,Pid2], ParentPid).
-
-main(Pids, ParentPid) ->
- workload(5000),
- gather(Pids),
- ParentPid ! stop,
- ok.
-
-gather([]) -> ok;
-gather([_|Pids]) -> receive _ -> gather(Pids) end.
-
-workload(0) -> ok;
-workload(N) -> _ = math:sin(2), workload(N - 1).
diff --git a/lib/percept/test/percept.cover b/lib/percept/test/percept.cover
deleted file mode 100644
index 8a5ad0a55e..0000000000
--- a/lib/percept/test/percept.cover
+++ /dev/null
@@ -1,2 +0,0 @@
-{incl_app,percept,details}.
-
diff --git a/lib/percept/test/percept.spec b/lib/percept/test/percept.spec
deleted file mode 100644
index f3ef76bd60..0000000000
--- a/lib/percept/test/percept.spec
+++ /dev/null
@@ -1 +0,0 @@
-{suites,"../percept_test",all}.
diff --git a/lib/percept/test/percept_SUITE.erl b/lib/percept/test/percept_SUITE.erl
deleted file mode 100644
index 2be8b70e0d..0000000000
--- a/lib/percept/test/percept_SUITE.erl
+++ /dev/null
@@ -1,126 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2007-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
-
--module(percept_SUITE).
--include_lib("common_test/include/ct.hrl").
-
-%% Test server specific exports
--export([all/0, suite/0]).
-
-%% Test cases
--export([app/1,
- appup/1,
- profile/1,
- analyze/1,
- analyze_dist/1,
- webserver/1]).
-
-suite() ->
- [{ct_hooks,[ts_install_cth]},
- {timetrap, {minutes, 2}}].
-
-all() ->
- [app, appup, webserver, profile,
- analyze, analyze_dist].
-
-
-%%----------------------------------------------------------------------
-%% Tests
-%%----------------------------------------------------------------------
-
-%% Test that the percept app file is ok
-app(Config) when is_list(Config) ->
- ok = test_server:app_test(percept).
-
-%% Test that the percept appup file is ok
-appup(Config) when is_list(Config) ->
- ok = test_server:appup_test(percept).
-
-%% Percept webserver test.
-webserver(Config) when is_list(Config) ->
- % Explicit start inets?
- {started, _, Port} = percept:start_webserver(),
- ok = percept:stop_webserver(Port),
- {started, _, _} = percept:start_webserver(),
- ok = percept:stop_webserver(),
- {started, _, NewPort} = percept:start_webserver(),
- ok = percept:stop_webserver(NewPort),
- application:stop(inets),
- ok.
-
-%% Percept profile test.
-profile(Config) when is_list(Config) ->
- Path = proplists:get_value(data_dir, Config),
- File = filename:join([Path,"profile_test.dat"]),
- {ok, _} = percept:profile(File, [procs]),
- ipc_tree:go(7),
- ok = percept:stop_profile(),
- ok.
-
-%% Percept analyze test.
-analyze(Config) when is_list(Config) ->
- Begin = processes(),
- Path = proplists:get_value(data_dir, Config),
- File = filename:join([Path,"profile_test.dat"]),
- T0 = erlang:monotonic_time(millisecond),
- ok = percept:analyze(File),
- T1 = erlang:monotonic_time(millisecond),
- io:format("percept:analyze/1 took ~w ms.~n", [T1 - T0]),
- {stopped, _} = percept_db:stop(),
- print_remainers(remainers(Begin, processes())),
- ok.
-
-%% Percept analyze distribution test.
-analyze_dist(Config) when is_list(Config) ->
- Begin = processes(),
- Path = proplists:get_value(data_dir, Config),
- File = filename:join([Path,"ipc-dist.dat"]),
- T0 = erlang:monotonic_time(millisecond),
- ok = percept:analyze(File),
- T1 = erlang:monotonic_time(millisecond),
- io:format("percept:analyze/1 took ~w ms.~n", [T1 - T0]),
- {stopped, _} = percept_db:stop(),
- print_remainers(remainers(Begin, processes())),
- ok.
-
-%%----------------------------------------------------------------------
-%% Auxiliary tests
-%%----------------------------------------------------------------------
-
-%%----------------------------------------------------------------------
-%% Auxiliary
-%%----------------------------------------------------------------------
-
-print_remainers([]) -> ok;
-print_remainers([Pid|Pids]) ->
- io:format("[Pid ~p] [Entry ~p] [Name ~p]~n", [
- Pid,
- erlang:process_info(Pid, initial_call),
- erlang:process_info(Pid, registered_name)
- ]),
- print_remainers(Pids).
-
-remainers(Begin, End) -> remainers(Begin, End, []).
-remainers(_, [], Out) -> lists:reverse(Out);
-remainers(Begin, [Pid|End], Out) ->
- case lists:member(Pid, Begin) of
- true -> remainers(Begin, End, Out);
- false -> remainers(Begin, End, [Pid|Out])
- end.
diff --git a/lib/percept/test/percept_SUITE_data/ipc-dist.dat b/lib/percept/test/percept_SUITE_data/ipc-dist.dat
deleted file mode 100644
index 14ab6c0c5d..0000000000
--- a/lib/percept/test/percept_SUITE_data/ipc-dist.dat
+++ /dev/null
Binary files differ
diff --git a/lib/percept/test/percept_db_SUITE.erl b/lib/percept/test/percept_db_SUITE.erl
deleted file mode 100644
index b2827e0e42..0000000000
--- a/lib/percept/test/percept_db_SUITE.erl
+++ /dev/null
@@ -1,55 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2007-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
-
--module(percept_db_SUITE).
--include_lib("common_test/include/ct.hrl").
-
-%% Test server specific exports
--export([all/0, suite/0]).
-
-%% Test cases
--export([start/1]).
-
-%% Default timetrap timeout (set in init_per_testcase)
--define(restarts, 10).
--define(alive_timeout, 500).
-
-suite() ->
- [{timetrap, {minutes, 2}}].
-
-all() ->
- [start].
-
-%%----------------------------------------------------------------------
-%% Tests
-%%----------------------------------------------------------------------
-
-%% Percept_db start and restart test.
-start(Config) when is_list(Config) ->
- ok = restart(?restarts),
- {stopped, _DB} = percept_db:stop(),
- ok.
-
-restart(0)-> ok;
-restart(N)->
- {_, DB} = percept_db:start(),
- timer:sleep(?alive_timeout),
- true = erlang:is_process_alive(DB),
- restart(N-1).
diff --git a/lib/percept/vsn.mk b/lib/percept/vsn.mk
deleted file mode 100644
index 614cee8645..0000000000
--- a/lib/percept/vsn.mk
+++ /dev/null
@@ -1 +0,0 @@
-PERCEPT_VSN = 0.9
diff --git a/lib/public_key/asn1/PKCS-8.asn1 b/lib/public_key/asn1/PKCS-8.asn1
index 8412345b68..292a7b2029 100644
--- a/lib/public_key/asn1/PKCS-8.asn1
+++ b/lib/public_key/asn1/PKCS-8.asn1
@@ -26,7 +26,7 @@ BEGIN
-- This import is really unnecessary since ALGORITHM-IDENTIFIER is defined as a
-- TYPE-IDENTIFIER
--- Renome this import and replace all occurences of ALGORITHM-IDENTIFIER with
+-- Rename this import and replace all occurrences of ALGORITHM-IDENTIFIER with
-- TYPE-IDENTIFIER as a workaround for weaknesses in the ASN.1 compiler
--AlgorithmIdentifier, ALGORITHM-IDENTIFIER
-- FROM PKCS5v2-0 {iso(1) member-body(2) us(840) rsadsi(113549)
diff --git a/lib/public_key/doc/src/public_key.xml b/lib/public_key/doc/src/public_key.xml
index c503230d70..c97ec361d1 100644
--- a/lib/public_key/doc/src/public_key.xml
+++ b/lib/public_key/doc/src/public_key.xml
@@ -757,6 +757,39 @@ fun(#'DistributionPoint'{}, #'CertificateList'{},
</func>
<func>
+ <name>pkix_verify_hostname(Cert, ReferenceIDs) -> boolean()</name>
+ <name>pkix_verify_hostname(Cert, ReferenceIDs, Opts) -> boolean()</name>
+ <fsummary>Verifies that a PKIX X.509 certificate <i>presented identifier</i> (e.g. hostname) is
+ an expected one.</fsummary>
+ <type>
+ <v>Cert = der_encoded() | #'OTPCertificate'{} </v>
+ <v>ReferenceIDs = [ RefID ]</v>
+ <v>RefID = {IdType,string()}</v>
+ <v>IdType = dns_id | srv_id | uri_id</v>
+ <v>Opts = [ PvhOpt() ]</v>
+ <v>PvhOpt = [MatchOpt | FailCallBackOpt | FqdnExtractOpt]</v>
+ <v>MatchOpt = {fun(RefId | FQDN::string(), PresentedID) -> boolean() | default}</v>
+ <v>PresentedID = {dNSName,string()} | {uniformResourceIdentifier,string()}</v>
+ <v>FailCallBackOpt = {fail_callback, fun(#'OTPCertificate'{}) -> boolean()}</v>
+ <v>FqdnExtractOpt = {fqdn_fun, fun(RefID) -> FQDN::string() | default | undefined}</v>
+ </type>
+ <desc>
+ <p>This function checks that the <i>Presented Identifier</i> (e.g. hostname) in a peer certificate
+ conforms to the Expected Identifier that the client wants to connect to.
+ This function is intended to be added as an extra client check of the peer certificate when performing
+ <seealso marker="public_key:public_key#pkix_path_validation-3">public_key:pkix_path_validation/3</seealso>
+ </p>
+ <p>See <url href="https://tools.ietf.org/html/rfc6125">RFC 6125</url>
+ for detailed information about hostname verification.
+ The <seealso marker="using_public_key#verify_hostname">User's Manual</seealso>
+ and
+ <seealso marker="using_public_key#verify_hostname_examples">code examples</seealso>
+ describe this function in more detail.
+ </p>
+ </desc>
+ </func>
+
+ <func>
<name>sign(Msg, DigestType, Key) -> binary()</name>
<fsummary>Creates a digital signature.</fsummary>
<type>
@@ -824,6 +857,7 @@ fun(#'DistributionPoint'{}, #'CertificateList'{},
<func>
<name>ssh_hostkey_fingerprint(HostKey) -> string()</name>
<name>ssh_hostkey_fingerprint(DigestType, HostKey) -> string()</name>
+ <name>ssh_hostkey_fingerprint([DigestType], HostKey) -> [string()]</name>
<fsummary>Calculates a ssh fingerprint for a hostkey.</fsummary>
<type>
<v>Key = public_key()</v>
@@ -847,6 +881,10 @@ fun(#'DistributionPoint'{}, #'CertificateList'{},
5> public_key:ssh_hostkey_fingerprint(sha256,Key).
"SHA256:aZGXhabfbf4oxglxltItWeHU7ub3Dc31NcNw2cMJePQ"
+
+ 6> public_key:ssh_hostkey_fingerprint([sha,sha256],Key).
+ ["SHA1:bSLY/C4QXLDL/Iwmhyg0PGW9UbY",
+ "SHA256:aZGXhabfbf4oxglxltItWeHU7ub3Dc31NcNw2cMJePQ"]
</code>
</desc>
</func>
diff --git a/lib/public_key/doc/src/using_public_key.xml b/lib/public_key/doc/src/using_public_key.xml
index e3a1eed4be..417d479da3 100644
--- a/lib/public_key/doc/src/using_public_key.xml
+++ b/lib/public_key/doc/src/using_public_key.xml
@@ -417,6 +417,259 @@ true = public_key:verify(Digest, none, Signature, PublicKey),</code>
</section>
+ <section>
+ <marker id="verify_hostname"></marker>
+ <title>Verifying a certificate hostname</title>
+ <section>
+ <title>Background</title>
+ <p>When a client checks a server certificate, a number of checks are available, for example
+ that the certificate is not revoked, not forged, and not out-of-date.
+ </p>
+ <p>There are however attacks that are not detected by those checks. Suppose a bad guy has
+ succeeded with a DNS infection. Then the client could believe it is connecting to one host but
+ ends up at another, malicious one. Even so, that host could have a perfectly legal
+ certificate! The certificate has a valid signature, it is not revoked, the certificate chain
+ is not faked, it has a trusted root and so on.
+ </p>
+ <p>To detect that the server is not the intended one, the client must additionally perform
+ a <i>hostname verification</i>. This procedure is described in
+ <url href="https://tools.ietf.org/html/rfc6125">RFC 6125</url>. The idea is that the certificate
+ lists the hostnames it could be fetched from. This is checked by the certificate issuer when
+ the certificate is signed. So if the certificate is issued by a trusted root, the client
+ can trust the hostnames signed in it.
+ </p>
+ <p>There is a default hostname matching procedure defined in
+ <url href="https://tools.ietf.org/html/rfc6125#section-6">RFC 6125, section 6</url>
+ as well as protocol dependent variations defined in
+ <url href="https://tools.ietf.org/html/rfc6125#appendix-B">RFC 6125 appendix B</url>.
+ The default procedure is implemented in
+ <seealso marker="public_key:public_key#pkix_verify_hostname-2">public_key:pkix_verify_hostname/2,3</seealso>.
+ It is possible for a client to hook in modified rules using the options list.
+ </p>
+ <p>Some terminology is needed: the certificate presents hostname(s) on which it is valid.
+ Those are called <i>Presented IDs</i>. The hostname(s) the client believes it connects to
+ are called <i>Reference IDs</i>. The matching rules aim to verify that at least
+ one of the Reference IDs matches one of the Presented IDs. If not, the verification fails.
+ </p>
+ <p>The IDs contain normal fully qualified domain names such as <c>foo.example.com</c>,
+ but IP addresses are not recommended. The RFC describes why this is not recommended as well
+ as security considerations about how to acquire the Reference IDs.
+ </p>
+ <p>Internationalized domain names are not supported.
+ </p>
+ </section>
+ <section>
+ <title>The verification process</title>
+ <p>Traditionally the Presented IDs were found in the <c>Subject</c> certificate field as <c>CN</c>
+ names. This is still quite common. When printing a certificate they show up as:
+ </p>
+ <code>
+ $ openssl x509 -text &lt; cert.pem
+ ...
+ Subject: C=SE, CN=example.com, CN=*.example.com, O=erlang.org
+ ...
+ </code>
+ <p>The example <c>Subject</c> field has one C, two CN and one O part. It is only the
+ CN (Common Name) parts that are used by hostname verification. The other two (C and O) are not used
+ here, even when they contain a domain name like the O part does. The C and O parts are defined
+ elsewhere and are meaningful only for other functions.
+ </p>
+ <p>In the example the Presented IDs are <c>example.com</c> as well as hostnames matching
+ <c>*.example.com</c>. For example <c>foo.example.com</c> and <c>bar.example.com</c> both
+ match, but not <c>foo.bar.example.com</c>. The name <c>erlang.org</c> does not match either,
+ since it is not a CN.
+ </p>
+ <p>In cases where the Presented IDs are fetched from the <c>Subject</c> certificate field, the
+ names may contain wildcard characters. The function handles this as defined in
+ <url href="https://tools.ietf.org/html/rfc6125#section-6.4.3">chapter 6.4.3 in RFC 6125</url>.
+ </p>
+ <p>There may only be one wildcard character, and it must be in the first label, for example:
+ <c>*.example.com</c>. This matches <c>foo.example.com</c> but neither <c>example.com</c> nor
+ <c>foo.bar.example.com</c>.
+ </p>
+ <p>There may be label characters before and/or after the wildcard. For example:
+ <c>a*d.example.com</c> matches <c>abcd.example.com</c> and <c>ad.example.com</c>,
+ but not <c>ab.cd.example.com</c>.
+ </p>
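+ <p>As a minimal illustration (the certificate variable and names are hypothetical;
+ the function call itself is described in the examples section below), these rules give:
+ </p>
+ <code>
+ %% CertFromHost has the Subject CN:s example.com and *.example.com, and no subjAltName:
+ true  = public_key:pkix_verify_hostname(CertFromHost, [{dns_id,"foo.example.com"}]),
+ false = public_key:pkix_verify_hostname(CertFromHost, [{dns_id,"foo.bar.example.com"}]).
+ </code>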
+ <p>In the previous example there is no indication of which protocols are expected. So a client
+ cannot tell whether it is connected to a web server, an LDAP server, or maybe a SIP server.
+ There are fields in the certificate that can indicate this. To be more exact, the RFC
+ introduces the usage of the <c>X509v3 Subject Alternative Name</c> in the <c>X509v3 extensions</c>
+ field:
+ </p>
+ <code>
+ $ openssl x509 -text &lt; cert.pem
+ ...
+ X509v3 extensions:
+ X509v3 Subject Alternative Name:
+ DNS:kb.example.org, URI:https://www.example.org
+ ...
+ </code>
+ <p>Here <c>kb.example.org</c> serves any protocol while <c>www.example.org</c> presents a secure
+ web server.
+ </p>
+
+ <p>The next example has both <c>Subject</c> and <c>Subject Alternate Name</c> present:</p>
+ <code>
+ $ openssl x509 -text &lt; cert.pem
+ ...
+ Subject: C=SE, CN=example.com, CN=*.example.com, O=erlang.org
+ ...
+ X509v3 extensions:
+ X509v3 Subject Alternative Name:
+ DNS:kb.example.org, URI:https://www.example.org
+ ...
+ </code>
+ <p>The RFC states that if a certificate defines Presented IDs in a <c>Subject Alternate Name</c>
+ field, the <c>Subject</c> field MUST NOT be used for hostname checking, even if it contains
+ valid CN names.
+ Therefore only <c>kb.example.org</c> and <c>https://www.example.org</c> match. The match fails
+ both for <c>example.com</c> and <c>foo.example.com</c> because they are in the <c>Subject</c>
+ field, which is not checked because the <c>Subject Alternate Name</c> field is present.
+ </p>
+ </section>
+
+ <section>
+ <marker id="verify_hostname_examples"></marker>
+ <title>Function call examples</title>
+ <note>
+ <p>Other applications like ssl/tls or https might have options that are passed
+ down to <c>public_key:pkix_verify_hostname</c>, so you will probably not
+ have to call it directly.</p>
+ </note>
+ <p>Suppose our client expects to connect to the web server https://www.example.net. This
+ URI is therefore the Reference ID of the client.
+ The call will be:
+ </p>
+ <code>
+ public_key:pkix_verify_hostname(CertFromHost,
+ [{uri_id, "https://www.example.net"}
+ ]).
+ </code>
+ <p>The call will return <c>true</c> or <c>false</c> depending on the check. The caller
+ does not need to handle the matching rules in the RFC. The matching proceeds as follows:
+ </p>
+ <list>
+ <item>If there is a <c>Subject Alternate Name</c> field, the <c>{uri_id,string()}</c> in the
+ function call will be compared to any
+ <c>{uniformResourceIdentifier,string()}</c> in the Certificate field.
+ If the two <c>string()</c>s are equal (case-insensitive), there is a match.
+ The same applies for any <c>{dns_id,string()}</c> in the call, which is compared
+ with all <c>{dNSName,string()}</c> in the Certificate field.
+ </item>
+ <item>If there is NO <c>Subject Alternate Name</c> field, the <c>Subject</c> field will be
+ checked. All <c>CN</c> names will be compared to all hostnames <i>extracted</i> from
+ <c>{uri_id,string()}</c> and from <c>{dns_id,string()}</c>.
+ </item>
+ </list>
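+ <p>A minimal sketch of a call that mixes both kinds of Reference IDs (the names are
+ hypothetical) could look like:
+ </p>
+ <code>
+ public_key:pkix_verify_hostname(CertFromHost,
+                                 [{uri_id, "https://www.example.net"},
+                                  {dns_id, "kb.example.net"}
+                                 ]).
+ </code>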
+ </section>
+ <section>
+ <title>Extending the search mechanism</title>
+ <p>The caller can use its own extraction and matching rules. This is done with the two options
+ <c>fqdn_fun</c> and <c>match_fun</c>.
+ </p>
+ <section>
+ <title>Hostname extraction</title>
+ <p>The <c>fqdn_fun</c> extracts hostnames (Fully Qualified Domain Names) from uri_id
+ or other ReferenceIDs that are not pre-defined in the public_key function.
+ Suppose you have some URI with a very special protocol-part:
+ <c>myspecial://example.com</c>. Since this is a non-standard URI, there will be no hostname
+ extracted for matching CN-names in the <c>Subject</c>.</p>
+ <p>To "teach" the function how to extract, you can give a fun which replaces the default
+ extraction function.
+ The <c>fqdn_fun</c> takes one argument and returns
+ either a <c>string()</c> to be matched to each CN-name or the atom <c>default</c> which will invoke
+ the default fqdn extraction function. The return value <c>undefined</c> removes the current
+ URI from the fqdn extraction.
+ </p>
+ <code>
+ ...
+ Extract = fun({uri_id, "myspecial://"++HostName}) -> HostName;
+ (_Else) -> default
+ end,
+ ...
+ public_key:pkix_verify_hostname(CertFromHost, RefIDs,
+ [{fqdn_fun, Extract}])
+ ...
+ </code>
+ </section>
+ <section>
+ <title>Re-defining the match operations</title>
+ <p>The default matching handles dns_id and uri_id. In a uri_id the value is tested for
+ equality with a value from the <c>Subject Alternate Name</c>. If some other kind of matching
+ is needed, use the <c>match_fun</c> option.
+ </p>
+ <p>The <c>match_fun</c> takes two arguments and returns either <c>true</c>,
+ <c>false</c> or <c>default</c>. The value <c>default</c> will invoke the default
+ match function.
+ </p>
+ <code>
+ ...
+ Match = fun({uri_id,"myspecial://"++A},
+ {uniformResourceIdentifier,"myspecial://"++B}) ->
+ my_match(A,B);
+ (_RefID, _PresentedID) ->
+ default
+ end,
+ ...
+ public_key:pkix_verify_hostname(CertFromHost, RefIDs,
+ [{match_fun, Match}]),
+ ...
+ </code>
+ <p>In case of a match operation between a ReferenceID and a CN value from the <c>Subject</c>
+ field, the first argument to the fun is the extracted hostname from the ReferenceID, and the
+ second argument is the tuple <c>{cn, string()}</c> taken from the <c>Subject</c> field. That
+ makes it possible to have separate matching rules for Presented IDs from the <c>Subject</c>
+ field and from the <c>Subject Alternate Name</c> field.
+ </p>
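+ <p>A minimal sketch of a <c>match_fun</c> with a separate, hypothetical rule for
+ CN values from the <c>Subject</c> field could look like:
+ </p>
+ <code>
+ Match = fun(RefHostname, {cn, CN}) ->
+                 %% Hypothetical rule: accept the CN if it equals the extracted
+                 %% hostname with a "www." prefix added
+                 CN == "www." ++ RefHostname;
+            (_RefID, _PresentedID) ->
+                 default
+         end,
+ public_key:pkix_verify_hostname(CertFromHost, RefIDs,
+                                 [{match_fun, Match}]).
+ </code>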
+ <p>The default matching transforms the ASCII values in strings to lowercase before comparing.
+ The <c>match_fun</c> is however called without any transformation applied to the strings. The
+ reason is to enable the user to do unforeseen handling of the strings where the original format
+ is needed.
+ </p>
+ </section>
+ </section>
+ <section>
+ <title>"Pinning" a Certificate</title>
+ <p><url href="https://tools.ietf.org/html/rfc6125">RFC 6125</url> defines <i>pinning</i>
+ as:</p>
+ <quote>
+ <p>"The act of establishing a cached name association between
+ the application service's certificate and one of the client's
+ reference identifiers, despite the fact that none of the presented
+ identifiers matches the given reference identifier. ..."
+ </p>
+ </quote>
+ <p>The purpose is to have a mechanism for a human to accept an otherwise faulty Certificate.
+ In a web browser, for example, you could get a question like:</p>
+ <quote>
+ <p>"Warning: you wanted to visit the site www.example.com,
+ but the certificate is for shop.example.com. Accept anyway (yes/no)?"
+ </p>
+ </quote>
+ <p>This could be accomplished with the option <c>fail_callback</c> which will
+ be called if the hostname verification fails:
+ </p>
+ <code>
+ -include_lib("public_key/include/public_key.hrl"). % Record def
+ ...
+ Fail = fun(#'OTPCertificate'{}=C) ->
+ case in_my_cache(C) orelse my_accept(C) of
+ true ->
+ enter_my_cache(C),
+ true;
+ false ->
+ false
+ end
+ end,
+ ...
+ public_key:pkix_verify_hostname(CertFromHost, RefIDs,
+ [{fail_callback, Fail}]),
+ ...
+ </code>
+ </section>
+ </section>
+
<section>
<title>SSH Files</title>
diff --git a/lib/public_key/src/public_key.erl b/lib/public_key/src/public_key.erl
index 3d6238d998..50d4d82d15 100644
--- a/lib/public_key/src/public_key.erl
+++ b/lib/public_key/src/public_key.erl
@@ -48,6 +48,7 @@
pkix_issuer_id/2,
pkix_normalize_name/1,
pkix_path_validation/3,
+ pkix_verify_hostname/2, pkix_verify_hostname/3,
ssh_decode/2, ssh_encode/2,
ssh_hostkey_fingerprint/1, ssh_hostkey_fingerprint/2,
ssh_curvename2oid/1, oid2ssh_curvename/1,
@@ -763,6 +764,76 @@ pkix_crls_validate(OtpCert, DPAndCRLs0, Options) ->
pkix_crls_validate(OtpCert, DPAndCRLs, DPAndCRLs,
Options, pubkey_crl:init_revokation_state()).
+%%--------------------------------------------------------------------
+-spec pkix_verify_hostname(Cert :: #'OTPCertificate'{} | binary(),
+ ReferenceIDs :: [{uri_id | dns_id | oid(), string()}]) -> boolean().
+
+-spec pkix_verify_hostname(Cert :: #'OTPCertificate'{} | binary(),
+ ReferenceIDs :: [{uri_id | dns_id | oid(), string()}],
+ Options :: proplists:proplist()) -> boolean().
+
+%% Description: Validates a hostname according to RFC 6125
+%%--------------------------------------------------------------------
+pkix_verify_hostname(Cert, ReferenceIDs) ->
+ pkix_verify_hostname(Cert, ReferenceIDs, []).
+
+pkix_verify_hostname(BinCert, ReferenceIDs, Options) when is_binary(BinCert) ->
+ pkix_verify_hostname(pkix_decode_cert(BinCert,otp), ReferenceIDs, Options);
+
+pkix_verify_hostname(Cert = #'OTPCertificate'{tbsCertificate = TbsCert}, ReferenceIDs0, Opts) ->
+ MatchFun = proplists:get_value(match_fun, Opts, undefined),
+ FailCB = proplists:get_value(fail_callback, Opts, fun(_Cert) -> false end),
+ FqdnFun = proplists:get_value(fqdn_fun, Opts, fun verify_hostname_extract_fqdn_default/1),
+
+ ReferenceIDs = [{T,to_string(V)} || {T,V} <- ReferenceIDs0],
+ PresentedIDs =
+ try lists:keyfind(?'id-ce-subjectAltName',
+ #'Extension'.extnID,
+ TbsCert#'OTPTBSCertificate'.extensions)
+ of
+ #'Extension'{extnValue = ExtVals} ->
+ [{T,to_string(V)} || {T,V} <- ExtVals];
+ false ->
+ []
+ catch
+ _:_ -> []
+ end,
+ %% PresentedIDs example: [{dNSName,"ewstest.ericsson.com"}, {dNSName,"www.ericsson.com"}]
+ case PresentedIDs of
+ [] ->
+ %% Fallback to CN-ids [rfc6125, ch6]
+ case TbsCert#'OTPTBSCertificate'.subject of
+ {rdnSequence,RDNseq} ->
+ PresentedCNs =
+ [{cn, to_string(V)}
+ || ATVs <- RDNseq, % RDNseq is list-of-lists
+ #'AttributeTypeAndValue'{type = ?'id-at-commonName',
+ value = {_T,V}} <- ATVs
+ % _T = kind of string (teletexString etc)
+ ],
+ %% Example of PresentedCNs: [{cn,"www.ericsson.se"}]
+ %% match ReferenceIDs to PresentedCNs
+ verify_hostname_match_loop(verify_hostname_fqnds(ReferenceIDs, FqdnFun),
+ PresentedCNs,
+ MatchFun, FailCB, Cert);
+
+ _ ->
+ false
+ end;
+ _ ->
+ %% match ReferenceIDs to PresentedIDs
+ case verify_hostname_match_loop(ReferenceIDs, PresentedIDs,
+ MatchFun, FailCB, Cert) of
+ false ->
+ %% Try to extract DNS-IDs from URIs etc
+ DNS_ReferenceIDs =
+ [{dns_id,X} || X <- verify_hostname_fqnds(ReferenceIDs, FqdnFun)],
+ verify_hostname_match_loop(DNS_ReferenceIDs, PresentedIDs,
+ MatchFun, FailCB, Cert);
+ true ->
+ true
+ end
+ end.
%%--------------------------------------------------------------------
-spec ssh_decode(binary(), public_key | ssh_file()) -> [{public_key(), Attributes::list()}]
@@ -822,21 +893,31 @@ oid2ssh_curvename(?'secp521r1') -> <<"nistp521">>.
%%--------------------------------------------------------------------
-spec ssh_hostkey_fingerprint(public_key()) -> string().
--spec ssh_hostkey_fingerprint(digest_type(), public_key()) -> string().
+-spec ssh_hostkey_fingerprint( digest_type(), public_key()) -> string()
+ ; ([digest_type()], public_key()) -> [string()]
+ .
ssh_hostkey_fingerprint(Key) ->
- sshfp_string(md5, Key).
+ sshfp_string(md5, public_key:ssh_encode(Key,ssh2_pubkey) ).
-ssh_hostkey_fingerprint(HashAlg, Key) ->
- lists:concat([sshfp_alg_name(HashAlg),
- [$: | sshfp_string(HashAlg, Key)]
- ]).
+ssh_hostkey_fingerprint(HashAlgs, Key) when is_list(HashAlgs) ->
+ EncKey = public_key:ssh_encode(Key, ssh2_pubkey),
+ [sshfp_full_string(HashAlg,EncKey) || HashAlg <- HashAlgs];
+ssh_hostkey_fingerprint(HashAlg, Key) when is_atom(HashAlg) ->
+ EncKey = public_key:ssh_encode(Key, ssh2_pubkey),
+ sshfp_full_string(HashAlg, EncKey).
-sshfp_string(HashAlg, Key) ->
+
+sshfp_string(HashAlg, EncodedKey) ->
%% Other HashAlgs than md5 will be printed with
%% other formats than hextstr by
%% ssh-keygen -E <alg> -lf <file>
- fp_fmt(sshfp_fmt(HashAlg), crypto:hash(HashAlg, public_key:ssh_encode(Key,ssh2_pubkey))).
+ fp_fmt(sshfp_fmt(HashAlg), crypto:hash(HashAlg, EncodedKey)).
+
+sshfp_full_string(HashAlg, EncKey) ->
+ lists:concat([sshfp_alg_name(HashAlg),
+ [$: | sshfp_string(HashAlg, EncKey)]
+ ]).
sshfp_alg_name(sha) -> "SHA1";
sshfp_alg_name(Alg) -> string:to_upper(atom_to_list(Alg)).
@@ -1197,3 +1278,96 @@ ascii_to_lower(String) ->
end)>>
||
<<C>> <= iolist_to_binary(String) >>.
+
+%%%----------------------------------------------------------------
+%%% pkix_verify_hostname help functions
+verify_hostname_extract_fqdn_default({dns_id,S}) ->
+ S;
+verify_hostname_extract_fqdn_default({uri_id,URI}) ->
+ {ok,{https,_,Host,_,_,_}} = http_uri:parse(URI),
+ Host.
+
+
+verify_hostname_fqnds(L, FqdnFun) ->
+ [E || E0 <- L,
+ E <- [try case FqdnFun(E0) of
+ default -> verify_hostname_extract_fqdn_default(E0);
+ undefined -> undefined; % will make the "is_list(E)" test fail
+ Other -> Other
+ end
+ catch _:_-> undefined % will make the "is_list(E)" test fail
+ end],
+ is_list(E),
+ E =/= "",
+ {error,einval} == inet:parse_address(E)
+ ].
+
+
+-define(srvName_OID, {1,3,6,1,4,1,434,2,2,1,37,0}).
+
+verify_hostname_match_default(Ref, Pres) ->
+ verify_hostname_match_default0(to_lower_ascii(Ref), to_lower_ascii(Pres)).
+
+verify_hostname_match_default0(FQDN=[_|_], {cn,FQDN}) ->
+ not lists:member($*, FQDN);
+verify_hostname_match_default0(FQDN=[_|_], {cn,Name=[_|_]}) ->
+ [F1|Fs] = string:tokens(FQDN, "."),
+ [N1|Ns] = string:tokens(Name, "."),
+ match_wild(F1,N1) andalso Fs==Ns;
+verify_hostname_match_default0({dns_id,R}, {dNSName,P}) ->
+ R==P;
+verify_hostname_match_default0({uri_id,R}, {uniformResourceIdentifier,P}) ->
+ R==P;
+verify_hostname_match_default0({srv_id,R}, {T,P}) when T == srvName ;
+ T == ?srvName_OID ->
+ R==P;
+verify_hostname_match_default0(_, _) ->
+ false.
+
+
+match_wild(A, [$*|B]) -> match_wild_suffixes(A, B);
+match_wild([C|A], [ C|B]) -> match_wild(A, B);
+match_wild([], []) -> true;
+match_wild(_, _) -> false.
+
+%% Match the parts after the only wildcard by comparing them from the end
+match_wild_suffixes(A, B) -> match_wild_sfx(lists:reverse(A), lists:reverse(B)).
+
+match_wild_sfx([$*|_], _) -> false; % Bad name (no wildcards allowed)
+match_wild_sfx(_, [$*|_]) -> false; % Bad pattern (no more wildcards allowed)
+match_wild_sfx([A|Ar], [A|Br]) -> match_wild_sfx(Ar, Br);
+match_wild_sfx(Ar, []) -> not lists:member($*, Ar); % Check for bad name (= wildcards)
+match_wild_sfx(_, _) -> false.
+
+
+verify_hostname_match_loop(Refs0, Pres0, undefined, FailCB, Cert) ->
+ Pres = lists:map(fun to_lower_ascii/1, Pres0),
+ Refs = lists:map(fun to_lower_ascii/1, Refs0),
+ lists:any(
+ fun(R) ->
+ lists:any(fun(P) ->
+ verify_hostname_match_default(R,P) orelse FailCB(Cert)
+ end, Pres)
+ end, Refs);
+verify_hostname_match_loop(Refs, Pres, MatchFun, FailCB, Cert) ->
+ lists:any(
+ fun(R) ->
+ lists:any(fun(P) ->
+ (case MatchFun(R,P) of
+ default -> verify_hostname_match_default(R,P);
+ Bool -> Bool
+ end) orelse FailCB(Cert)
+ end,
+ Pres)
+ end,
+ Refs).
+
+
+to_lower_ascii(S) when is_list(S) -> lists:map(fun to_lower_ascii/1, S);
+to_lower_ascii({T,S}) -> {T, to_lower_ascii(S)};
+to_lower_ascii(C) when $A =< C,C =< $Z -> C + ($a-$A);
+to_lower_ascii(C) -> C.
+
+to_string(S) when is_list(S) -> S;
+to_string(B) when is_binary(B) -> binary_to_list(B).
+
diff --git a/lib/public_key/test/public_key_SUITE.erl b/lib/public_key/test/public_key_SUITE.erl
index cd24819899..68aa152911 100644
--- a/lib/public_key/test/public_key_SUITE.erl
+++ b/lib/public_key/test/public_key_SUITE.erl
@@ -45,13 +45,17 @@ all() ->
{group, sign_verify},
pkix, pkix_countryname, pkix_emailaddress, pkix_path_validation,
pkix_iso_rsa_oid, pkix_iso_dsa_oid, pkix_crl, general_name,
+ pkix_verify_hostname_cn,
+ pkix_verify_hostname_subjAltName,
+ pkix_verify_hostname_options,
short_cert_issuer_hash, short_crl_issuer_hash,
ssh_hostkey_fingerprint_md5_implicit,
ssh_hostkey_fingerprint_md5,
ssh_hostkey_fingerprint_sha,
ssh_hostkey_fingerprint_sha256,
ssh_hostkey_fingerprint_sha384,
- ssh_hostkey_fingerprint_sha512
+ ssh_hostkey_fingerprint_sha512,
+ ssh_hostkey_fingerprint_list
].
groups() ->
@@ -90,20 +94,21 @@ end_per_group(_GroupName, Config) ->
%%-------------------------------------------------------------------
init_per_testcase(TestCase, Config) ->
case TestCase of
- ssh_hostkey_fingerprint_md5_implicit -> init_fingerprint_testcase(md5, Config);
- ssh_hostkey_fingerprint_md5 -> init_fingerprint_testcase(md5, Config);
- ssh_hostkey_fingerprint_sha -> init_fingerprint_testcase(sha, Config);
- ssh_hostkey_fingerprint_sha256 -> init_fingerprint_testcase(sha256, Config);
- ssh_hostkey_fingerprint_sha384 -> init_fingerprint_testcase(sha384, Config);
- ssh_hostkey_fingerprint_sha512 -> init_fingerprint_testcase(sha512, Config);
+ ssh_hostkey_fingerprint_md5_implicit -> init_fingerprint_testcase([md5], Config);
+ ssh_hostkey_fingerprint_md5 -> init_fingerprint_testcase([md5], Config);
+ ssh_hostkey_fingerprint_sha -> init_fingerprint_testcase([sha], Config);
+ ssh_hostkey_fingerprint_sha256 -> init_fingerprint_testcase([sha256], Config);
+ ssh_hostkey_fingerprint_sha384 -> init_fingerprint_testcase([sha384], Config);
+ ssh_hostkey_fingerprint_sha512 -> init_fingerprint_testcase([sha512], Config);
+ ssh_hostkey_fingerprint_list -> init_fingerprint_testcase([sha,md5], Config);
_ -> init_common_per_testcase(Config)
end.
-init_fingerprint_testcase(Alg, Config) ->
- CryptoSupports = lists:member(Alg, proplists:get_value(hashs, crypto:supports())),
- case CryptoSupports of
- false -> {skip,{Alg,not_supported}};
- true -> init_common_per_testcase(Config)
+init_fingerprint_testcase(Algs, Config) ->
+ Hashs = proplists:get_value(hashs, crypto:supports(), []),
+ case Algs -- Hashs of
+ [] -> init_common_per_testcase(Config);
+ UnsupportedAlgs -> {skip,{UnsupportedAlgs,not_supported}}
end.
init_common_per_testcase(Config0) ->
@@ -597,6 +602,14 @@ ssh_hostkey_fingerprint_sha512(_Config) ->
Expected = public_key:ssh_hostkey_fingerprint(sha512, ssh_hostkey(rsa)).
%%--------------------------------------------------------------------
+%% Since this kind of fingerprint is not available yet on standard
+%% distros, we do it like this instead.
+ssh_hostkey_fingerprint_list(_Config) ->
+ Expected = ["SHA1:Soammnaqg06jrm2jivMSnzQGlmk",
+ "MD5:4b:0b:63:de:0f:a7:3a:ab:2c:cc:2d:d1:21:37:1d:3a"],
+ Expected = public_key:ssh_hostkey_fingerprint([sha,md5], ssh_hostkey(rsa)).
+
+%%--------------------------------------------------------------------
encrypt_decrypt() ->
[{doc, "Test public_key:encrypt_private and public_key:decrypt_public"}].
encrypt_decrypt(Config) when is_list(Config) ->
@@ -814,6 +827,114 @@ pkix_path_validation(Config) when is_list(Config) ->
ok.
%%--------------------------------------------------------------------
+%% To generate the PEM file contents:
+%%
+%% openssl req -x509 -nodes -newkey rsa:1024 -keyout /dev/null -subj '/C=SE/CN=example.com/CN=*.foo.example.com/CN=a*b.bar.example.com/O=erlang.org' > public_key_SUITE_data/pkix_verify_hostname_cn.pem
+%%
+%% Note that the same pem-file is used in pkix_verify_hostname_options/1
+%%
+%% Subject: C=SE, CN=example.com, CN=*.foo.example.com, CN=a*b.bar.example.com, O=erlang.org
+%% extensions = no subjAltName
+
+pkix_verify_hostname_cn(Config) ->
+ DataDir = proplists:get_value(data_dir, Config),
+ {ok,Bin} = file:read_file(filename:join(DataDir,"pkix_verify_hostname_cn.pem")),
+ Cert = public_key:pkix_decode_cert(element(2,hd(public_key:pem_decode(Bin))), otp),
+
+ %% Check that 1) only CNs are checked,
+ %% 2) an empty label does not match a wildcard and
+ %% 3) a wildcard does not match more than one label
+ false = public_key:pkix_verify_hostname(Cert, [{dns_id,"erlang.org"},
+ {dns_id,"foo.EXAMPLE.com"},
+ {dns_id,"b.a.foo.EXAMPLE.com"}]),
+
+ %% Check that a hostname is extracted from a https-uri and used for checking:
+ true = public_key:pkix_verify_hostname(Cert, [{uri_id,"HTTPS://EXAMPLE.com"}]),
+
+ %% Check wildcard matching one label:
+ true = public_key:pkix_verify_hostname(Cert, [{dns_id,"a.foo.EXAMPLE.com"}]),
+
+ %% Check wildcard with surrounding chars matches one label:
+ true = public_key:pkix_verify_hostname(Cert, [{dns_id,"accb.bar.EXAMPLE.com"}]),
+
+ %% Check that a wildcard with surrounding chars matches an empty string:
+ true = public_key:pkix_verify_hostname(Cert, [{uri_id,"https://ab.bar.EXAMPLE.com"}]).
+
+%%--------------------------------------------------------------------
+%% To generate the PEM file contents:
+%%
+%% openssl req -x509 -nodes -newkey rsa:1024 -keyout /dev/null -extensions SAN -config public_key_SUITE_data/verify_hostname.conf 2>/dev/null > public_key_SUITE_data/pkix_verify_hostname_subjAltName.pem
+%%
+%% Subject: C=SE, CN=example.com
+%% Subject Alternative Name: DNS:kb.example.org, URI:http://www.example.org, URI:https://wws.example.org
+
+pkix_verify_hostname_subjAltName(Config) ->
+ DataDir = proplists:get_value(data_dir, Config),
+ {ok,Bin} = file:read_file(filename:join(DataDir,"pkix_verify_hostname_subjAltName.pem")),
+ Cert = public_key:pkix_decode_cert(element(2,hd(public_key:pem_decode(Bin))), otp),
+
+ %% Check that neither a uri nor dns hostname matches a CN if subjAltName is present:
+ false = public_key:pkix_verify_hostname(Cert, [{uri_id,"https://example.com"},
+ {dns_id,"example.com"}]),
+
+ %% Check that a uri_id matches a URI subjAltName:
+ true = public_key:pkix_verify_hostname(Cert, [{uri_id,"https://wws.example.org"}]),
+
+ %% Check that a dns_id does not match a URI subjAltName:
+ false = public_key:pkix_verify_hostname(Cert, [{dns_id,"www.example.org"},
+ {dns_id,"wws.example.org"}]),
+
+ %% Check that a dns_id matches a DNS subjAltName:
+ true = public_key:pkix_verify_hostname(Cert, [{dns_id,"kb.example.org"}]).
+
+%%--------------------------------------------------------------------
+%% Uses the pem-file for pkix_verify_hostname_cn
+%% Subject: C=SE, CN=example.com, CN=*.foo.example.com, CN=a*b.bar.example.com, O=erlang.org
+pkix_verify_hostname_options(Config) ->
+ DataDir = proplists:get_value(data_dir, Config),
+ {ok,Bin} = file:read_file(filename:join(DataDir,"pkix_verify_hostname_cn.pem")),
+ Cert = public_key:pkix_decode_cert(element(2,hd(public_key:pem_decode(Bin))), otp),
+
+ %% Check that the fail_callback is called and is presented the correct certificate:
+ true = public_key:pkix_verify_hostname(Cert, [{dns_id,"erlang.org"}],
+ [{fail_callback,
+ fun(#'OTPCertificate'{}=C) when C==Cert ->
+ true; % To test the return value matters
+ (#'OTPCertificate'{}=C) ->
+ ct:log("~p:~p: Wrong cert:~n~p~nExpect~n~p",
+ [?MODULE, ?LINE, C, Cert]),
+ ct:fail("Wrong cert, see log");
+ (C) ->
+ ct:log("~p:~p: Bad cert: ~p",[?MODULE,?LINE,C]),
+ ct:fail("Bad cert, see log")
+ end}]),
+
+ %% Check the callback for user-provided match functions:
+ true = public_key:pkix_verify_hostname(Cert, [{dns_id,"very.wrong.domain"}],
+ [{match_fun,
+ fun("very.wrong.domain", {cn,"example.com"}) ->
+ true;
+ (_, _) ->
+ false
+ end}]),
+ false = public_key:pkix_verify_hostname(Cert, [{dns_id,"not.example.com"}],
+ [{match_fun, fun(_, _) -> default end}]),
+ true = public_key:pkix_verify_hostname(Cert, [{dns_id,"example.com"}],
+ [{match_fun, fun(_, _) -> default end}]),
+
+ %% Check the callback for user-provided fqdn extraction:
+ true = public_key:pkix_verify_hostname(Cert, [{uri_id,"some://very.wrong.domain"}],
+ [{fqdn_fun,
+ fun({uri_id, "some://very.wrong.domain"}) ->
+ "example.com";
+ (_) ->
+ ""
+ end}]),
+ true = public_key:pkix_verify_hostname(Cert, [{uri_id,"https://example.com"}],
+ [{fqdn_fun, fun(_) -> default end}]),
+ false = public_key:pkix_verify_hostname(Cert, [{uri_id,"some://very.wrong.domain"}]).
+
+%%--------------------------------------------------------------------
pkix_iso_rsa_oid() ->
[{doc, "Test workaround for supporting certs that use ISO oids"
" 1.3.14.3.2.29 instead of PKIX/PKCS oid"}].
diff --git a/lib/public_key/test/public_key_SUITE_data/pkix_verify_hostname_cn.pem b/lib/public_key/test/public_key_SUITE_data/pkix_verify_hostname_cn.pem
new file mode 100644
index 0000000000..9f7b428f9a
--- /dev/null
+++ b/lib/public_key/test/public_key_SUITE_data/pkix_verify_hostname_cn.pem
@@ -0,0 +1,17 @@
+-----BEGIN CERTIFICATE-----
+MIICsjCCAhugAwIBAgIJAMCGx1ezaJFRMA0GCSqGSIb3DQEBCwUAMHIxCzAJBgNV
+BAYTAlNFMRQwEgYDVQQDDAtleGFtcGxlLmNvbTEaMBgGA1UEAwwRKi5mb28uZXhh
+bXBsZS5jb20xHDAaBgNVBAMME2EqYi5iYXIuZXhhbXBsZS5jb20xEzARBgNVBAoM
+CmVybGFuZy5vcmcwHhcNMTYxMjIwMTUwNDUyWhcNMTcwMTE5MTUwNDUyWjByMQsw
+CQYDVQQGEwJTRTEUMBIGA1UEAwwLZXhhbXBsZS5jb20xGjAYBgNVBAMMESouZm9v
+LmV4YW1wbGUuY29tMRwwGgYDVQQDDBNhKmIuYmFyLmV4YW1wbGUuY29tMRMwEQYD
+VQQKDAplcmxhbmcub3JnMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDVGJgZ
+defGucvMXf0RrEm6Hb18IfVUo9IV6swSP/kwAu/608ZIZdzlfp2pxC0e72a4E3WN
+4vrGxAr2wMMQOiyoy4qlAeLX27THJ6Q4Vl82fc6QuOJbScKIydSZ4KoB+luGlBu5
+b6xYh2pBbneKFpsecmK5rsWtTactjD4n1tKjUwIDAQABo1AwTjAdBgNVHQ4EFgQU
+OCtzidUeaDva7qp12T0CQrgfLW4wHwYDVR0jBBgwFoAUOCtzidUeaDva7qp12T0C
+QrgfLW4wDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOBgQCAz+ComCMo9Qbu
+PHxG7pv3mQvoxrMFva/Asg4o9mW2mDyrk0DwI4zU8vMHbSRKSBYGm4TATXsQkDQT
+gJw/bxhISnhZZtPC7Yup8kJCkJ6S6EDLYrlzgsRqfeU6jWim3nbfaLyMi9dHFDMk
+HULnyNNW3qxTEKi8Wo2sCMej4l7KFg==
+-----END CERTIFICATE-----
diff --git a/lib/public_key/test/public_key_SUITE_data/pkix_verify_hostname_subjAltName.pem b/lib/public_key/test/public_key_SUITE_data/pkix_verify_hostname_subjAltName.pem
new file mode 100644
index 0000000000..83e1ad37b3
--- /dev/null
+++ b/lib/public_key/test/public_key_SUITE_data/pkix_verify_hostname_subjAltName.pem
@@ -0,0 +1,14 @@
+-----BEGIN CERTIFICATE-----
+MIICEjCCAXugAwIBAgIJANwliLph5EiAMA0GCSqGSIb3DQEBCwUAMCMxCzAJBgNV
+BAYTAlNFMRQwEgYDVQQDEwtleGFtcGxlLmNvbTAeFw0xNjEyMjAxNTEyMjRaFw0x
+NzAxMTkxNTEyMjRaMCMxCzAJBgNVBAYTAlNFMRQwEgYDVQQDEwtleGFtcGxlLmNv
+bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAydstIN157w8QxkVaOl3wm81j
+fgZ8gqO3BXkECPF6bw5ewLlmePL6Qs4RypsaRe7cKJ9rHFlwhpdcYkxWSWEt2N7Z
+Ry3N4SjuU04ohWbYgy3ijTt7bJg7jOV1Dh56BnI4hwhQj0oNFizNZOeRRfEzdMnS
++uk03t/Qre2NS7KbwnUCAwEAAaNOMEwwSgYDVR0RBEMwQYIOa2IuZXhhbXBsZS5v
+cmeGFmh0dHA6Ly93d3cuZXhhbXBsZS5vcmeGF2h0dHBzOi8vd3dzLmV4YW1wbGUu
+b3JnMA0GCSqGSIb3DQEBCwUAA4GBAKqFqW5gCso422bXriCBJoygokOTTOw1Rzpq
+K8Mm0B8W9rrW9OTkoLEcjekllZcUCZFin2HovHC5HlHZz+mQvBI1M6sN2HVQbSzS
+EgL66U9gwJVnn9/U1hXhJ0LO28aGbyE29DxnewNR741dWN3oFxCdlNaO6eMWaEsO
+gduJ5sDl
+-----END CERTIFICATE-----
diff --git a/lib/public_key/test/public_key_SUITE_data/verify_hostname.conf b/lib/public_key/test/public_key_SUITE_data/verify_hostname.conf
new file mode 100644
index 0000000000..a28864dc78
--- /dev/null
+++ b/lib/public_key/test/public_key_SUITE_data/verify_hostname.conf
@@ -0,0 +1,16 @@
+[req]
+prompt = no
+distinguished_name = DN
+
+[DN]
+C=SE
+CN=example.com
+
+[SAN]
+subjectAltName = @alt_names
+
+[alt_names]
+DNS = kb.example.org
+URI.1 = http://www.example.org
+URI.2 = https://wws.example.org
+
diff --git a/lib/runtime_tools/doc/src/LTTng.xml b/lib/runtime_tools/doc/src/LTTng.xml
index 82a4c79379..7aae5e5c41 100644
--- a/lib/runtime_tools/doc/src/LTTng.xml
+++ b/lib/runtime_tools/doc/src/LTTng.xml
@@ -1,4 +1,4 @@
-<?xml version="1.0" encoding="utf8" ?>
+<?xml version="1.0" encoding="utf-8" ?>
<!DOCTYPE chapter SYSTEM "chapter.dtd">
<chapter>
<header>
diff --git a/lib/runtime_tools/src/Makefile b/lib/runtime_tools/src/Makefile
index 2c902952a1..0ef6b1c521 100644
--- a/lib/runtime_tools/src/Makefile
+++ b/lib/runtime_tools/src/Makefile
@@ -42,7 +42,6 @@ MODULES= \
runtime_tools_sup \
dbg \
dyntrace \
- percept_profile \
system_information \
observer_backend \
ttb_autostart\
diff --git a/lib/runtime_tools/src/percept_profile.erl b/lib/runtime_tools/src/percept_profile.erl
deleted file mode 100644
index 1e8e913b80..0000000000
--- a/lib/runtime_tools/src/percept_profile.erl
+++ /dev/null
@@ -1,195 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2008-2016. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
-
-%%
-%% @doc Percept Collector
-%%
-%% This module provides the user interface for the percept data
-% collection (profiling).
-%%
-
--module(percept_profile).
--export([
- start/1,
- start/2,
- start/3,
- stop/0
- ]).
-
-
-%%==========================================================================
-%%
-%% Type definitions
-%%
-%%==========================================================================
-
-%% @type percept_option() = procs | ports | exclusive
-
--type percept_option() :: 'procs' | 'ports' | 'exclusive' | 'scheduler'.
-
-%%==========================================================================
-%%
-%% Interface functions
-%%
-%%==========================================================================
-
-%% @spec start(Filename::string()) -> {ok, Port} | {already_started, Port}
-%% @equiv start(Filename, [procs])
-
--spec start(Filename :: file:filename()) ->
- {'ok', port()} | {'already_started', port()}.
-
-start(Filename) ->
- profile_to_file(Filename, [procs]).
-
-%% @spec start(Filename::string(), [percept_option()]) -> {ok, Port} | {already_started, Port}
-%% Port = port()
-%% @doc Starts profiling with supplied options.
-%% All events are stored in the file given by Filename.
-%% An explicit call to stop/0 is needed to stop profiling.
-
--spec start(Filename :: file:filename(),
- Options :: [percept_option()]) ->
- {'ok', port()} | {'already_started', port()}.
-
-start(Filename, Options) ->
- profile_to_file(Filename, Options).
-
-%% @spec start(string(), MFA::mfa(), [percept_option()]) -> ok | {already_started, Port} | {error, not_started}
-%% Port = port()
-%% @doc Starts profiling at the entrypoint specified by the MFA. All events are collected,
-%% this means that processes outside the scope of the entry-point are also profiled.
-%% No explicit call to stop/0 is needed, the profiling stops when
-%% the entry function returns.
-
--spec start(Filename :: file:filename(),
- Entry :: {atom(), atom(), list()},
- Options :: [percept_option()]) ->
- 'ok' | {'already_started', port()} | {'error', 'not_started'}.
-
-start(Filename, {Module, Function, Args}, Options) ->
- case whereis(percept_port) of
- undefined ->
- {ok, _} = profile_to_file(Filename, Options),
- erlang:apply(Module, Function, Args),
- stop();
- Port ->
- {already_started, Port}
- end.
-
-deliver_all_trace() ->
- Tracee = self(),
- Tracer = spawn(fun() ->
- receive {Tracee, start} -> ok end,
- Ref = erlang:trace_delivered(Tracee),
- receive {trace_delivered, Tracee, Ref} -> Tracee ! {self(), ok} end
- end),
- erlang:trace(Tracee, true, [procs, {tracer, Tracer}]),
- Tracer ! {Tracee, start},
- receive {Tracer, ok} -> ok end,
- erlang:trace(Tracee, false, [procs]),
- ok.
-
-%% @spec stop() -> ok | {'error', 'not_started'}
-%% @doc Stops profiling.
-
--spec stop() -> 'ok' | {'error', 'not_started'}.
-
-stop() ->
- _ = erlang:system_profile(undefined, [runnable_ports, runnable_procs]),
- erlang:trace(all, false, [procs, ports, timestamp]),
- deliver_all_trace(),
- case whereis(percept_port) of
- undefined ->
- {error, not_started};
- Port ->
- erlang:port_command(Port, erlang:term_to_binary({profile_stop, erlang:timestamp()})),
- %% trace delivered?
- erlang:port_close(Port),
- ok
- end.
-
-%%==========================================================================
-%%
-%% Auxiliary functions
-%%
-%%==========================================================================
-
-profile_to_file(Filename, Opts) ->
- case whereis(percept_port) of
- undefined ->
- io:format("Starting profiling.~n", []),
-
- erlang:system_flag(multi_scheduling, block),
- Port = (dbg:trace_port(file, Filename))(),
- % Send start time
- erlang:port_command(Port, erlang:term_to_binary({profile_start, erlang:timestamp()})),
- erlang:system_flag(multi_scheduling, unblock),
-
- %% Register Port
- erlang:register(percept_port, Port),
- set_tracer(Port, Opts),
- {ok, Port};
- Port ->
- io:format("Profiling already started at port ~p.~n", [Port]),
- {already_started, Port}
- end.
-
-%% set_tracer
-
-set_tracer(Port, Opts) ->
- {TOpts, POpts} = parse_profile_options(Opts),
- % Setup profiling and tracing
- erlang:trace(all, true, [{tracer, Port}, timestamp | TOpts]),
- _ = erlang:system_profile(Port, POpts),
- ok.
-
-%% parse_profile_options
-
-parse_profile_options(Opts) ->
- parse_profile_options(Opts, {[],[]}).
-
-parse_profile_options([], Out) ->
- Out;
-parse_profile_options([Opt|Opts], {TOpts, POpts}) ->
- case Opt of
- procs ->
- parse_profile_options(Opts, {
- [procs | TOpts],
- [runnable_procs | POpts]
- });
- ports ->
- parse_profile_options(Opts, {
- [ports | TOpts],
- [runnable_ports | POpts]
- });
- scheduler ->
- parse_profile_options(Opts, {
- TOpts,
- [scheduler | POpts]
- });
- exclusive ->
- parse_profile_options(Opts, {
- TOpts,
- [exclusive | POpts]
- });
- _ ->
- parse_profile_options(Opts, {TOpts, POpts})
- end.
diff --git a/lib/runtime_tools/src/runtime_tools.app.src b/lib/runtime_tools/src/runtime_tools.app.src
index 690c61a4c3..d6c1f17e70 100644
--- a/lib/runtime_tools/src/runtime_tools.app.src
+++ b/lib/runtime_tools/src/runtime_tools.app.src
@@ -20,8 +20,8 @@
{application, runtime_tools,
[{description, "RUNTIME_TOOLS"},
{vsn, "%VSN%"},
- {modules, [appmon_info, dbg,observer_backend,percept_profile,
- runtime_tools,runtime_tools_sup,erts_alloc_config,
+ {modules, [appmon_info, dbg,observer_backend,runtime_tools,
+ runtime_tools_sup,erts_alloc_config,
ttb_autostart,dyntrace,system_information,
msacc]},
{registered, [runtime_tools_sup]},
@@ -30,5 +30,3 @@
{mod, {runtime_tools, []}},
{runtime_dependencies, ["stdlib-3.0","mnesia-4.12","kernel-5.0",
"erts-8.0"]}]}.
-
-
diff --git a/lib/sasl/doc/src/systools.xml b/lib/sasl/doc/src/systools.xml
index fa503fa573..4ca4a08329 100644
--- a/lib/sasl/doc/src/systools.xml
+++ b/lib/sasl/doc/src/systools.xml
@@ -268,7 +268,7 @@
<fsummary>Creates a release package.</fsummary>
<type>
<v>Name = string()</v>
- <v>Opt = {dirs,[IncDir]} | {path,[Dir]} | {variables,[Var]} | {var_tar,VarTar} | {erts,Dir} | src_tests | exref | {exref,[App]} | silent | {outdir,Dir}</v>
+ <v>Opt = {dirs,[IncDir]} | {path,[Dir]} | {variables,[Var]} | {var_tar,VarTar} | {erts,Dir} | src_tests | exref | {exref,[App]} | silent | {outdir,Dir} | no_warn_sasl | warnings_as_errors</v>
<v>&nbsp;Dir = string()</v>
<v>&nbsp;IncDir = src | include | atom()</v>
<v>&nbsp;Var = {VarName,PreFix}</v>
@@ -297,6 +297,10 @@
directory unless <c>Name</c> contains a path. If option
<c>{outdir,Dir}</c> is specified, it is located in <c>Dir</c>
instead.</p>
+ <p>If SASL is not included as an application in
+ the <c>.rel</c> file, a warning is issued because such a
+ release cannot be used in an upgrade. To turn off this
+ warning, add option <c>no_warn_sasl</c>.</p>
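+ <p>A minimal sketch of such a call (the release name is hypothetical):</p>
+ <code>
+ systools:make_tar("myrel", [no_warn_sasl]).
+ </code>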
<p>By default, the release package contains the directories
<c>lib/App-Vsn/ebin</c> and <c>lib/App-Vsn/priv</c> for each
included application. If more directories are to be included,
diff --git a/lib/sasl/src/release_handler.erl b/lib/sasl/src/release_handler.erl
index 1fcc9a0288..3250311b8f 100644
--- a/lib/sasl/src/release_handler.erl
+++ b/lib/sasl/src/release_handler.erl
@@ -831,7 +831,7 @@ do_unpack_release(Root, RelDir, ReleaseName, Releases) ->
Tar = filename:join(RelDir, ReleaseName ++ ".tar.gz"),
do_check_file(Tar, regular),
Rel = ReleaseName ++ ".rel",
- extract_rel_file(filename:join("releases", Rel), Tar, Root),
+ _ = extract_rel_file(filename:join("releases", Rel), Tar, Root),
RelFile = filename:join(RelDir, Rel),
Release = check_rel(Root, RelFile, false),
#release{vsn = Vsn} = Release,
@@ -1841,14 +1841,12 @@ do_check_file(Master, FileName, Type) ->
%% by the user in another way, i.e. ignore this here.
%%-----------------------------------------------------------------
extract_rel_file(Rel, Tar, Root) ->
- erl_tar:extract(Tar, [{files, [Rel]}, {cwd, Root}, compressed]).
+ _ = erl_tar:extract(Tar, [{files, [Rel]}, {cwd, Root}, compressed]).
extract_tar(Root, Tar) ->
case erl_tar:extract(Tar, [keep_old_files, {cwd, Root}, compressed]) of
ok ->
ok;
- {error, Reason, Name} -> % Old erl_tar.
- throw({error, {cannot_extract_file, Name, Reason}});
{error, {Name, Reason}} -> % New erl_tar (R3A).
throw({error, {cannot_extract_file, Name, Reason}})
end.
diff --git a/lib/sasl/src/systools_make.erl b/lib/sasl/src/systools_make.erl
index 6a16c8689e..f03b03dc08 100644
--- a/lib/sasl/src/systools_make.erl
+++ b/lib/sasl/src/systools_make.erl
@@ -94,7 +94,11 @@ make_script(RelName, Output, Flags) when is_list(RelName),
Warnings = wsasl(Flags, Warnings0),
case systools_lib:werror(Flags, Warnings) of
true ->
- return(ok,Warnings,Flags);
+ Warnings1 = [W || {warning,W}<-Warnings],
+ return({error,?MODULE,
+ {warnings_treated_as_errors,Warnings1}},
+ Warnings,
+ Flags);
false ->
case generate_script(Output,Release,Appls,Flags) of
ok ->
@@ -115,7 +119,6 @@ make_script(RelName, _Output, Flags) when is_list(Flags) ->
make_script(RelName, _Output, Flags) ->
badarg(Flags,[RelName, Flags]).
-
wsasl(Options, Warnings) ->
case lists:member(no_warn_sasl,Options) of
true -> lists:delete({warning,missing_sasl},Warnings);
@@ -148,21 +151,10 @@ get_outdir(Flags) ->
return(ok,Warnings,Flags) ->
case member(silent,Flags) of
true ->
- case systools_lib:werror(Flags, Warnings) of
- true ->
- error;
- false ->
- {ok,?MODULE,Warnings}
- end;
+ {ok,?MODULE,Warnings};
_ ->
- case member(warnings_as_errors,Flags) of
- true ->
- io:format("~ts",[format_warning(Warnings, true)]),
- error;
- false ->
- io:format("~ts",[format_warning(Warnings)]),
- ok
- end
+ io:format("~ts",[format_warning(Warnings)]),
+ ok
end;
return({error,Mod,Error},_,Flags) ->
case member(silent,Flags) of
@@ -300,6 +292,8 @@ add_apply_upgrade(Script,Args) ->
%% {variables,[{Name,AbsString}]}
%% {machine, jam | beam | vee}
%% {var_tar, include | ownfile | omit}
+%% no_warn_sasl
+%% warnings_as_errors
%%
%% The tar file contains:
%% lib/App-Vsn/ebin
@@ -332,13 +326,23 @@ make_tar(RelName, Flags) when is_list(RelName), is_list(Flags) ->
Path = make_set(Path1 ++ code:get_path()),
ModTestP = {member(src_tests, Flags),xref_p(Flags)},
case get_release(RelName, Path, ModTestP, machine(Flags)) of
- {ok, Release, Appls, Warnings} ->
- case catch mk_tar(RelName, Release, Appls, Flags, Path1) of
- ok ->
- return(ok,Warnings,Flags);
- Error ->
- return(Error,Warnings,Flags)
- end;
+ {ok, Release, Appls, Warnings0} ->
+ Warnings = wsasl(Flags, Warnings0),
+ case systools_lib:werror(Flags, Warnings) of
+ true ->
+ Warnings1 = [W || {warning,W}<-Warnings],
+ return({error,?MODULE,
+ {warnings_treated_as_errors,Warnings1}},
+ Warnings,
+ Flags);
+ false ->
+ case catch mk_tar(RelName, Release, Appls, Flags, Path1) of
+ ok ->
+ return(ok,Warnings,Flags);
+ Error ->
+ return(Error,Warnings,Flags)
+ end
+ end;
Error ->
return(Error,[],Flags)
end;
@@ -1904,8 +1908,10 @@ del_tar(Tar, TarName) ->
file:delete(TarName).
add_to_tar(Tar, FromFile, ToFile) ->
- case erl_tar:add(Tar, FromFile, ToFile, [compressed, dereference]) of
+ case catch erl_tar:add(Tar, FromFile, ToFile, [compressed, dereference]) of
ok -> ok;
+ {'EXIT', Reason} ->
+ throw({error, {tar_error, {add, FromFile, Reason}}});
{error, Error} ->
throw({error, {tar_error, {add, FromFile, Error}}})
end.
@@ -2113,90 +2119,80 @@ cas([Y | Args], X) ->
%% Check Options for make_tar
check_args_tar(Args) ->
- cat(Args, {undef, undef, undef, undef, undef, undef, undef, undef, undef, undef, []}).
+ cat(Args, []).
-cat([], {_Path,_Sil,_Dirs,_Erts,_Test,_Var,_VarTar,_Mach,_Xref,_XrefApps, X}) ->
+cat([], X) ->
X;
%%% path ---------------------------------------------------------------
-cat([{path, P} | Args], {Path, Sil, Dirs, Erts, Test,
- Var, VarTar, Mach, Xref, XrefApps, X}) when is_list(P) ->
+cat([{path, P} | Args], X) when is_list(P) ->
case check_path(P) of
ok ->
- cat(Args, {P, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X});
+ cat(Args, X);
error ->
- cat(Args, {Path, Sil, Dirs, Erts, Test,
- Var, VarTar, Mach, Xref, XrefApps, X++[{path,P}]})
+ cat(Args, X++[{path,P}])
end;
%%% silent -------------------------------------------------------------
-cat([silent | Args], {Path, _Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X}) ->
- cat(Args, {Path, silent, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X});
+cat([silent | Args], X) ->
+ cat(Args, X);
%%% dirs ---------------------------------------------------------------
-cat([{dirs, D} | Args], {Path, Sil, Dirs, Erts, Test,
- Var, VarTar, Mach, Xref, XrefApps, X}) ->
+cat([{dirs, D} | Args], X) ->
case check_dirs(D) of
ok ->
- cat(Args, {Path, Sil, D, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X});
+ cat(Args, X);
error ->
- cat(Args, {Path, Sil, Dirs, Erts, Test,
- Var, VarTar, Mach, Xref, XrefApps, X++[{dirs, D}]})
+ cat(Args, X++[{dirs, D}])
end;
%%% erts ---------------------------------------------------------------
-cat([{erts, E} | Args], {Path, Sil, Dirs, _Erts, Test,
- Var, VarTar, Mach, Xref, XrefApps, X}) when is_list(E)->
- cat(Args, {Path, Sil, Dirs, E, Test, Var, VarTar, Mach, Xref, XrefApps, X});
+cat([{erts, E} | Args], X) when is_list(E)->
+ cat(Args, X);
%%% src_tests ----------------------------------------------------
-cat([src_tests | Args], {Path, Sil, Dirs, Erts, _Test, Var, VarTar, Mach, Xref, XrefApps, X}) ->
- cat(Args, {Path, Sil, Dirs, Erts, src_tests, Var, VarTar, Mach,
- Xref, XrefApps, X});
+cat([src_tests | Args], X) ->
+ cat(Args, X);
%%% variables ----------------------------------------------------------
-cat([{variables, V} | Args], {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X}) when is_list(V) ->
+cat([{variables, V} | Args], X) when is_list(V) ->
case check_vars(V) of
ok ->
- cat(Args, {Path, Sil, Dirs, Erts, Test, V, VarTar, Mach, Xref, XrefApps, X});
+ cat(Args, X);
error ->
- cat(Args, {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach,
- Xref, XrefApps, X++[{variables, V}]})
+ cat(Args, X++[{variables, V}])
end;
%%% var_tar ------------------------------------------------------------
-cat([{var_tar, VT} | Args], {Path, Sil, Dirs, Erts, Test,
- Var, _VarTar, Mach, Xref, XrefApps, X}) when VT == include ->
- cat(Args, {Path, Sil, Dirs, Erts, Test, Var, include, Mach, Xref, XrefApps, X});
-cat([{var_tar, VT} | Args], {Path, Sil, Dirs, Erts, Test,
- Var, _VarTar, Mach, Xref, XrefApps, X}) when VT == ownfile ->
- cat(Args, {Path, Sil, Dirs, Erts, Test, Var, ownfile, Mach, Xref, XrefApps, X});
-cat([{var_tar, VT} | Args], {Path, Sil, Dirs, Erts, Test,
- Var, _VarTar, Mach, Xref, XrefApps, X}) when VT == omit ->
- cat(Args, {Path, Sil, Dirs, Erts, Test, Var, omit, Mach, Xref, XrefApps, X});
+cat([{var_tar, VT} | Args], X) when VT == include;
+ VT == ownfile;
+ VT == omit ->
+ cat(Args, X);
%%% machine ------------------------------------------------------------
-cat([{machine, M} | Args], {Path, Sil, Dirs, Erts, Test,
- Var, VarTar, Mach, Xref, XrefApps, X}) when is_atom(M) ->
- cat(Args, {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X});
+cat([{machine, M} | Args], X) when is_atom(M) ->
+ cat(Args, X);
%%% exref --------------------------------------------------------------
-cat([exref | Args], {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, _Xref, XrefApps, X}) ->
- cat(Args, {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, exref, XrefApps, X});
+cat([exref | Args], X) ->
+ cat(Args, X);
%%% exref Apps ---------------------------------------------------------
-cat([{exref, Apps} | Args], {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X}) when is_list(Apps) ->
+cat([{exref, Apps} | Args], X) when is_list(Apps) ->
case check_apps(Apps) of
ok ->
- cat(Args, {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach,
- Xref, Apps, X});
+ cat(Args, X);
error ->
- cat(Args, {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach,
- Xref, XrefApps, X++[{exref, Apps}]})
+ cat(Args, X++[{exref, Apps}])
end;
%%% outdir Dir ---------------------------------------------------------
-cat([{outdir, Dir} | Args], {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X}) when is_list(Dir) ->
- cat(Args, {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach,
- Xref, XrefApps, X});
+cat([{outdir, Dir} | Args], X) when is_list(Dir) ->
+ cat(Args, X);
%%% otp_build (secret, not documented) ---------------------------------
-cat([otp_build | Args], {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X}) ->
- cat(Args, {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X});
+cat([otp_build | Args], X) ->
+ cat(Args, X);
+%%% warnings_as_errors ----
+cat([warnings_as_errors | Args], X) ->
+ cat(Args, X);
+%%% no_warn_sasl ----
+cat([no_warn_sasl | Args], X) ->
+ cat(Args, X);
%%% no_module_tests (kept for backwards compatibility, but ignored) ----
-cat([no_module_tests | Args], {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X}) ->
- cat(Args, {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X});
+cat([no_module_tests | Args], X) ->
+ cat(Args, X);
%%% ERROR --------------------------------------------------------------
-cat([Y | Args], {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X}) ->
- cat(Args, {Path, Sil, Dirs, Erts, Test, Var, VarTar, Mach, Xref, XrefApps, X++[Y]}).
+cat([Y | Args], X) ->
+ cat(Args, X++[Y]).
check_path([]) ->
ok;
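After this refactoring, check_args_tar/1 is a plain accumulator: every recognized option is dropped and only unknown or malformed options survive into the returned list. A sketch of the effect (these are internal, unexported functions, and the bad options are illustrative only):

    [] = check_args_tar([silent, {outdir,"rel"}, no_warn_sasl, warnings_as_errors]),
    [{foo,bar}, not_an_option] = check_args_tar([silent, {foo,bar}, not_an_option]).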
@@ -2296,6 +2292,9 @@ format_error({delete,File,Error}) ->
[File,file:format_error(Error)]);
format_error({tar_error,What}) ->
form_tar_err(What);
+format_error({warnings_treated_as_errors,Warnings}) ->
+ io_lib:format("Warnings being treated as errors:~n~ts",
+ [map(fun(W) -> form_warn("",W) end, Warnings)]);
format_error(ListOfErrors) when is_list(ListOfErrors) ->
format_errors(ListOfErrors);
format_error(E) -> io_lib:format("~p~n",[E]).
@@ -2352,24 +2351,15 @@ form_tar_err({add, File, Error}) ->
%% Format warning
format_warning(Warnings) ->
- format_warning(Warnings, false).
-
-format_warning(Warnings, Werror) ->
- Prefix = case Werror of
- true ->
- "";
- false ->
- "*WARNING* "
- end,
- map(fun({warning,W}) -> form_warn(Prefix, W) end, Warnings).
-
-form_warn(Prefix, {source_not_found,{Mod,_,App,_,_}}) ->
+ map(fun({warning,W}) -> form_warn("*WARNING* ", W) end, Warnings).
+
+form_warn(Prefix, {source_not_found,{Mod,App,_}}) ->
io_lib:format("~ts~w: Source code not found: ~w.erl~n",
[Prefix,App,Mod]);
form_warn(Prefix, {{parse_error, File},{_,_,App,_,_}}) ->
io_lib:format("~ts~w: Parse error: ~p~n",
[Prefix,App,File]);
-form_warn(Prefix, {obj_out_of_date,{Mod,_,App,_,_}}) ->
+form_warn(Prefix, {obj_out_of_date,{Mod,App,_}}) ->
io_lib:format("~ts~w: Object code (~w) out of date~n",
[Prefix,App,Mod]);
form_warn(Prefix, {exref_undef, Undef}) ->
@@ -2379,8 +2369,8 @@ form_warn(Prefix, {exref_undef, Undef}) ->
end,
map(F, Undef);
form_warn(Prefix, missing_sasl) ->
- io_lib:format("~ts: Missing application sasl. "
+ io_lib:format("~tsMissing application sasl. "
"Can not upgrade with this release~n",
[Prefix]);
form_warn(Prefix, What) ->
- io_lib:format("~ts ~p~n", [Prefix,What]).
+ io_lib:format("~ts~p~n", [Prefix,What]).
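With the prefix handling folded into form_warn/2, the same warning text is reused both for the *WARNING* output and for the new warnings_treated_as_errors error. A sketch of the rendered text, using module-internal functions for illustration only:

    %% inside systools_make (sketch):
    io:format("~ts", [format_warning([{warning,missing_sasl}])]),
    %% *WARNING* Missing application sasl. Can not upgrade with this release
    io:format("~ts", [format_error({warnings_treated_as_errors,[missing_sasl]})]).
    %% Warnings being treated as errors:
    %% Missing application sasl. Can not upgrade with this release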
diff --git a/lib/sasl/src/systools_relup.erl b/lib/sasl/src/systools_relup.erl
index 28534dc0c8..7e1844b400 100644
--- a/lib/sasl/src/systools_relup.erl
+++ b/lib/sasl/src/systools_relup.erl
@@ -155,36 +155,12 @@ mk_relup(TopRelFile, BaseUpRelDcs, BaseDnRelDcs) ->
mk_relup(TopRelFile, BaseUpRelDcs, BaseDnRelDcs, Opts) ->
case check_opts(Opts) of
[] ->
- R = (catch do_mk_relup(TopRelFile,BaseUpRelDcs,BaseDnRelDcs,
- add_code_path(Opts), Opts)),
- case {get_opt(silent, Opts), get_opt(noexec, Opts)} of
- {false, false} ->
- case R of
- {ok, _Res, _Mod, Ws} ->
- print_warnings(Ws, Opts),
- case systools_lib:werror(Opts, Ws) of
- true ->
- error;
- false ->
- ok
- end;
- Other ->
- print_error(Other),
- error
- end;
- _ ->
- case R of
- {ok, _Res, _Mod, Ws} ->
- case systools_lib:werror(Opts, Ws) of
- true ->
- error;
- false ->
- R
- end;
- R ->
- R
- end
- end;
+ R = try do_mk_relup(TopRelFile,BaseUpRelDcs,BaseDnRelDcs,
+ add_code_path(Opts), Opts)
+ catch throw:Error ->
+ Error
+ end,
+ done_mk_relup(Opts, R);
BadArg ->
erlang:error({badarg, BadArg})
end.
@@ -224,17 +200,45 @@ do_mk_relup(TopRelFile, BaseUpRelDcs, BaseDnRelDcs, Path, Opts) ->
{Dn, Ws2} = foreach_baserel_dn(TopRel, TopApps, BaseDnRelDcs,
Path, Opts, Ws1),
Relup = {TopRel#release.vsn, Up, Dn},
- case systools_lib:werror(Opts, Ws2) of
- true ->
- ok;
- false ->
- write_relup_file(Relup, Opts)
- end,
- {ok, Relup, ?MODULE, Ws2};
+
+ {ok, Relup, Ws2};
Other ->
- throw(Other)
+ Other
end.
+done_mk_relup(Opts, {ok,Relup,Ws}) ->
+ WAE = get_opt(warnings_as_errors,Opts),
+ Silent = get_opt(silent,Opts),
+ Noexec = get_opt(noexec,Opts),
+
+ if WAE andalso Ws=/=[] ->
+ return_error(Silent,
+ {error,?MODULE,{warnings_treated_as_errors, Ws}});
+ not Noexec ->
+ case write_relup_file(Relup,Opts) of
+ ok ->
+ return_ok(Silent,Relup,Ws);
+ Error ->
+ return_error(Silent,Error)
+ end;
+ true -> % noexec
+ return_ok(true,Relup,Ws)
+ end;
+done_mk_relup(Opts, Error) ->
+ return_error(get_opt(silent,Opts) orelse get_opt(noexec,Opts), Error).
+
+return_error(true, Error) ->
+ Error;
+return_error(false, Error) ->
+ print_error(Error),
+ error.
+
+return_ok(true,Relup,Ws) ->
+ {ok,Relup,?MODULE,Ws};
+return_ok(false,_Relup,Ws) ->
+ print_warnings(Ws),
+ ok.
+
%%-----------------------------------------------------------------
%% foreach_baserel_up(Rel, TopApps, BaseRelDcs, Path, Opts, Ws) -> Ret
%% foreach_baserel_dn(Rel, TopApps, BaseRelDcs, Path, Opts, Ws) -> Ret
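done_mk_relup/2 now concentrates the return handling: silent and noexec callers get the {ok,Relup,Module,Ws} tuple or the error term, other callers get ok/error plus printed output, and warnings_as_errors takes precedence over both. A sketch, assuming hypothetical release names and, for the error case, at least one generated warning:

    {ok, _Relup, systools_relup, _Ws} =
        systools:make_relup("rel2", ["rel1"], ["rel1"], [noexec]),
    {error, systools_relup, {warnings_treated_as_errors, _Ws1}} =
        systools:make_relup("rel2", ["rel1"], ["rel1"],
                            [silent, warnings_as_errors]).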
@@ -529,33 +533,18 @@ to_list(X) when is_list(X) -> X.
%% Writes a relup file.
%%
write_relup_file(Relup, Opts) ->
- case get_opt(noexec, Opts) of
- true ->
- ok;
- _ ->
- Filename = case get_opt(outdir, Opts) of
- OutDir when is_list(OutDir) ->
- filename:join(filename:absname(OutDir),
- "relup");
- false ->
- "relup";
- Badarg ->
- throw({error, ?MODULE, {badarg, {outdir,Badarg}}})
- end,
-
- case file:open(Filename, [write]) of
- {ok, Fd} ->
- io:format(Fd, "~p.~n", [Relup]),
- case file:close(Fd) of
- ok -> ok;
- {error,Reason} ->
- throw({error, ?MODULE,
- {file_problem, {"relup", {close,Reason}}}})
- end;
- {error, Reason} ->
- throw({error, ?MODULE,
- {file_problem, {"relup", {open, Reason}}}})
- end
+ Filename = filename:join(filename:absname(get_opt(outdir,Opts)),
+ "relup"),
+ case file:open(Filename, [write]) of
+ {ok, Fd} ->
+ io:format(Fd, "~p.~n", [Relup]),
+ case file:close(Fd) of
+ ok -> ok;
+ {error,Reason} ->
+ {error, ?MODULE, {file_problem, {"relup", {close,Reason}}}}
+ end;
+ {error, Reason} ->
+ {error, ?MODULE, {file_problem, {"relup", {open, Reason}}}}
end.
add_code_path(Opts) ->
@@ -593,10 +582,9 @@ default(path) -> false;
default(noexec) -> false;
default(silent) -> false;
default(restart_emulator) -> false;
-default(outdir) -> false.
+default(outdir) -> ".";
+default(warnings_as_errors) -> false.
-print_error({'EXIT', Err}) ->
- print_error(Err);
print_error({error, Mod, Error}) ->
S = apply(Mod, format_error, [Error]),
io:format(S, []);
@@ -614,24 +602,20 @@ format_error({missing_sasl,Release}) ->
io_lib:format("No sasl application in release ~ts, ~ts. "
"Can not be upgraded.",
[Release#release.name, Release#release.vsn]);
+format_error({warnings_treated_as_errors, Warnings}) ->
+ io_lib:format("Warnings being treated as errors:~n~ts",
+ [[format_warning("",W) || W <- Warnings]]);
format_error(Error) ->
- io:format("~p~n", [Error]).
+ io_lib:format("~p~n", [Error]).
-print_warnings(Ws, Opts) when is_list(Ws) ->
- lists:foreach(fun(W) -> print_warning(W, Opts) end, Ws);
-print_warnings(W, Opts) ->
- print_warning(W, Opts).
+print_warnings(Ws) when is_list(Ws) ->
+ lists:foreach(fun(W) -> print_warning(W) end, Ws);
+print_warnings(W) ->
+ print_warning(W).
-print_warning(W, Opts) ->
- Prefix = case lists:member(warnings_as_errors, Opts) of
- true ->
- "";
- false ->
- "*WARNING* "
- end,
- S = format_warning(Prefix, W),
- io:format("~ts", [S]).
+print_warning(W) ->
+ io:format("~ts", [format_warning(W)]).
format_warning(W) ->
format_warning("*WARNING* ", W).
@@ -639,6 +623,8 @@ format_warning(W) ->
format_warning(Prefix, {erts_vsn_changed, {Rel1, Rel2}}) ->
io_lib:format("~tsThe ERTS version changed between ~p and ~p~n",
[Prefix, Rel1, Rel2]);
+format_warning(Prefix, pre_R15_emulator_upgrade) ->
+ io_lib:format("~tsUpgrade from an OTP version earlier than R15. New code should be compiled with the old emulator.~n",[Prefix]);
format_warning(Prefix, What) ->
io_lib:format("~ts~p~n",[Prefix, What]).
diff --git a/lib/sasl/test/systools_SUITE.erl b/lib/sasl/test/systools_SUITE.erl
index dd5f277a77..0c98232467 100644
--- a/lib/sasl/test/systools_SUITE.erl
+++ b/lib/sasl/test/systools_SUITE.erl
@@ -29,6 +29,8 @@
-module(systools_SUITE).
+-compile(export_all).
+
%%-define(debug, true).
-include_lib("common_test/include/ct.hrl").
@@ -39,31 +41,6 @@
-include_lib("kernel/include/file.hrl").
--export([all/0,suite/0,groups/0,init_per_group/2,end_per_group/2]).
-
--export([script_options/1, normal_script/1, unicode_script/1,
- unicode_script/2, no_mod_vsn_script/1,
- wildcard_script/1, variable_script/1, no_sasl_script/1,
- no_dot_erlang_script/1,
- abnormal_script/1, src_tests_script/1, crazy_script/1,
- included_script/1, included_override_script/1,
- included_fail_script/1, included_bug_script/1, exref_script/1,
- duplicate_modules_script/1,
- otp_3065_circular_dependenies/1, included_and_used_sort_script/1]).
--export([tar_options/1, normal_tar/1, no_mod_vsn_tar/1, system_files_tar/1,
- system_files_tar/2, invalid_system_files_tar/1,
- invalid_system_files_tar/2, variable_tar/1,
- src_tests_tar/1, var_tar/1, exref_tar/1, link_tar/1,
- otp_9507_path_ebin/1]).
--export([normal_relup/1, restart_relup/1, abnormal_relup/1, no_sasl_relup/1,
- no_appup_relup/1, bad_appup_relup/1, app_start_type_relup/1,
- regexp_relup/1]).
--export([normal_hybrid/1,hybrid_no_old_sasl/1,hybrid_no_new_sasl/1]).
--export([otp_6226_outdir/1, app_file_defaults/1]).
--export([init_per_suite/1, end_per_suite/1,
- init_per_testcase/2, end_per_testcase/2]).
--export([delete_tree/1]).
-
-import(lists, [foldl/3]).
-define(default_timeout, ?t:minutes(20)).
@@ -91,7 +68,8 @@ groups() ->
{tar, [],
[tar_options, normal_tar, no_mod_vsn_tar, system_files_tar,
invalid_system_files_tar, variable_tar,
- src_tests_tar, var_tar, exref_tar, link_tar, otp_9507_path_ebin]},
+ src_tests_tar, var_tar, exref_tar, link_tar, no_sasl_tar,
+ otp_9507_path_ebin]},
{relup, [],
[normal_relup, restart_relup, abnormal_relup, no_sasl_relup,
no_appup_relup, bad_appup_relup, app_start_type_relup, regexp_relup
@@ -238,6 +216,7 @@ normal_script(Config) when is_list(Config) ->
%% Check the same but w. silent flag
{ok, _, []} = systools:make_script(LatestName, [silent]),
+ {ok, _, []} = systools:make_script(LatestName, [silent,warnings_as_errors]),
%% Use the local option
ok = systools:make_script(LatestName, [local]),
@@ -456,9 +435,16 @@ no_sasl_script(Config) when is_list(Config) ->
{ok, _ , [{warning,missing_sasl}]} =
systools:make_script(LatestName,[{path, P},silent]),
+ {error, systools_make, {warnings_treated_as_errors,[missing_sasl]}} =
+ systools:make_script(LatestName,[{path, P},silent,warnings_as_errors]),
+
{ok, _ , []} =
systools:make_script(LatestName,[{path, P},silent, no_warn_sasl]),
+ {ok, _ , []} =
+ systools:make_script(LatestName,[{path, P},silent, no_warn_sasl,
+ warnings_as_errors]),
+
ok = file:set_cwd(OldDir),
ok.
@@ -525,7 +511,9 @@ src_tests_script(Config) when is_list(Config) ->
ok = file:delete(BootFile),
false = filelib:is_regular(BootFile),
%% With warnings_as_errors and src_tests option, an error should be issued
- error =
+ {error, systools_make,
+ {warnings_treated_as_errors, [{obj_out_of_date,_},
+ {source_not_found,_}]}} =
systools:make_script(LatestName, [silent, {path, N}, src_tests,
warnings_as_errors]),
error =
@@ -745,7 +733,7 @@ exref_script(Config) when is_list(Config) ->
ok = file:set_cwd(LatestDir),
- {ok, _, _} = systools:make_script(LatestName, [{path,P}, silent]),
+ {ok, _, []} = systools:make_script(LatestName, [{path,P}, silent]),
%% Complete exref
{ok, _, W1} =
@@ -894,10 +882,10 @@ normal_tar(Config) when is_list(Config) ->
ok = file:set_cwd(LatestDir),
- {ok, _, _} = systools:make_script(LatestName, [silent, {path, P}]),
+ {ok, _, []} = systools:make_script(LatestName, [silent, {path, P}]),
ok = systools:make_tar(LatestName, [{path, P}]),
ok = check_tar(fname([lib,'db-2.1',ebin,'db.app']), LatestName),
- {ok, _, _} = systools:make_tar(LatestName, [{path, P}, silent]),
+ {ok, _, []} = systools:make_tar(LatestName, [{path, P}, silent]),
ok = check_tar(fname([lib,'fe-3.1',ebin,'fe.app']), LatestName),
ok = file:set_cwd(OldDir),
@@ -918,10 +906,10 @@ no_mod_vsn_tar(Config) when is_list(Config) ->
ok = file:set_cwd(LatestDir),
- {ok, _, _} = systools:make_script(LatestName, [silent, {path, P}]),
+ {ok, _, []} = systools:make_script(LatestName, [silent, {path, P}]),
ok = systools:make_tar(LatestName, [{path, P}]),
ok = check_tar(fname([lib,'db-3.1',ebin,'db.app']), LatestName),
- {ok, _, _} = systools:make_tar(LatestName, [{path, P}, silent]),
+ {ok, _, []} = systools:make_tar(LatestName, [{path, P}, silent]),
ok = check_tar(fname([lib,'fe-3.1',ebin,'fe.app']), LatestName),
ok = file:set_cwd(OldDir),
@@ -945,11 +933,11 @@ system_files_tar(Config) ->
ok = file:write_file("sys.config","[].\n"),
ok = file:write_file("relup","{\"LATEST\",[],[]}.\n"),
- {ok, _, _} = systools:make_script(LatestName, [silent, {path, P}]),
+ {ok, _, []} = systools:make_script(LatestName, [silent, {path, P}]),
ok = systools:make_tar(LatestName, [{path, P}]),
ok = check_tar(fname(["releases","LATEST","sys.config"]), LatestName),
ok = check_tar(fname(["releases","LATEST","relup"]), LatestName),
- {ok, _, _} = systools:make_tar(LatestName, [{path, P}, silent]),
+ {ok, _, []} = systools:make_tar(LatestName, [{path, P}, silent]),
ok = check_tar(fname(["releases","LATEST","sys.config"]), LatestName),
ok = check_tar(fname(["releases","LATEST","relup"]), LatestName),
@@ -978,7 +966,7 @@ invalid_system_files_tar(Config) ->
ok = file:set_cwd(LatestDir),
- {ok, _, _} = systools:make_script(LatestName, [silent, {path, P}]),
+ {ok, _, []} = systools:make_script(LatestName, [silent, {path, P}]),
%% Add dummy relup and sys.config - faulty sys.config
ok = file:write_file("sys.config","[]\n"), %!!! syntax error - missing '.'
@@ -1036,7 +1024,7 @@ variable_tar(Config) when is_list(Config) ->
ok = file:set_cwd(LatestDir),
- {ok, _, _} = systools:make_script(LatestName,
+ {ok, _, []} = systools:make_script(LatestName,
[silent,
{path, P},
{variables,[{"TEST", LibDir}]}]),
@@ -1045,7 +1033,7 @@ variable_tar(Config) when is_list(Config) ->
{variables,[{"TEST", LibDir}]}]),
ok = check_var_tar("TEST", LatestName),
- {ok, _, _} = systools:make_tar(LatestName,
+ {ok, _, []} = systools:make_tar(LatestName,
[{path, P}, silent,
{variables,[{"TEST", LibDir}]}]),
ok = check_var_tar("TEST", LatestName),
@@ -1174,7 +1162,7 @@ var_tar(Config) when is_list(Config) ->
ok = file:set_cwd(LatestDir),
- {ok, _, _} = systools:make_script(LatestName,
+ {ok, _, []} = systools:make_script(LatestName,
[silent,
{path, P},
{variables,[{"TEST", LibDir}]}]),
@@ -1218,7 +1206,7 @@ exref_tar(Config) when is_list(Config) ->
ok = file:set_cwd(LatestDir),
- {ok, _, _} = systools:make_script(LatestName, [silent, {path, P}]),
+ {ok, _, []} = systools:make_script(LatestName, [silent, {path, P}]),
%% Complete exref
{ok, _, W1} =
@@ -1248,7 +1236,41 @@ exref_tar(Config) when is_list(Config) ->
ok = file:set_cwd(OldDir),
ok.
+%% make_tar: Create tar without sasl appl. Check warning.
+no_sasl_tar(Config) when is_list(Config) ->
+ {ok, OldDir} = file:get_cwd(),
+ {LatestDir, LatestName} = create_script(latest1_no_sasl,Config),
+
+ DataDir = filename:absname(?copydir),
+ LibDir = fname([DataDir, d_normal, lib]),
+ P = [fname([LibDir, '*', ebin]),
+ fname([DataDir, lib, kernel, ebin]),
+ fname([DataDir, lib, stdlib, ebin]),
+ fname([DataDir, lib, sasl, ebin])],
+
+ ok = file:set_cwd(LatestDir),
+
+ {ok, _, _} = systools:make_script(LatestName, [silent, {path, P}]),
+ ok = systools:make_tar(LatestName, [{path, P}]),
+ {ok, _, [{warning,missing_sasl}]} =
+ systools:make_tar(LatestName, [{path, P}, silent]),
+ {ok, _, []} =
+ systools:make_tar(LatestName, [{path, P}, silent, no_warn_sasl]),
+ {ok, _, []} =
+ systools:make_tar(LatestName, [{path, P}, silent, no_warn_sasl,
+ warnings_as_errors]),
+ TarFile = LatestName ++ ".tar.gz",
+ true = filelib:is_regular(TarFile),
+ ok = file:delete(TarFile),
+ {error, systools_make, {warnings_treated_as_errors,[missing_sasl]}} =
+ systools:make_tar(LatestName, [{path, P}, silent, warnings_as_errors]),
+ error =
+ systools:make_tar(LatestName, [{path, P}, warnings_as_errors]),
+ false = filelib:is_regular(TarFile),
+
+ ok = file:set_cwd(OldDir),
+ ok.
%% make_tar: OTP-9507 - make_tar failed when path given as just 'ebin'.
otp_9507_path_ebin(Config) when is_list(Config) ->
@@ -1268,7 +1290,7 @@ otp_9507_path_ebin(Config) when is_list(Config) ->
fname([DataDir, lib, kernel, ebin]),
fname([DataDir, lib, stdlib, ebin]),
fname([DataDir, lib, sasl, ebin])],
- {ok, _, _} = systools:make_script(RelName, [silent, {path, P1}]),
+ {ok, _, []} = systools:make_script(RelName, [silent, {path, P1}]),
ok = systools:make_tar(RelName, [{path, P1}]),
Content1 = tar_contents(RelName),
@@ -1309,7 +1331,7 @@ normal_relup(Config) when is_list(Config) ->
ok = systools:make_relup(LatestName, [LatestName1], [LatestName1],
[{path, P}]),
ok = check_relup([{db, "2.1"}], [{db, "1.0"}]),
- {ok, _, _, []} =
+ {ok, Relup, _, []} =
systools:make_relup(LatestName, [LatestName1], [LatestName1],
[{path, P}, silent]),
ok = check_relup([{db, "2.1"}], [{db, "1.0"}]),
@@ -1322,7 +1344,9 @@ normal_relup(Config) when is_list(Config) ->
error =
systools:make_relup(LatestName, [LatestName2], [LatestName1],
[{path, P}, warnings_as_errors]),
- error =
+ {error, systools_relup,
+ {warnings_treated_as_errors,[pre_R15_emulator_upgrade,
+ {erts_vsn_changed, _}]}} =
systools:make_relup(LatestName, [LatestName2], [LatestName1],
[{path, P}, silent, warnings_as_errors]),
@@ -1341,6 +1365,14 @@ normal_relup(Config) when is_list(Config) ->
%% relup file should exist now
true = filelib:is_regular("relup"),
+ %% file should not be written if noexec option is used.
+ %% delete before running tests.
+ ok = file:delete("relup"),
+ {ok,Relup,_,[]} =
+ systools:make_relup(LatestName, [LatestName1], [LatestName1],
+ [{path, P}, noexec]),
+ false = filelib:is_regular("relup"),
+
ok = file:set_cwd(OldDir),
ok.
diff --git a/lib/snmp/test/snmp_manager_test.erl b/lib/snmp/test/snmp_manager_test.erl
index 71f4017d8b..054e998af4 100644
--- a/lib/snmp/test/snmp_manager_test.erl
+++ b/lib/snmp/test/snmp_manager_test.erl
@@ -1760,7 +1760,7 @@ do_simple_sync_get2(Node, TargetName, Oids, Get, PostVerify)
"~n Rem: ~w", [Reply, _Rem]),
%% verify that the operation actually worked:
- %% The order should be the same, so no need to seach
+ %% The order should be the same, so no need to search
?line ok = case Reply of
{noError, 0, [#varbind{oid = ?sysObjectID_instance,
value = SysObjectID},
@@ -2709,7 +2709,7 @@ do_simple_set2(Node, TargetName, VAVs, Set, PostVerify) ->
"~n Rem: ~w", [Reply, _Rem]),
%% verify that the operation actually worked:
- %% The order should be the same, so no need to seach
+ %% The order should be the same, so no need to search
%% The value we get should be exactly the same as we sent
?line ok = case Reply of
{noError, 0, [#varbind{oid = ?sysName_instance,
@@ -5118,10 +5118,10 @@ inform_swarm_collector(N) ->
%% Note that we need to deal with re-transmissions!
%% That is, the agent did not receive the ack in time,
-%% and therefor did a re-transmit. This means that we
-%% expect to receive more inform's then we actually
-%% sent. So for sucess we assume:
-%%
+%% and therefore did a re-transmit. This means that we
+%% expect to receive more informs than we actually
+%% sent. So for success we assume:
+%%
%% SentAckCnt = N
%% RespCnt = N
%% RecvCnt >= N
diff --git a/lib/ssh/doc/src/ssh.xml b/lib/ssh/doc/src/ssh.xml
index 6b49f89449..f6e26f5ee8 100644
--- a/lib/ssh/doc/src/ssh.xml
+++ b/lib/ssh/doc/src/ssh.xml
@@ -153,7 +153,7 @@
<item>
<p>IP version to use.</p>
</item>
- <tag><c><![CDATA[{user_dir, string()}]]></c></tag>
+ <tag><marker id="opt_user_dir"></marker><c><![CDATA[{user_dir, string()}]]></c></tag>
<item>
<p>Sets the user directory, that is, the directory containing
<c>ssh</c> configuration files for the user, such as
@@ -175,22 +175,48 @@
supplied with this option.
</p>
</item>
- <tag><c><![CDATA[{silently_accept_hosts, boolean() | accept_fun() | {crypto:digest_type(), accept_fun()} }]]></c>
- <br/>
- <c><![CDATA[accept_fun() :: fun(PeerName::string(), FingerPrint::string()) -> boolean()]]></c>
+ <tag>
+ <c><![CDATA[{silently_accept_hosts, boolean()}]]></c> <br/>
+ <c><![CDATA[{silently_accept_hosts, CallbackFun}]]></c> <br/>
+ <c><![CDATA[{silently_accept_hosts, {HashAlgoSpec, CallbackFun} }]]></c> <br/>
+ <br/>
+ <c><![CDATA[HashAlgoSpec = crypto:digest_type() | [ crypto:digest_type() ] ]]></c><br/>
+ <c><![CDATA[CallbackFun = fun(PeerName, FingerPrint) -> boolean()]]></c><br/>
+ <c><![CDATA[PeerName = string()]]></c><br/>
+ <c><![CDATA[FingerPrint = string() | [ string() ] ]]></c>
</tag>
<item>
- <p>When <c>true</c>, hosts are added to the
- file <c><![CDATA[known_hosts]]></c> without asking the user.
- Defaults to <c>false</c> which will give a user question on stdio of whether to accept or reject a previously
- unseen host.</p>
- <p>If the option value is has an <c>accept_fun()</c>, that fun will called with the arguments
- <c>(PeerName, PeerHostKeyFingerPrint)</c>. The fingerprint is calculated on the Peer's Host Key with
- <seealso marker="public_key:public_key#ssh_hostkey_fingerprint-1">public_key:ssh_hostkey_fingerprint/1</seealso>.
- </p>
- <p>If the <c>crypto:digest_type()</c> is present, the fingerprint is calculated with that digest type by the function
- <seealso marker="public_key:public_key#ssh_hostkey_fingerprint-2">public_key:ssh_hostkey_fingerprint/2</seealso>.
- </p>
+ <p>This option guides the <c>connect</c> function on how to act when the connected server presents a Host
+ Key that the client has not seen before. The default is to ask the user with a question on stdio of whether to
+ accept or reject the new Host Key.
+ See also the option <seealso marker="#opt_user_dir"><c>user_dir</c></seealso>
+ for the path to the file <c>known_hosts</c> where previously accepted Host Keys are recorded.
+ </p>
+ <p>The option can be given in three different forms as seen above:</p>
+ <list>
+ <item>The value is a <c>boolean()</c>. The value <c>true</c> will make the client accept any unknown
+ Host Key without any user interaction. The value <c>false</c> keeps the default behaviour of asking
+ the user on stdio.
+ </item>
+ <item>A <c>CallbackFun</c> will be called and the boolean return value <c>true</c> will make the client
+ accept the Host Key. A return value of <c>false</c> will make the client reject the Host Key, and the
+ connection will therefore be closed. The arguments to the fun are:
+ <list type="bulleted">
+ <item><c>PeerName</c> - a string with the name or address of the remote host.</item>
+ <item><c>FingerPrint</c> - the fingerprint of the Host Key as
+ <seealso marker="public_key:public_key#ssh_hostkey_fingerprint-1">public_key:ssh_hostkey_fingerprint/1</seealso>
+ calculates it.
+ </item>
+ </list>
+ </item>
+ <item>A tuple <c>{HashAlgoSpec, CallbackFun}</c>. The <c>HashAlgoSpec</c> specifies which hash algorithm
+ shall be used to calculate the fingerprint used in the call of the <c>CallbackFun</c>. The <c>HashAlgoSpec</c>
+ is either an atom or a list of atoms as the first argument in
+ <seealso marker="public_key:public_key#ssh_hostkey_fingerprint-2">public_key:ssh_hostkey_fingerprint/2</seealso>.
+ If it is a list of hash algorithm names, the <c>FingerPrint</c> argument in the <c>CallbackFun</c> will be
+ a list of fingerprints in the same order as the corresponding name in the <c>HashAlgoSpec</c> list.
+ </item>
+ </list>
</item>
<tag><c><![CDATA[{user_interaction, boolean()}]]></c></tag>
<item>
@@ -200,7 +226,7 @@
supplying a password. Defaults to <c>true</c>.
Even if user interaction is allowed it can be
suppressed by other options, such as <c>silently_accept_hosts</c>
- and <c>password</c>. However, those optins are not always desirable
+ and <c>password</c>. However, those options are not always desirable
to use from a security point of view.</p>
</item>
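A sketch of the extended client option described above; the host, the credentials, and the fun body are illustrative only:

    AcceptFun =
        fun(PeerName, FingerPrints) ->
                %% With a list of hash algorithms, FingerPrints is a list,
                %% one fingerprint per algorithm, in the same order.
                io:format("~s: ~p~n", [PeerName, FingerPrints]),
                true
        end,
    {ok, _Conn} =
        ssh:connect("host.example.com", 22,
                    [{silently_accept_hosts, {[md5, sha], AcceptFun}},
                     {user, "alice"}, {password, "secret"}]).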
diff --git a/lib/ssh/doc/src/ssh_app.xml b/lib/ssh/doc/src/ssh_app.xml
index 5cc4c24889..5f710decc1 100644
--- a/lib/ssh/doc/src/ssh_app.xml
+++ b/lib/ssh/doc/src/ssh_app.xml
@@ -146,7 +146,10 @@
<item>diffie-hellman-group-exchange-sha1</item>
<item>diffie-hellman-group-exchange-sha256</item>
<item>diffie-hellman-group14-sha1</item>
- <item>diffie-hellman-group1-sha1</item>
+ <item>diffie-hellman-group14-sha256</item>
+ <item>diffie-hellman-group16-sha512</item>
+ <item>diffie-hellman-group18-sha512</item>
+ <item>(diffie-hellman-group1-sha1, retired: can be enabled with the <c>preferred_algorithms</c> option)</item>
</list>
</item>
@@ -157,7 +160,7 @@
<item>ecdsa-sha2-nistp384</item>
<item>ecdsa-sha2-nistp521</item>
<item>ssh-rsa</item>
- <item>ssh-dss</item>
+ <item>(ssh-dss, retired: can be enabled with the <c>preferred_algorithms</c> option)</item>
</list>
</item>
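As noted above, the retired algorithms are still implemented and can be re-enabled per connection (or per daemon) with the preferred_algorithms option. A sketch, with a hypothetical host name:

    DefKex = proplists:get_value(kex, ssh:default_algorithms()),
    DefPK  = proplists:get_value(public_key, ssh:default_algorithms()),
    {ok, _Conn} =
        ssh:connect("legacy.example.com", 22,
                    [{preferred_algorithms,
                      [{kex, DefKex ++ ['diffie-hellman-group1-sha1']},
                       {public_key, DefPK ++ ['ssh-dss']}]}]).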
@@ -306,6 +309,8 @@
<p>Comment: Defines hmac-sha2-256 and hmac-sha2-512
</p>
</item>
+
+ <item>Work in progress: <url href="https://tools.ietf.org/html/draft-ietf-curdle-ssh-kex-sha2">https://tools.ietf.org/html/draft-ietf-curdle-ssh-kex-sha2-05</url>, Key Exchange (KEX) Method Updates and Recommendations for Secure Shell (SSH)</item>
</list>
diff --git a/lib/ssh/src/ssh.app.src b/lib/ssh/src/ssh.app.src
index 76b7d8cd55..2bb7491b0c 100644
--- a/lib/ssh/src/ssh.app.src
+++ b/lib/ssh/src/ssh.app.src
@@ -48,4 +48,3 @@
"stdlib-3.1"
]}]}.
-
diff --git a/lib/ssh/src/ssh.erl b/lib/ssh/src/ssh.erl
index 31e343e81b..68d98d3875 100644
--- a/lib/ssh/src/ssh.erl
+++ b/lib/ssh/src/ssh.erl
@@ -280,9 +280,11 @@ valid_socket_to_use(Socket, Options) ->
{error, {unsupported,L4}}
end.
-is_tcp_socket(Socket) -> {ok,[]} =/= inet:getopts(Socket, [delay_send]).
-
-
+is_tcp_socket(Socket) ->
+ case inet:getopts(Socket, [delay_send]) of
+ {ok,[_]} -> true;
+ _ -> false
+ end.
daemon_shell_opt(Options) ->
case proplists:get_value(shell, Options) of
@@ -317,6 +319,7 @@ start_daemon(Socket, Options) ->
do_start_daemon(Socket, [{role,server}|SshOptions], SocketOptions)
catch
throw:bad_fd -> {error,bad_fd};
+ throw:bad_socket -> {error,bad_socket};
_C:_E -> {error,{cannot_start_daemon,_C,_E}}
end;
{error,SockError} ->
@@ -333,6 +336,7 @@ start_daemon(Host, Port, Options, Inet) ->
do_start_daemon(Host, Port, [{role,server}|SshOptions] , [Inet|SocketOptions])
catch
throw:bad_fd -> {error,bad_fd};
+ throw:bad_socket -> {error,bad_socket};
_C:_E -> {error,{cannot_start_daemon,_C,_E}}
end
end.
@@ -362,8 +366,7 @@ do_start_daemon(Socket, SshOptions, SocketOptions) ->
{error, {already_started, _}} ->
{error, eaddrinuse};
Result = {ok,_} ->
- ssh_acceptor:handle_connection(Callback, Host, Port, Opts, Socket),
- Result;
+ call_ssh_acceptor_handle_connection(Callback, Host, Port, Opts, Socket, Result);
Result = {error, _} ->
Result
catch
@@ -376,8 +379,7 @@ do_start_daemon(Socket, SshOptions, SocketOptions) ->
{error, {already_started, _}} ->
{error, eaddrinuse};
{ok, _} ->
- ssh_acceptor:handle_connection(Callback, Host, Port, Opts, Socket),
- {ok, Sup};
+ call_ssh_acceptor_handle_connection(Callback, Host, Port, Opts, Socket, {ok, Sup});
Other ->
Other
end
@@ -447,6 +449,16 @@ do_start_daemon(Host0, Port0, SshOptions, SocketOptions) ->
end
end.
+call_ssh_acceptor_handle_connection(Callback, Host, Port, Opts, Socket, DefaultResult) ->
+ try ssh_acceptor:handle_connection(Callback, Host, Port, Opts, Socket)
+ of
+ {error,Error} -> {error,Error};
+ _ -> DefaultResult
+ catch
+ C:R -> {error,{could_not_start_connection,{C,R}}}
+ end.
+
+
sync_request_control(false) ->
ok;
sync_request_control({LSock,Callback}) ->
@@ -620,11 +632,22 @@ handle_ssh_option({silently_accept_hosts, Value} = Opt) when is_boolean(Value) -
handle_ssh_option({silently_accept_hosts, Value} = Opt) when is_function(Value,2) ->
Opt;
handle_ssh_option({silently_accept_hosts, {DigestAlg,Value}} = Opt) when is_function(Value,2) ->
- case lists:member(DigestAlg, [md5, sha, sha224, sha256, sha384, sha512]) of
- true ->
- Opt;
- false ->
- throw({error, {eoptions, Opt}})
+ Algs = if is_atom(DigestAlg) -> [DigestAlg];
+ is_list(DigestAlg) -> DigestAlg;
+ true -> throw({error, {eoptions, Opt}})
+ end,
+ case [A || A <- Algs,
+ not lists:member(A, [md5, sha, sha224, sha256, sha384, sha512])] of
+ [_|_] = UnSup1 ->
+ throw({error, {{eoptions, Opt}, {not_fingerprint_algos,UnSup1}}});
+ [] ->
+ CryptoHashAlgs = proplists:get_value(hashs, crypto:supports(), []),
+ case [A || A <- Algs,
+ not lists:member(A, CryptoHashAlgs)] of
+ [_|_] = UnSup2 ->
+ throw({error, {{eoptions, Opt}, {unsupported_algo,UnSup2}}});
+ [] -> Opt
+ end
end;
handle_ssh_option({user_interaction, Value} = Opt) when is_boolean(Value) ->
Opt;
diff --git a/lib/ssh/src/ssh_bits.erl b/lib/ssh/src/ssh_bits.erl
index 8bedaaf0c5..3ce7758447 100644
--- a/lib/ssh/src/ssh_bits.erl
+++ b/lib/ssh/src/ssh_bits.erl
@@ -30,39 +30,31 @@
-export([random/1]).
%%%----------------------------------------------------------------
-name_list([Name]) -> to_bin(Name);
-name_list([Name|Ns]) -> <<(to_bin(Name))/binary, ",", (name_list(Ns))/binary>>;
-name_list([]) -> <<>>.
-
-to_bin(A) when is_atom(A) -> list_to_binary(atom_to_list(A));
-to_bin(S) when is_list(S) -> list_to_binary(S);
-to_bin(B) when is_binary(B) -> B.
+name_list(NamesList) -> list_to_binary(lists:join($,, NamesList)).
%%%----------------------------------------------------------------
%%% Multi Precision Integer encoding
mpint(-1) -> <<0,0,0,1,16#ff>>;
mpint(0) -> <<0,0,0,0>>;
-mpint(X) when X < 0 -> mpint_neg(X,0,[]);
-mpint(X) -> mpint_pos(X,0,[]).
-
-mpint_neg(-1,I,Ds=[MSB|_]) ->
- if MSB band 16#80 =/= 16#80 ->
- <<?UINT32((I+1)), (list_to_binary([255|Ds]))/binary>>;
- true ->
- <<?UINT32(I), (list_to_binary(Ds))/binary>>
- end;
-mpint_neg(X,I,Ds) ->
- mpint_neg(X bsr 8,I+1,[(X band 255)|Ds]).
-
-mpint_pos(0,I,Ds=[MSB|_]) ->
- if MSB band 16#80 == 16#80 ->
- <<?UINT32((I+1)), (list_to_binary([0|Ds]))/binary>>;
- true ->
- <<?UINT32(I), (list_to_binary(Ds))/binary>>
+mpint(I) when I>0 ->
+ <<B1,V/binary>> = binary:encode_unsigned(I),
+ case B1 band 16#80 of
+ 16#80 ->
+ <<(size(V)+2):32/unsigned-big-integer, 0,B1,V/binary >>;
+ _ ->
+ <<(size(V)+1):32/unsigned-big-integer, B1,V/binary >>
end;
-mpint_pos(X,I,Ds) ->
- mpint_pos(X bsr 8,I+1,[(X band 255)|Ds]).
-
+mpint(N) when N<0 ->
+ Sxn = 8*size(binary:encode_unsigned(-N)),
+ Sxn1 = Sxn+8,
+ <<W:Sxn1>> = <<1, 0:Sxn>>,
+ <<B1,V/binary>> = binary:encode_unsigned(W+N),
+ case B1 band 16#80 of
+ 16#80 ->
+ <<(size(V)+1):32/unsigned-big-integer, B1,V/binary >>;
+ _ ->
+ <<(size(V)+2):32/unsigned-big-integer, 255,B1,V/binary >>
+ end.
%%%----------------------------------------------------------------
%% random/1
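The rewritten mpint/1 keeps the RFC 4251 encoding: a 32-bit big-endian length followed by the two's-complement magnitude, with one extra leading byte whenever the sign bit would otherwise be misread. A few sanity values as a sketch (the third one is the example from RFC 4251, section 5):

    <<0,0,0,0>>             = ssh_bits:mpint(0),
    <<0,0,0,2,16#00,16#80>> = ssh_bits:mpint(16#80),   %% leading zero: high bit set
    <<0,0,0,8,16#09,16#a3,16#78,16#f9,16#b2,16#e3,16#32,16#a7>> =
        ssh_bits:mpint(16#9a378f9b2e332a7),
    <<0,0,0,1,16#ff>>       = ssh_bits:mpint(-1).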
diff --git a/lib/ssh/src/ssh_cli.erl b/lib/ssh/src/ssh_cli.erl
index 8af0ecc5f9..6f8c050486 100644
--- a/lib/ssh/src/ssh_cli.erl
+++ b/lib/ssh/src/ssh_cli.erl
@@ -453,14 +453,20 @@ move_cursor(From, To, #ssh_pty{width=Width, term=Type}) ->
%% %%% make sure that there is data to send
%% %%% before calling ssh_connection:send
write_chars(ConnectionHandler, ChannelId, Chars) ->
- case erlang:iolist_size(Chars) of
- 0 ->
- ok;
- _ ->
- ssh_connection:send(ConnectionHandler, ChannelId,
- ?SSH_EXTENDED_DATA_DEFAULT, Chars)
+ case has_chars(Chars) of
+ false -> ok;
+ true -> ssh_connection:send(ConnectionHandler,
+ ChannelId,
+ ?SSH_EXTENDED_DATA_DEFAULT,
+ Chars)
end.
+has_chars([C|_]) when is_integer(C) -> true;
+has_chars([H|T]) when is_list(H) ; is_binary(H) -> has_chars(H) orelse has_chars(T);
+has_chars(<<_:8,_/binary>>) -> true;
+has_chars(_) -> false.
+
+
%%% tail, works with empty lists
tl1([_|A]) -> A;
tl1(_) -> [].
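has_chars/1 answers "does this iolist contain at least one character?" without first computing the full iolist size, which matters when echoing large outputs. A sketch of the intended behaviour (internal function, illustrative inputs):

    true  = has_chars(["", <<>>, [<<"x">>]]),
    true  = has_chars([$a | <<"bc">>]),
    false = has_chars([[], "", <<>>]).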
diff --git a/lib/ssh/src/ssh_connection_handler.erl b/lib/ssh/src/ssh_connection_handler.erl
index 7451c9e6d0..dcf509ca09 100644
--- a/lib/ssh/src/ssh_connection_handler.erl
+++ b/lib/ssh/src/ssh_connection_handler.erl
@@ -609,13 +609,15 @@ handle_event(_, #ssh_msg_kexdh_reply{} = Msg, {key_exchange,client,ReNeg}, D) ->
%%%---- diffie-hellman group exchange
handle_event(_, #ssh_msg_kex_dh_gex_request{} = Msg, {key_exchange,server,ReNeg}, D) ->
- {ok, GexGroup, Ssh} = ssh_transport:handle_kex_dh_gex_request(Msg, D#data.ssh_params),
+ {ok, GexGroup, Ssh1} = ssh_transport:handle_kex_dh_gex_request(Msg, D#data.ssh_params),
send_bytes(GexGroup, D),
+ Ssh = ssh_transport:parallell_gen_key(Ssh1),
{next_state, {key_exchange_dh_gex_init,server,ReNeg}, D#data{ssh_params=Ssh}};
handle_event(_, #ssh_msg_kex_dh_gex_request_old{} = Msg, {key_exchange,server,ReNeg}, D) ->
- {ok, GexGroup, Ssh} = ssh_transport:handle_kex_dh_gex_request(Msg, D#data.ssh_params),
+ {ok, GexGroup, Ssh1} = ssh_transport:handle_kex_dh_gex_request(Msg, D#data.ssh_params),
send_bytes(GexGroup, D),
+ Ssh = ssh_transport:parallell_gen_key(Ssh1),
{next_state, {key_exchange_dh_gex_init,server,ReNeg}, D#data{ssh_params=Ssh}};
handle_event(_, #ssh_msg_kex_dh_gex_group{} = Msg, {key_exchange,client,ReNeg}, D) ->
@@ -1206,7 +1208,7 @@ handle_event(info, {Proto, Sock, NewData}, StateName, D0 = #data{socket = Sock,
catch
_C:_E ->
disconnect(#ssh_msg_disconnect{code = ?SSH_DISCONNECT_PROTOCOL_ERROR,
- description = "Encountered unexpected input"},
+ description = "Bad packet"},
StateName, D)
end;
@@ -1221,13 +1223,12 @@ handle_event(info, {Proto, Sock, NewData}, StateName, D0 = #data{socket = Sock,
{bad_mac, Ssh1} ->
disconnect(#ssh_msg_disconnect{code = ?SSH_DISCONNECT_PROTOCOL_ERROR,
- description = "Bad mac"},
+ description = "Bad packet"},
StateName, D0#data{ssh_params=Ssh1});
- {error, {exceeds_max_size,PacketLen}} ->
+ {error, {exceeds_max_size,_PacketLen}} ->
disconnect(#ssh_msg_disconnect{code = ?SSH_DISCONNECT_PROTOCOL_ERROR,
- description = "Bad packet length "
- ++ integer_to_list(PacketLen)},
+ description = "Bad packet"},
StateName, D0)
catch
_C:_E ->
@@ -1480,31 +1481,36 @@ renegotiation(_) -> false.
%%--------------------------------------------------------------------
supported_host_keys(client, _, Options) ->
try
- case proplists:get_value(public_key,
- proplists:get_value(preferred_algorithms,Options,[])
- ) of
- undefined ->
- ssh_transport:default_algorithms(public_key);
- L ->
- L -- (L--ssh_transport:default_algorithms(public_key))
- end
+ find_sup_hkeys(Options)
of
[] ->
- {stop, {shutdown, "No public key algs"}};
+ error({shutdown, "No public key algs"});
Algs ->
[atom_to_list(A) || A<-Algs]
catch
exit:Reason ->
- {stop, {shutdown, Reason}}
+ error({shutdown, Reason})
end;
supported_host_keys(server, KeyCb, Options) ->
- [atom_to_list(A) || A <- proplists:get_value(public_key,
- proplists:get_value(preferred_algorithms,Options,[]),
- ssh_transport:default_algorithms(public_key)
- ),
+ [atom_to_list(A) || A <- find_sup_hkeys(Options),
available_host_key(KeyCb, A, Options)
].
+
+find_sup_hkeys(Options) ->
+ case proplists:get_value(public_key,
+ proplists:get_value(preferred_algorithms,Options,[])
+ )
+ of
+ undefined ->
+ ssh_transport:default_algorithms(public_key);
+ L ->
+ NonSupported = L--ssh_transport:supported_algorithms(public_key),
+ L -- NonSupported
+ end.
+
+
+
%% Alg :: atom()
available_host_key(KeyCb, Alg, Opts) ->
element(1, catch KeyCb:host_key(Alg, Opts)) == ok.
diff --git a/lib/ssh/src/ssh_dbg.erl b/lib/ssh/src/ssh_dbg.erl
index dff2bae9f2..0345bbdea7 100644
--- a/lib/ssh/src/ssh_dbg.erl
+++ b/lib/ssh/src/ssh_dbg.erl
@@ -50,50 +50,61 @@ messages(Write, MangleArg) when is_function(Write,2),
is_function(MangleArg,1) ->
catch dbg:start(),
setup_tracer(Write, MangleArg),
- dbg:p(new,c),
+ dbg:p(new,[c,timestamp]),
dbg_ssh_messages().
dbg_ssh_messages() ->
dbg:tp(ssh_message,encode,1, x),
dbg:tp(ssh_message,decode,1, x),
- dbg:tpl(ssh_transport,select_algorithm,3, x).
-
+ dbg:tpl(ssh_transport,select_algorithm,3, x),
+ dbg:tp(ssh_transport,hello_version_msg,1, x),
+ dbg:tp(ssh_transport,handle_hello_version,1, x).
+
%%%----------------------------------------------------------------
stop() ->
dbg:stop().
%%%================================================================
-msg_formater({trace,Pid,call,{ssh_message,encode,[Msg]}}, D) ->
- fmt("~nSEND ~p ~s~n", [Pid,wr_record(shrink_bin(Msg))], D);
-msg_formater({trace,_Pid,return_from,{ssh_message,encode,1},_Res}, D) ->
+msg_formater({trace_ts,Pid,call,{ssh_message,encode,[Msg]},TS}, D) ->
+ fmt("~n~s SEND ~p ~s~n", [ts(TS),Pid,wr_record(shrink_bin(Msg))], D);
+msg_formater({trace_ts,_Pid,return_from,{ssh_message,encode,1},_Res,_TS}, D) ->
D;
-msg_formater({trace,_Pid,call,{ssh_message,decode,_}}, D) ->
+msg_formater({trace_ts,_Pid,call,{ssh_message,decode,_},_TS}, D) ->
D;
-msg_formater({trace,Pid,return_from,{ssh_message,decode,1},Msg}, D) ->
- fmt("~n~p RECV ~s~n", [Pid,wr_record(shrink_bin(Msg))], D);
+msg_formater({trace_ts,Pid,return_from,{ssh_message,decode,1},Msg,TS}, D) ->
+ fmt("~n~s ~p RECV ~s~n", [ts(TS),Pid,wr_record(shrink_bin(Msg))], D);
-msg_formater({trace,_Pid,call,{ssh_transport,select_algorithm,_}}, D) ->
+msg_formater({trace_ts,_Pid,call,{ssh_transport,select_algorithm,_},_TS}, D) ->
+ D;
+msg_formater({trace_ts,Pid,return_from,{ssh_transport,select_algorithm,3},{ok,Alg},TS}, D) ->
+ fmt("~n~s ~p ALGORITHMS~n~s~n", [ts(TS),Pid, wr_record(Alg)], D);
+
+msg_formater({trace_ts,_Pid,call,{ssh_transport,hello_version_msg,_},_TS}, D) ->
D;
-msg_formater({trace,Pid,return_from,{ssh_transport,select_algorithm,3},{ok,Alg}}, D) ->
- fmt("~n~p ALGORITHMS~n~s~n", [Pid, wr_record(Alg)], D);
+msg_formater({trace_ts,Pid,return_from,{ssh_transport,hello_version_msg,1},Hello,TS}, D) ->
+ fmt("~n~s ~p TCP SEND HELLO~n ~p~n", [ts(TS),Pid,lists:flatten(Hello)], D);
+msg_formater({trace_ts,Pid,call,{ssh_transport,handle_hello_version,[Hello]},TS}, D) ->
+ fmt("~n~s ~p RECV HELLO~n ~p~n", [ts(TS),Pid,lists:flatten(Hello)], D);
+msg_formater({trace_ts,_Pid,return_from,{ssh_transport,handle_hello_version,1},_,_TS}, D) ->
+ D;
-msg_formater({trace,Pid,send,{tcp,Sock,Bytes},Pid}, D) ->
- fmt("~n~p TCP SEND on ~p~n ~p~n", [Pid,Sock, shrink_bin(Bytes)], D);
+msg_formater({trace_ts,Pid,send,{tcp,Sock,Bytes},Pid,TS}, D) ->
+ fmt("~n~s ~p TCP SEND on ~p~n ~p~n", [ts(TS),Pid,Sock, shrink_bin(Bytes)], D);
-msg_formater({trace,Pid,send,{tcp,Sock,Bytes},Dest}, D) ->
- fmt("~n~p TCP SEND from ~p TO ~p~n ~p~n", [Pid,Sock,Dest, shrink_bin(Bytes)], D);
+msg_formater({trace_ts,Pid,send,{tcp,Sock,Bytes},Dest,TS}, D) ->
+ fmt("~n~s ~p TCP SEND from ~p TO ~p~n ~p~n", [ts(TS),Pid,Sock,Dest, shrink_bin(Bytes)], D);
-msg_formater({trace,Pid,send,ErlangMsg,Dest}, D) ->
- fmt("~n~p ERL MSG SEND TO ~p~n ~p~n", [Pid,Dest, shrink_bin(ErlangMsg)], D);
+msg_formater({trace_ts,Pid,send,ErlangMsg,Dest,TS}, D) ->
+ fmt("~n~s ~p ERL MSG SEND TO ~p~n ~p~n", [ts(TS),Pid,Dest, shrink_bin(ErlangMsg)], D);
-msg_formater({trace,Pid,'receive',{tcp,Sock,Bytes}}, D) ->
- fmt("~n~p TCP RECEIVE on ~p~n ~p~n", [Pid,Sock,shrink_bin(Bytes)], D);
+msg_formater({trace_ts,Pid,'receive',{tcp,Sock,Bytes},TS}, D) ->
+ fmt("~n~s ~p TCP RECEIVE on ~p~n ~p~n", [ts(TS),Pid,Sock,shrink_bin(Bytes)], D);
-msg_formater({trace,Pid,'receive',ErlangMsg}, D) ->
- fmt("~n~p ERL MSG RECEIVE~n ~p~n", [Pid,shrink_bin(ErlangMsg)], D);
+msg_formater({trace_ts,Pid,'receive',ErlangMsg,TS}, D) ->
+ fmt("~n~s ~p ERL MSG RECEIVE~n ~p~n", [ts(TS),Pid,shrink_bin(ErlangMsg)], D);
msg_formater(M, D) ->
@@ -106,6 +117,11 @@ msg_formater(M, D) ->
fmt(Fmt, Args, D=#data{writer=Write,acc=Acc}) ->
D#data{acc = Write(io_lib:format(Fmt, Args), Acc)}.
+ts({_,_,Usec}=Now) ->
+ {_Date,{HH,MM,SS}} = calendar:now_to_local_time(Now),
+ io_lib:format("~.2.0w:~.2.0w:~.2.0w.~.6.0w",[HH,MM,SS,Usec]);
+ts(_) ->
+ "-".
%%%----------------------------------------------------------------
setup_tracer(Write, MangleArg) ->
Handler = fun(Arg, D) ->
@@ -116,11 +132,11 @@ setup_tracer(Write, MangleArg) ->
ok.
%%%----------------------------------------------------------------
-shrink_bin(B) when is_binary(B), size(B)>100 -> {'*** SHRINKED BIN',
+shrink_bin(B) when is_binary(B), size(B)>256 -> {'*** SHRINKED BIN',
size(B),
- element(1,split_binary(B,20)),
+ element(1,split_binary(B,64)),
'...',
- element(2,split_binary(B,size(B)-20))
+ element(2,split_binary(B,size(B)-64))
};
shrink_bin(L) when is_list(L) -> lists:map(fun shrink_bin/1, L);
shrink_bin(T) when is_tuple(T) -> list_to_tuple(shrink_bin(tuple_to_list(T)));
diff --git a/lib/ssh/src/ssh_sftp.erl b/lib/ssh/src/ssh_sftp.erl
index b937f0412d..8d994cdb43 100644
--- a/lib/ssh/src/ssh_sftp.erl
+++ b/lib/ssh/src/ssh_sftp.erl
@@ -294,7 +294,7 @@ read(Pid, Handle, Len) ->
read(Pid, Handle, Len, FileOpTimeout) ->
call(Pid, {read,false,Handle, Len}, FileOpTimeout).
-%% TODO this ought to be a cast! Is so in all practial meaning
+%% TODO this ought to be a cast! Is so in all practical meaning
%% even if it is obscure!
apread(Pid, Handle, Offset, Len) ->
call(Pid, {pread,true,Handle, Offset, Len}, infinity).
@@ -313,12 +313,12 @@ write(Pid, Handle, Data) ->
write(Pid, Handle, Data, FileOpTimeout) ->
call(Pid, {write,false,Handle,Data}, FileOpTimeout).
-%% TODO this ought to be a cast! Is so in all practial meaning
+%% TODO this ought to be a cast! Is so in all practical meaning
%% even if it is obscure!
apwrite(Pid, Handle, Offset, Data) ->
call(Pid, {pwrite,true,Handle,Offset,Data}, infinity).
-%% TODO this ought to be a cast! Is so in all practial meaning
+%% TODO this ought to be a cast! Is so in all practical meaning
%% even if it is obscure!
awrite(Pid, Handle, Data) ->
call(Pid, {write,true,Handle,Data}, infinity).
diff --git a/lib/ssh/src/ssh_sftpd.erl b/lib/ssh/src/ssh_sftpd.erl
index b739955836..9352046795 100644
--- a/lib/ssh/src/ssh_sftpd.erl
+++ b/lib/ssh/src/ssh_sftpd.erl
@@ -664,29 +664,25 @@ open(Vsn, ReqId, Data, State) when Vsn >= 4 ->
do_open(ReqId, State, Path, Flags).
do_open(ReqId, State0, Path, Flags) ->
- #state{file_handler = FileMod, file_state = FS0, root = Root, xf = #ssh_xfer{vsn = Vsn}} = State0,
- XF = State0#state.xf,
- F = [binary | Flags],
- {IsDir, _FS1} = FileMod:is_dir(Path, FS0),
+ #state{file_handler = FileMod, file_state = FS0, xf = #ssh_xfer{vsn = Vsn}} = State0,
+ AbsPath = relate_file_name(Path, State0),
+ {IsDir, _FS1} = FileMod:is_dir(AbsPath, FS0),
case IsDir of
true when Vsn > 5 ->
ssh_xfer:xf_send_status(State0#state.xf, ReqId,
- ?SSH_FX_FILE_IS_A_DIRECTORY, "File is a directory");
+ ?SSH_FX_FILE_IS_A_DIRECTORY, "File is a directory"),
+ State0;
true ->
ssh_xfer:xf_send_status(State0#state.xf, ReqId,
- ?SSH_FX_FAILURE, "File is a directory");
+ ?SSH_FX_FAILURE, "File is a directory"),
+ State0;
false ->
- AbsPath = case Root of
- "" ->
- Path;
- _ ->
- relate_file_name(Path, State0)
- end,
- {Res, FS1} = FileMod:open(AbsPath, F, FS0),
+ OpenFlags = [binary | Flags],
+ {Res, FS1} = FileMod:open(AbsPath, OpenFlags, FS0),
State1 = State0#state{file_state = FS1},
case Res of
{ok, IoDevice} ->
- add_handle(State1, XF, ReqId, file, {Path,IoDevice});
+ add_handle(State1, State0#state.xf, ReqId, file, {Path,IoDevice});
{error, Error} ->
ssh_xfer:xf_send_status(State1#state.xf, ReqId,
ssh_xfer:encode_erlang_status(Error)),
@@ -742,6 +738,10 @@ resolve_symlinks_2([], State, _LinkCnt, AccPath) ->
{{ok, AccPath}, State}.
+%% The File argument is always in a user visible file system, i.e.
+%% is under Root and is relative to CWD or Root, if it starts with "/".
+%% The result of the function is always an absolute path in a
+%% "backend" file system.
relate_file_name(File, State) ->
relate_file_name(File, State, _Canonicalize=true).
@@ -749,19 +749,20 @@ relate_file_name(File, State, Canonicalize) when is_binary(File) ->
relate_file_name(unicode:characters_to_list(File), State, Canonicalize);
relate_file_name(File, #state{cwd = CWD, root = ""}, Canonicalize) ->
relate_filename_to_path(File, CWD, Canonicalize);
-relate_file_name(File, #state{root = Root}, Canonicalize) ->
- case is_within_root(Root, File) of
- true ->
- File;
- false ->
- RelFile = make_relative_filename(File),
- NewFile = relate_filename_to_path(RelFile, Root, Canonicalize),
- case is_within_root(Root, NewFile) of
- true ->
- NewFile;
- false ->
- Root
- end
+relate_file_name(File, #state{cwd = CWD, root = Root}, Canonicalize) ->
+ CWD1 = case is_within_root(Root, CWD) of
+ true -> CWD;
+ false -> Root
+ end,
+ AbsFile = case make_relative_filename(File) of
+ File ->
+ relate_filename_to_path(File, CWD1, Canonicalize);
+ RelFile ->
+ relate_filename_to_path(RelFile, Root, Canonicalize)
+ end,
+ case is_within_root(Root, AbsFile) of
+ true -> AbsFile;
+ false -> Root
end.
is_within_root(Root, File) ->
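The rewritten relate_file_name/3 always anchors the lookup inside the configured root: relative names resolve against the current directory (falling back to the root if the cwd has somehow escaped it), absolute names are first made relative and then resolved against the root, and any result outside the root collapses to the root itself. A worked example under the assumed values root = "/srv/sftp" and cwd = "/srv/sftp/home" (all paths hypothetical):

    %% "docs/a.txt"  -> "/srv/sftp/home/docs/a.txt"   (relative, under cwd)
    %% "/etc/passwd" -> "/srv/sftp/etc/passwd"        (absolute, re-rooted)
    %% "../../.."    -> "/srv/sftp"                   (escapes root, clamped)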
diff --git a/lib/ssh/src/ssh_transport.erl b/lib/ssh/src/ssh_transport.erl
index 21ba34506a..a17ad560d1 100644
--- a/lib/ssh/src/ssh_transport.erl
+++ b/lib/ssh/src/ssh_transport.erl
@@ -44,6 +44,7 @@
handle_kexdh_reply/2,
handle_kex_ecdh_init/2,
handle_kex_ecdh_reply/2,
+ parallell_gen_key/1,
extract_public_key/1,
ssh_packet/2, pack/2,
sha/1, sign/3, verify/4]).
@@ -78,6 +79,10 @@ default_algorithms() -> [{K,default_algorithms(K)} || K <- algo_classes()].
algo_classes() -> [kex, public_key, cipher, mac, compression].
+default_algorithms(kex) ->
+ supported_algorithms(kex, [
+ 'diffie-hellman-group1-sha1' % Gone in OpenSSH 7.3.p1
+ ]);
default_algorithms(cipher) ->
supported_algorithms(cipher, same(['AEAD_AES_128_GCM',
@@ -94,34 +99,39 @@ supported_algorithms() -> [{K,supported_algorithms(K)} || K <- algo_classes()].
supported_algorithms(kex) ->
select_crypto_supported(
[
- {'ecdh-sha2-nistp256', [{public_keys,ecdh}, {ec_curve,secp256r1}, {hashs,sha256}]},
{'ecdh-sha2-nistp384', [{public_keys,ecdh}, {ec_curve,secp384r1}, {hashs,sha384}]},
- {'diffie-hellman-group14-sha1', [{public_keys,dh}, {hashs,sha}]},
+ {'ecdh-sha2-nistp521', [{public_keys,ecdh}, {ec_curve,secp521r1}, {hashs,sha512}]},
+ {'ecdh-sha2-nistp256', [{public_keys,ecdh}, {ec_curve,secp256r1}, {hashs,sha256}]},
{'diffie-hellman-group-exchange-sha256', [{public_keys,dh}, {hashs,sha256}]},
+ {'diffie-hellman-group16-sha512', [{public_keys,dh}, {hashs,sha512}]}, % In OpenSSH 7.3.p1
+ {'diffie-hellman-group18-sha512', [{public_keys,dh}, {hashs,sha512}]}, % In OpenSSH 7.3.p1
+ {'diffie-hellman-group14-sha256', [{public_keys,dh}, {hashs,sha256}]}, % In OpenSSH 7.3.p1
+ {'diffie-hellman-group14-sha1', [{public_keys,dh}, {hashs,sha}]},
{'diffie-hellman-group-exchange-sha1', [{public_keys,dh}, {hashs,sha}]},
- {'ecdh-sha2-nistp521', [{public_keys,ecdh}, {ec_curve,secp521r1}, {hashs,sha512}]},
{'diffie-hellman-group1-sha1', [{public_keys,dh}, {hashs,sha}]}
]);
supported_algorithms(public_key) ->
select_crypto_supported(
- [{'ecdsa-sha2-nistp256', [{public_keys,ecdsa}, {hashs,sha256}, {ec_curve,secp256r1}]},
+ [
{'ecdsa-sha2-nistp384', [{public_keys,ecdsa}, {hashs,sha384}, {ec_curve,secp384r1}]},
{'ecdsa-sha2-nistp521', [{public_keys,ecdsa}, {hashs,sha512}, {ec_curve,secp521r1}]},
+ {'ecdsa-sha2-nistp256', [{public_keys,ecdsa}, {hashs,sha256}, {ec_curve,secp256r1}]},
{'ssh-rsa', [{public_keys,rsa}, {hashs,sha} ]},
- {'ssh-dss', [{public_keys,dss}, {hashs,sha} ]}
+ {'ssh-dss', [{public_keys,dss}, {hashs,sha} ]} % Gone in OpenSSH 7.3.p1
]);
supported_algorithms(cipher) ->
same(
select_crypto_supported(
- [{'aes256-ctr', [{ciphers,{aes_ctr,256}}]},
- {'aes192-ctr', [{ciphers,{aes_ctr,192}}]},
- {'aes128-ctr', [{ciphers,{aes_ctr,128}}]},
- {'aes128-cbc', [{ciphers,aes_cbc128}]},
+ [
+ {'[email protected]', [{ciphers,{aes_gcm,256}}]},
+ {'aes256-ctr', [{ciphers,{aes_ctr,256}}]},
+ {'aes192-ctr', [{ciphers,{aes_ctr,192}}]},
{'[email protected]', [{ciphers,{aes_gcm,128}}]},
- {'[email protected]', [{ciphers,{aes_gcm,256}}]},
- {'AEAD_AES_128_GCM', [{ciphers,{aes_gcm,128}}]},
+ {'aes128-ctr', [{ciphers,{aes_ctr,128}}]},
{'AEAD_AES_256_GCM', [{ciphers,{aes_gcm,256}}]},
+ {'AEAD_AES_128_GCM', [{ciphers,{aes_gcm,128}}]},
+ {'aes128-cbc', [{ciphers,aes_cbc128}]},
{'3des-cbc', [{ciphers,des3_cbc}]}
]
));
@@ -274,11 +284,12 @@ handle_kexinit_msg(#ssh_msg_kexinit{} = CounterPart, #ssh_msg_kexinit{} = Own,
true ->
key_exchange_first_msg(Algoritms#alg.kex,
Ssh0#ssh{algorithms = Algoritms});
- _ ->
+ {false,Alg} ->
%% TODO: Correct code?
ssh_connection_handler:disconnect(
#ssh_msg_disconnect{code = ?SSH_DISCONNECT_KEY_EXCHANGE_FAILED,
- description = "Selection of key exchange algorithm failed"
+ description = "Selection of key exchange algorithm failed: "
+ ++ Alg
})
end;
@@ -288,45 +299,63 @@ handle_kexinit_msg(#ssh_msg_kexinit{} = CounterPart, #ssh_msg_kexinit{} = Own,
case verify_algorithm(Algoritms) of
true ->
{ok, Ssh#ssh{algorithms = Algoritms}};
- _ ->
+ {false,Alg} ->
ssh_connection_handler:disconnect(
#ssh_msg_disconnect{code = ?SSH_DISCONNECT_KEY_EXCHANGE_FAILED,
- description = "Selection of key exchange algorithm failed"
+ description = "Selection of key exchange algorithm failed: "
+ ++ Alg
})
end.
-%% TODO: diffie-hellman-group14-sha1 should also be supported.
-%% Maybe check more things ...
-
-verify_algorithm(#alg{kex = undefined}) -> false;
-verify_algorithm(#alg{hkey = undefined}) -> false;
-verify_algorithm(#alg{send_mac = undefined}) -> false;
-verify_algorithm(#alg{recv_mac = undefined}) -> false;
-verify_algorithm(#alg{encrypt = undefined}) -> false;
-verify_algorithm(#alg{decrypt = undefined}) -> false;
-verify_algorithm(#alg{compress = undefined}) -> false;
-verify_algorithm(#alg{decompress = undefined}) -> false;
-verify_algorithm(#alg{kex = Kex}) -> lists:member(Kex, supported_algorithms(kex)).
+verify_algorithm(#alg{kex = undefined}) -> {false, "kex"};
+verify_algorithm(#alg{hkey = undefined}) -> {false, "hkey"};
+verify_algorithm(#alg{send_mac = undefined}) -> {false, "send_mac"};
+verify_algorithm(#alg{recv_mac = undefined}) -> {false, "recv_mac"};
+verify_algorithm(#alg{encrypt = undefined}) -> {false, "encrypt"};
+verify_algorithm(#alg{decrypt = undefined}) -> {false, "decrypt"};
+verify_algorithm(#alg{compress = undefined}) -> {false, "compress"};
+verify_algorithm(#alg{decompress = undefined}) -> {false, "decompress"};
+verify_algorithm(#alg{kex = Kex}) ->
+ case lists:member(Kex, supported_algorithms(kex)) of
+ true -> true;
+ false -> {false, "kex"}
+ end.
%%%----------------------------------------------------------------
%%%
%%% Key exchange initialization
%%%
key_exchange_first_msg(Kex, Ssh0) when Kex == 'diffie-hellman-group1-sha1' ;
- Kex == 'diffie-hellman-group14-sha1' ->
+ Kex == 'diffie-hellman-group14-sha1' ;
+ Kex == 'diffie-hellman-group14-sha256' ;
+ Kex == 'diffie-hellman-group16-sha512' ;
+ Kex == 'diffie-hellman-group18-sha512'
+ ->
{G, P} = dh_group(Kex),
- {Public, Private} = generate_key(dh, [P,G]),
+ Sz = dh_bits(Ssh0#ssh.algorithms),
+ {Public, Private} = generate_key(dh, [P,G,2*Sz]),
{SshPacket, Ssh1} = ssh_packet(#ssh_msg_kexdh_init{e = Public}, Ssh0),
{ok, SshPacket,
Ssh1#ssh{keyex_key = {{Private, Public}, {G, P}}}};
key_exchange_first_msg(Kex, Ssh0=#ssh{opts=Opts}) when Kex == 'diffie-hellman-group-exchange-sha1' ;
Kex == 'diffie-hellman-group-exchange-sha256' ->
- {Min,NBits,Max} =
+ {Min,NBits0,Max} =
proplists:get_value(dh_gex_limits, Opts, {?DEFAULT_DH_GROUP_MIN,
?DEFAULT_DH_GROUP_NBITS,
?DEFAULT_DH_GROUP_MAX}),
+ DhBits = dh_bits(Ssh0#ssh.algorithms),
+ NBits1 =
+ %% NIST Special Publication 800-57 Part 1 Revision 4: Recommendation for Key Management
+ if
+ DhBits =< 112 -> 2048;
+ DhBits =< 128 -> 3072;
+ DhBits =< 192 -> 7680;
+ true -> 8192
+ end,
+ NBits = min(max(max(NBits0,NBits1),Min), Max),
+
{SshPacket, Ssh1} =
ssh_packet(#ssh_msg_kex_dh_gex_request{min = Min,
n = NBits,
@@ -348,14 +377,18 @@ key_exchange_first_msg(Kex, Ssh0) when Kex == 'ecdh-sha2-nistp256' ;
%%%
%%% diffie-hellman-group1-sha1
%%% diffie-hellman-group14-sha1
+%%% diffie-hellman-group14-sha256
+%%% diffie-hellman-group16-sha512
+%%% diffie-hellman-group18-sha512
%%%
handle_kexdh_init(#ssh_msg_kexdh_init{e = E},
- Ssh0 = #ssh{algorithms = #alg{kex=Kex}}) ->
+ Ssh0 = #ssh{algorithms = #alg{kex=Kex} = Algs}) ->
%% server
{G, P} = dh_group(Kex),
if
1=<E, E=<(P-1) ->
- {Public, Private} = generate_key(dh, [P,G]),
+ Sz = dh_bits(Algs),
+ {Public, Private} = generate_key(dh, [P,G,2*Sz]),
K = compute_key(dh, E, Private, [P,G]),
MyPrivHostKey = get_host_key(Ssh0),
MyPubHostKey = extract_public_key(MyPrivHostKey),
@@ -367,7 +400,7 @@ handle_kexdh_init(#ssh_msg_kexdh_init{e = E},
h_sig = H_SIG
}, Ssh0),
{ok, SshPacket, Ssh1#ssh{keyex_key = {{Private, Public}, {G, P}},
- shared_secret = K,
+ shared_secret = ssh_bits:mpint(K),
exchanged_hash = H,
session_id = sid(Ssh1, H)}};
@@ -393,7 +426,7 @@ handle_kexdh_reply(#ssh_msg_kexdh_reply{public_host_key = PeerPubHostKey,
case verify_host_key(Ssh0, PeerPubHostKey, H, H_SIG) of
ok ->
{SshPacket, Ssh} = ssh_packet(#ssh_msg_newkeys{}, Ssh0),
- {ok, SshPacket, Ssh#ssh{shared_secret = K,
+ {ok, SshPacket, Ssh#ssh{shared_secret = ssh_bits:mpint(K),
exchanged_hash = H,
session_id = sid(Ssh, H)}};
Error ->
@@ -426,13 +459,12 @@ handle_kex_dh_gex_request(#ssh_msg_kex_dh_gex_request{min = Min0,
{Min, Max} = adjust_gex_min_max(Min0, Max0, Opts),
case public_key:dh_gex_group(Min, NBits, Max,
proplists:get_value(dh_gex_groups,Opts)) of
- {ok, {_Sz, {G,P}}} ->
- {Public, Private} = generate_key(dh, [P,G]),
+ {ok, {_, {G,P}}} ->
{SshPacket, Ssh} =
ssh_packet(#ssh_msg_kex_dh_gex_group{p = P, g = G}, Ssh0),
{ok, SshPacket,
- Ssh#ssh{keyex_key = {{Private, Public}, {G, P}},
- keyex_info = {Min, Max, NBits}
+ Ssh#ssh{keyex_key = {x, {G, P}},
+ keyex_info = {Min0, Max0, NBits}
}};
{error,_} ->
ssh_connection_handler:disconnect(
@@ -449,7 +481,7 @@ handle_kex_dh_gex_request(#ssh_msg_kex_dh_gex_request_old{n = NBits},
%% This message was in the draft-00 of rfc4419
%% (https://tools.ietf.org/html/draft-ietf-secsh-dh-group-exchange-00)
 %% In later drafts and in the RFC it "is used for backward compatibility".
- %% Unfortunatly the rfc does not specify how to treat the parameter n
+ %% Unfortunately the rfc does not specify how to treat the parameter n
%% if there is no group of that modulus length :(
%% The draft-00 however specifies that n is the "... number of bits
%% the subgroup should have at least".
@@ -461,12 +493,11 @@ handle_kex_dh_gex_request(#ssh_msg_kex_dh_gex_request_old{n = NBits},
{Min, Max} = adjust_gex_min_max(Min0, Max0, Opts),
case public_key:dh_gex_group(Min, NBits, Max,
proplists:get_value(dh_gex_groups,Opts)) of
- {ok, {_Sz, {G,P}}} ->
- {Public, Private} = generate_key(dh, [P,G]),
+ {ok, {_, {G,P}}} ->
{SshPacket, Ssh} =
ssh_packet(#ssh_msg_kex_dh_gex_group{p = P, g = G}, Ssh0),
{ok, SshPacket,
- Ssh#ssh{keyex_key = {{Private, Public}, {G, P}},
+ Ssh#ssh{keyex_key = {x, {G, P}},
keyex_info = {-1, -1, NBits} % flag for kex_h hash calc
}};
{error,_} ->
@@ -507,7 +538,8 @@ adjust_gex_min_max(Min0, Max0, Opts) ->
handle_kex_dh_gex_group(#ssh_msg_kex_dh_gex_group{p = P, g = G}, Ssh0) ->
%% client
- {Public, Private} = generate_key(dh, [P,G]),
+ Sz = dh_bits(Ssh0#ssh.algorithms),
+ {Public, Private} = generate_key(dh, [P,G,2*Sz]),
{SshPacket, Ssh1} =
ssh_packet(#ssh_msg_kex_dh_gex_init{e = Public}, Ssh0), % Pub = G^Priv mod P (def)
@@ -532,7 +564,7 @@ handle_kex_dh_gex_init(#ssh_msg_kex_dh_gex_init{e = E},
ssh_packet(#ssh_msg_kex_dh_gex_reply{public_host_key = MyPubHostKey,
f = Public,
h_sig = H_SIG}, Ssh0),
- {ok, SshPacket, Ssh#ssh{shared_secret = K,
+ {ok, SshPacket, Ssh#ssh{shared_secret = ssh_bits:mpint(K),
exchanged_hash = H,
session_id = sid(Ssh, H)
}};
@@ -568,7 +600,7 @@ handle_kex_dh_gex_reply(#ssh_msg_kex_dh_gex_reply{public_host_key = PeerPubHostK
case verify_host_key(Ssh0, PeerPubHostKey, H, H_SIG) of
ok ->
{SshPacket, Ssh} = ssh_packet(#ssh_msg_newkeys{}, Ssh0),
- {ok, SshPacket, Ssh#ssh{shared_secret = K,
+ {ok, SshPacket, Ssh#ssh{shared_secret = ssh_bits:mpint(K),
exchanged_hash = H,
session_id = sid(Ssh, H)}};
_Error ->
@@ -618,7 +650,7 @@ handle_kex_ecdh_init(#ssh_msg_kex_ecdh_init{q_c = PeerPublic},
h_sig = H_SIG},
Ssh0),
{ok, SshPacket, Ssh1#ssh{keyex_key = {{MyPublic,MyPrivate},Curve},
- shared_secret = K,
+ shared_secret = ssh_bits:mpint(K),
exchanged_hash = H,
session_id = sid(Ssh1, H)}}
catch
@@ -644,7 +676,7 @@ handle_kex_ecdh_reply(#ssh_msg_kex_ecdh_reply{public_host_key = PeerPubHostKey,
case verify_host_key(Ssh0, PeerPubHostKey, H, H_SIG) of
ok ->
{SshPacket, Ssh} = ssh_packet(#ssh_msg_newkeys{}, Ssh0),
- {ok, SshPacket, Ssh#ssh{shared_secret = K,
+ {ok, SshPacket, Ssh#ssh{shared_secret = ssh_bits:mpint(K),
exchanged_hash = H,
session_id = sid(Ssh, H)}};
Error ->
@@ -746,9 +778,8 @@ accepted_host(Ssh, PeerName, Public, Opts) ->
yes == yes_no(Ssh, "New host " ++ PeerName ++ " accept")
end.
-known_host_key(#ssh{opts = Opts, key_cb = Mod, peer = Peer} = Ssh,
+known_host_key(#ssh{opts = Opts, key_cb = Mod, peer = {PeerName,_}} = Ssh,
Public, Alg) ->
- PeerName = peer_name(Peer),
case Mod:is_host_key(Public, PeerName, Alg, Opts) of
true ->
ok;
@@ -1117,6 +1148,51 @@ verify(PlainText, Hash, Sig, Key) ->
%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%% Unit: bytes
+
+-record(cipher_data, {
+ key_bytes,
+ iv_bytes,
+ block_bytes
+ }).
+
+%%% Start of more parameterized crypto handling.
+cipher('AEAD_AES_128_GCM') ->
+ #cipher_data{key_bytes = 16,
+ iv_bytes = 12,
+ block_bytes = 16};
+
+cipher('AEAD_AES_256_GCM') ->
+ #cipher_data{key_bytes = 32,
+ iv_bytes = 12,
+ block_bytes = 16};
+
+cipher('3des-cbc') ->
+ #cipher_data{key_bytes = 24,
+ iv_bytes = 8,
+ block_bytes = 8};
+
+cipher('aes128-cbc') ->
+ #cipher_data{key_bytes = 16,
+ iv_bytes = 16,
+ block_bytes = 16};
+
+cipher('aes128-ctr') ->
+ #cipher_data{key_bytes = 16,
+ iv_bytes = 16,
+ block_bytes = 16};
+
+cipher('aes192-ctr') ->
+ #cipher_data{key_bytes = 24,
+ iv_bytes = 16,
+ block_bytes = 16};
+
+cipher('aes256-ctr') ->
+ #cipher_data{key_bytes = 32,
+ iv_bytes = 16,
+ block_bytes = 16}.
+
+
encrypt_init(#ssh{encrypt = none} = Ssh) ->
{ok, Ssh};
encrypt_init(#ssh{encrypt = 'AEAD_AES_128_GCM', role = client} = Ssh) ->
@@ -1497,11 +1573,11 @@ send_mac_init(SSH) ->
common ->
case SSH#ssh.role of
client ->
- KeySize = mac_key_size(SSH#ssh.send_mac),
+ KeySize = 8*mac_key_bytes(SSH#ssh.send_mac),
Key = hash(SSH, "E", KeySize),
{ok, SSH#ssh { send_mac_key = Key }};
server ->
- KeySize = mac_key_size(SSH#ssh.send_mac),
+ KeySize = 8*mac_key_bytes(SSH#ssh.send_mac),
Key = hash(SSH, "F", KeySize),
{ok, SSH#ssh { send_mac_key = Key }}
end;
@@ -1520,10 +1596,10 @@ recv_mac_init(SSH) ->
common ->
case SSH#ssh.role of
client ->
- Key = hash(SSH, "F", mac_key_size(SSH#ssh.recv_mac)),
+ Key = hash(SSH, "F", 8*mac_key_bytes(SSH#ssh.recv_mac)),
{ok, SSH#ssh { recv_mac_key = Key }};
server ->
- Key = hash(SSH, "E", mac_key_size(SSH#ssh.recv_mac)),
+ Key = hash(SSH, "E", 8*mac_key_bytes(SSH#ssh.recv_mac)),
{ok, SSH#ssh { recv_mac_key = Key }}
end;
aead ->
@@ -1549,48 +1625,27 @@ mac('hmac-sha2-256', Key, SeqNum, Data) ->
mac('hmac-sha2-512', Key, SeqNum, Data) ->
crypto:hmac(sha512, Key, [<<?UINT32(SeqNum)>>, Data]).
-%% return N hash bytes (HASH)
-hash(SSH, Char, Bits) ->
- HASH =
- case SSH#ssh.kex of
- 'diffie-hellman-group1-sha1' ->
- fun(Data) -> crypto:hash(sha, Data) end;
- 'diffie-hellman-group14-sha1' ->
- fun(Data) -> crypto:hash(sha, Data) end;
-
- 'diffie-hellman-group-exchange-sha1' ->
- fun(Data) -> crypto:hash(sha, Data) end;
- 'diffie-hellman-group-exchange-sha256' ->
- fun(Data) -> crypto:hash(sha256, Data) end;
-
- 'ecdh-sha2-nistp256' ->
- fun(Data) -> crypto:hash(sha256,Data) end;
- 'ecdh-sha2-nistp384' ->
- fun(Data) -> crypto:hash(sha384,Data) end;
- 'ecdh-sha2-nistp521' ->
- fun(Data) -> crypto:hash(sha512,Data) end;
- _ ->
- exit({bad_algorithm,SSH#ssh.kex})
- end,
- hash(SSH, Char, Bits, HASH).
-hash(_SSH, _Char, 0, _HASH) ->
+%%%----------------------------------------------------------------
+%% return N hash bytes (HASH)
+hash(_SSH, _Char, 0) ->
<<>>;
-hash(SSH, Char, N, HASH) ->
- K = ssh_bits:mpint(SSH#ssh.shared_secret),
+hash(SSH, Char, N) ->
+ HashAlg = sha(SSH#ssh.kex),
+ K = SSH#ssh.shared_secret,
H = SSH#ssh.exchanged_hash,
- SessionID = SSH#ssh.session_id,
- K1 = HASH([K, H, Char, SessionID]),
+ K1 = crypto:hash(HashAlg, [K, H, Char, SSH#ssh.session_id]),
Sz = N div 8,
- <<Key:Sz/binary, _/binary>> = hash(K, H, K1, N-128, HASH),
+ <<Key:Sz/binary, _/binary>> = hash(K, H, K1, N-128, HashAlg),
Key.
-hash(_K, _H, Ki, N, _HASH) when N =< 0 ->
+hash(_K, _H, Ki, N, _HashAlg) when N =< 0 ->
Ki;
-hash(K, H, Ki, N, HASH) ->
- Kj = HASH([K, H, Ki]),
- hash(K, H, <<Ki/binary, Kj/binary>>, N-128, HASH).
+hash(K, H, Ki, N, HashAlg) ->
+ Kj = crypto:hash(HashAlg, [K, H, Ki]),
+ hash(K, H, <<Ki/binary, Kj/binary>>, N-128, HashAlg).
+%%%----------------------------------------------------------------
kex_h(SSH, Key, E, F, K) ->
KeyBin = public_key:ssh_encode(Key, ssh2_pubkey),
L = <<?Estring(SSH#ssh.c_version), ?Estring(SSH#ssh.s_version),
@@ -1633,20 +1688,28 @@ sha(secp384r1) -> sha384;
sha(secp521r1) -> sha512;
sha('diffie-hellman-group1-sha1') -> sha;
sha('diffie-hellman-group14-sha1') -> sha;
+sha('diffie-hellman-group14-sha256') -> sha256;
+sha('diffie-hellman-group16-sha512') -> sha512;
+sha('diffie-hellman-group18-sha512') -> sha512;
sha('diffie-hellman-group-exchange-sha1') -> sha;
sha('diffie-hellman-group-exchange-sha256') -> sha256;
sha(?'secp256r1') -> sha(secp256r1);
sha(?'secp384r1') -> sha(secp384r1);
-sha(?'secp521r1') -> sha(secp521r1).
-
-
-mac_key_size('hmac-sha1') -> 20*8;
-mac_key_size('hmac-sha1-96') -> 20*8;
-mac_key_size('hmac-md5') -> 16*8;
-mac_key_size('hmac-md5-96') -> 16*8;
-mac_key_size('hmac-sha2-256')-> 32*8;
-mac_key_size('hmac-sha2-512')-> 512;
-mac_key_size(none) -> 0.
+sha(?'secp521r1') -> sha(secp521r1);
+sha('ecdh-sha2-nistp256') -> sha(secp256r1);
+sha('ecdh-sha2-nistp384') -> sha(secp384r1);
+sha('ecdh-sha2-nistp521') -> sha(secp521r1).
+
+
+mac_key_bytes('hmac-sha1') -> 20;
+mac_key_bytes('hmac-sha1-96') -> 20;
+mac_key_bytes('hmac-md5') -> 16;
+mac_key_bytes('hmac-md5-96') -> 16;
+mac_key_bytes('hmac-sha2-256')-> 32;
+mac_key_bytes('hmac-sha2-512')-> 64;
+mac_key_bytes('AEAD_AES_128_GCM') -> 0;
+mac_key_bytes('AEAD_AES_256_GCM') -> 0;
+mac_key_bytes(none) -> 0.
mac_digest_size('hmac-sha1') -> 20;
mac_digest_size('hmac-sha1-96') -> 12;
@@ -1658,9 +1721,6 @@ mac_digest_size('AEAD_AES_128_GCM') -> 16;
mac_digest_size('AEAD_AES_256_GCM') -> 16;
mac_digest_size(none) -> 0.
-peer_name({Host, _}) ->
- Host.
-
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%
%% Diffie-Hellman utils
@@ -1668,9 +1728,19 @@ peer_name({Host, _}) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
dh_group('diffie-hellman-group1-sha1') -> ?dh_group1;
-dh_group('diffie-hellman-group14-sha1') -> ?dh_group14.
+dh_group('diffie-hellman-group14-sha1') -> ?dh_group14;
+dh_group('diffie-hellman-group14-sha256') -> ?dh_group14;
+dh_group('diffie-hellman-group16-sha512') -> ?dh_group16;
+dh_group('diffie-hellman-group18-sha512') -> ?dh_group18.
%%%----------------------------------------------------------------
+parallell_gen_key(Ssh = #ssh{keyex_key = {x, {G, P}},
+ algorithms = Algs}) ->
+ Sz = dh_bits(Algs),
+ {Public, Private} = generate_key(dh, [P,G,2*Sz]),
+ Ssh#ssh{keyex_key = {{Private, Public}, {G, P}}}.
+
+
generate_key(Algorithm, Args) ->
{Public,Private} = crypto:generate_key(Algorithm, Args),
{crypto:bytes_to_integer(Public), crypto:bytes_to_integer(Private)}.
@@ -1681,6 +1751,15 @@ compute_key(Algorithm, OthersPublic, MyPrivate, Args) ->
crypto:bytes_to_integer(Shared).
+dh_bits(#alg{encrypt = Encrypt,
+ send_mac = SendMac}) ->
+ C = cipher(Encrypt),
+ 8 * lists:max([C#cipher_data.key_bytes,
+ C#cipher_data.block_bytes,
+ C#cipher_data.iv_bytes,
+ mac_key_bytes(SendMac)
+ ]).
+
ecdh_curve('ecdh-sha2-nistp256') -> secp256r1;
ecdh_curve('ecdh-sha2-nistp384') -> secp384r1;
ecdh_curve('ecdh-sha2-nistp521') -> secp521r1.
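
Side note (not part of the patch): the dh_bits/1 value computed above is doubled and passed as the optional third element of the crypto:generate_key/2 parameter list, which caps the length of the DH private exponent. A minimal sketch of that idea, with illustrative module/function names and the byte counts supplied directly instead of via the #alg{} and #cipher_data{} records:

    -module(dh_size_sketch).
    -export([dh_private_bits/4, make_dh_keypair/3]).

    %% Effective symmetric strength in bits: the largest of the cipher key,
    %% block and IV sizes and the MAC key size (all in bytes), mirroring
    %% dh_bits/1 in the patch above.
    dh_private_bits(CipherKeyBytes, CipherBlockBytes, CipherIvBytes, MacKeyBytes) ->
        8 * lists:max([CipherKeyBytes, CipherBlockBytes, CipherIvBytes, MacKeyBytes]).

    %% crypto:generate_key(dh, [P, G, PrivateKeyBitLength]) is documented to
    %% accept an optional third element limiting the private key length; the
    %% patch passes 2*Sz there.
    make_dh_keypair(P, G, Bits) ->
        {Pub, Priv} = crypto:generate_key(dh, [P, G, 2*Bits]),
        {crypto:bytes_to_integer(Pub), crypto:bytes_to_integer(Priv)}.

For example, aes256-ctr (32-byte key) with hmac-sha2-256 (32-byte MAC key) gives dh_private_bits(32,16,16,32) = 256, so the private exponent is capped at 512 bits even when a group16 (4096-bit) or group18 (8192-bit) modulus is negotiated.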
diff --git a/lib/ssh/src/ssh_transport.hrl b/lib/ssh/src/ssh_transport.hrl
index f91cb1dd63..19b3f5c437 100644
--- a/lib/ssh/src/ssh_transport.hrl
+++ b/lib/ssh/src/ssh_transport.hrl
@@ -112,7 +112,7 @@
%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% diffie-hellman-group1-sha1 | diffie-hellman-group14-sha1
+%% diffie-hellman-group*-sha*
-define(SSH_MSG_KEXDH_INIT, 30).
-define(SSH_MSG_KEXDH_REPLY, 31).
@@ -238,4 +238,15 @@
-define(dh_group14,
{2, 16#FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AACAA68FFFFFFFFFFFFFFFF}).
+%%% rfc 3526, ch5
+%%% Size 4096-bit
+-define(dh_group16,
+ {2, 16#FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6BF12FFA06D98A0864D87602733EC86A64521F2B18177B200CBBE117577A615D6C770988C0BAD946E208E24FA074E5AB3143DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D788719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA993B4EA988D8FDDC186FFB7DC90A6C08F4DF435C934063199FFFFFFFFFFFFFFFF}).
+
+%%% rfc 3526, ch7
+%%% Size 8192-bit
+-define(dh_group18,
+ {2, 16#FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE45B3DC2007CB8A163BF0598DA48361C55D39A69163FA8FD24CF5F83655D23DCA3AD961C62F356208552BB9ED529077096966D670C354E4ABC9804F1746C08CA18217C32905E462E36CE3BE39E772C180E86039B2783A2EC07A28FB5C55DF06F4C52C9DE2BCBF6955817183995497CEA956AE515D2261898FA051015728E5A8AAAC42DAD33170D04507A33A85521ABDF1CBA64ECFB850458DBEF0A8AEA71575D060C7DB3970F85A6E1E4C7ABF5AE8CDB0933D71E8C94E04A25619DCEE3D2261AD2EE6BF12FFA06D98A0864D87602733EC86A64521F2B18177B200CBBE117577A615D6C770988C0BAD946E208E24FA074E5AB3143DB5BFCE0FD108E4B82D120A92108011A723C12A787E6D788719A10BDBA5B2699C327186AF4E23C1A946834B6150BDA2583E9CA2AD44CE8DBBBC2DB04DE8EF92E8EFC141FBECAA6287C59474E6BC05D99B2964FA090C3A2233BA186515BE7ED1F612970CEE2D7AFB81BDD762170481CD0069127D5B05AA993B4EA988D8FDDC186FFB7DC90A6C08F4DF435C93402849236C3FAB4D27C7026C1D4DCB2602646DEC9751E763DBA37BDF8FF9406AD9E530EE5DB382F413001AEB06A53ED9027D831179727B0865A8918DA3EDBEBCF9B14ED44CE6CBACED4BB1BDB7F1447E6CC254B332051512BD7AF426FB8F401378CD2BF5983CA01C64B92ECF032EA15D1721D03F482D7CE6E74FEF6D55E702F46980C82B5A84031900B1C9E59E7C97FBEC7E8F323A97A7E36CC88BE0F1D45B7FF585AC54BD407B22B4154AACC8F6D7EBF48E1D814CC5ED20F8037E0A79715EEF29BE32806A1D58BB7C5DA76F550AA3D8A1FBFF0EB19CCB1A313D55CDA56C9EC2EF29632387FE8D76E3C0468043E8F663F4860EE12BF2D5B0B7474D6E694F91E6DBE115974A3926F12FEE5E438777CB6A932DF8CD8BEC4D073B931BA3BC832B68D9DD300741FA7BF8AFC47ED2576F6936BA424663AAB639C5AE4F5683423B4742BF1C978238F16CBE39D652DE3FDB8BEFC848AD922222E04A4037C0713EB57A81A23F0C73473FC646CEA306B4BCBC8862F8385DDFA9D4B7FA2C087E879683303ED5BDD3A062B3CF5B3A278A66D2A13F83F44F82DDF310EE074AB6A364597E899A0255DC164F31CC50846851DF9AB48195DED7EA1B1D510BD7EE74D73FAF36BC31ECFA268359046F4EB879F924009438B481C6CD7889A002ED5EE382BC9190DA6FC026E479558E4475677E9AA9E3050E2765694DFC81F56E880B96E7160C980DD98EDD3DFFFFFFFFFFFFFFFFF}).
+
+
-endif. % -ifdef(ssh_transport).
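
Side note (not part of the patch): the two new macros are the 4096-bit (group 16) and 8192-bit (group 18) MODP primes from RFC 3526. A hedged way to sanity-check their sizes, assuming the header above is included; the module and function names are illustrative:

    -module(dh_group_size_check).
    -export([modulus_bits/1]).

    %% Bit length of the prime modulus in a {Generator, Prime} group tuple.
    %% Expected: 2048 for ?dh_group14, 4096 for ?dh_group16, 8192 for ?dh_group18.
    modulus_bits({_G, P}) when is_integer(P), P > 0 ->
        length(integer_to_list(P, 2)).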
diff --git a/lib/ssh/test/property_test/ssh_eqc_encode_decode.erl b/lib/ssh/test/property_test/ssh_eqc_encode_decode.erl
index 0f8a838f97..8ca29b9399 100644
--- a/lib/ssh/test/property_test/ssh_eqc_encode_decode.erl
+++ b/lib/ssh/test/property_test/ssh_eqc_encode_decode.erl
@@ -184,10 +184,7 @@ gen_byte(N) when N>0 -> [gen_byte() || _ <- lists:seq(1,N)].
gen_char() -> choose($a,$z).
-gen_mpint() -> ?LET(Size, choose(1,20),
- ?LET(Str, vector(Size, gen_byte()),
- gen_string( strip_0s(Str) )
- )).
+gen_mpint() -> ?LET(I, largeint(), ssh_bits:mpint(I)).
strip_0s([0|T]) -> strip_0s(T);
strip_0s(X) -> X.
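
Side note (not part of the patch): the rewritten generator now produces properly encoded mpints via ssh_bits:mpint/1 instead of hand-rolled byte strings. For reference, a hedged sketch of the mpint wire format for non-negative integers per RFC 4251, section 5 (a length-prefixed, big-endian two's-complement value, padded with a zero byte when the high bit is set); the names are illustrative and this is not the OTP implementation:

    -module(mpint_sketch).
    -export([mpint/1]).

    mpint(0) ->
        <<0:32>>;                              %% zero encodes as empty data
    mpint(I) when is_integer(I), I > 0 ->
        Bin0 = binary:encode_unsigned(I),      %% minimal big-endian bytes
        Bin = case Bin0 of
                  <<1:1, _/bitstring>> ->      %% high bit set: pad with 0x00
                      <<0, Bin0/binary>>;
                  _ ->
                      Bin0
              end,
        <<(byte_size(Bin)):32, Bin/binary>>.   %% 4-byte length prefix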
diff --git a/lib/ssh/test/ssh_algorithms_SUITE.erl b/lib/ssh/test/ssh_algorithms_SUITE.erl
index 14605ee44f..313b7fc559 100644
--- a/lib/ssh/test/ssh_algorithms_SUITE.erl
+++ b/lib/ssh/test/ssh_algorithms_SUITE.erl
@@ -58,9 +58,11 @@ groups() ->
|| {Tag,Algs} <- ErlAlgos,
lists:member(Tag,tags())
],
+
+ TypeSSH = ssh_test_lib:ssh_type(),
AlgoTcSet =
- [{Alg, [parallel], specific_test_cases(Tag,Alg,SshcAlgos,SshdAlgos)}
+ [{Alg, [parallel], specific_test_cases(Tag,Alg,SshcAlgos,SshdAlgos,TypeSSH)}
|| {Tag,Algs} <- ErlAlgos ++ DoubleAlgos,
Alg <- Algs],
@@ -198,8 +200,6 @@ try_exec_simple_group(Group, Config) ->
%%--------------------------------------------------------------------
%% Testing all default groups
-simple_exec_groups() -> [{timetrap,{minutes,8}}].
-
simple_exec_groups(Config) ->
Sizes = interpolate( public_key:dh_gex_group_sizes() ),
lists:foreach(
@@ -315,18 +315,13 @@ concat(A1, A2) -> list_to_atom(lists:concat([A1," + ",A2])).
split(Alg) -> ssh_test_lib:to_atoms(string:tokens(atom_to_list(Alg), " + ")).
-specific_test_cases(Tag, Alg, SshcAlgos, SshdAlgos) ->
+specific_test_cases(Tag, Alg, SshcAlgos, SshdAlgos, TypeSSH) ->
[simple_exec, simple_sftp] ++
case supports(Tag, Alg, SshcAlgos) of
- true ->
- case ssh_test_lib:ssh_type() of
- openSSH ->
- [sshc_simple_exec_os_cmd];
- _ ->
- []
- end;
- false ->
- []
+ true when TypeSSH == openSSH ->
+ [sshc_simple_exec_os_cmd];
+ _ ->
+ []
end ++
case supports(Tag, Alg, SshdAlgos) of
true ->
diff --git a/lib/ssh/test/ssh_basic_SUITE.erl b/lib/ssh/test/ssh_basic_SUITE.erl
index 0a0ab5cdf7..cdf6cf9ae1 100644
--- a/lib/ssh/test/ssh_basic_SUITE.erl
+++ b/lib/ssh/test/ssh_basic_SUITE.erl
@@ -152,15 +152,27 @@ end_per_suite(_Config) ->
%%--------------------------------------------------------------------
init_per_group(dsa_key, Config) ->
- DataDir = proplists:get_value(data_dir, Config),
- PrivDir = proplists:get_value(priv_dir, Config),
- ssh_test_lib:setup_dsa(DataDir, PrivDir),
- Config;
+ case lists:member('ssh-dss',
+ ssh_transport:default_algorithms(public_key)) of
+ true ->
+ DataDir = proplists:get_value(data_dir, Config),
+ PrivDir = proplists:get_value(priv_dir, Config),
+ ssh_test_lib:setup_dsa(DataDir, PrivDir),
+ Config;
+ false ->
+ {skip, unsupported_pub_key}
+ end;
init_per_group(rsa_key, Config) ->
- DataDir = proplists:get_value(data_dir, Config),
- PrivDir = proplists:get_value(priv_dir, Config),
- ssh_test_lib:setup_rsa(DataDir, PrivDir),
- Config;
+ case lists:member('ssh-rsa',
+ ssh_transport:default_algorithms(public_key)) of
+ true ->
+ DataDir = proplists:get_value(data_dir, Config),
+ PrivDir = proplists:get_value(priv_dir, Config),
+ ssh_test_lib:setup_rsa(DataDir, PrivDir),
+ Config;
+ false ->
+ {skip, unsupported_pub_key}
+ end;
init_per_group(ecdsa_sha2_nistp256_key, Config) ->
case lists:member('ecdsa-sha2-nistp256',
ssh_transport:default_algorithms(public_key)) of
@@ -195,15 +207,27 @@ init_per_group(ecdsa_sha2_nistp521_key, Config) ->
{skip, unsupported_pub_key}
end;
init_per_group(rsa_pass_key, Config) ->
- DataDir = proplists:get_value(data_dir, Config),
- PrivDir = proplists:get_value(priv_dir, Config),
- ssh_test_lib:setup_rsa_pass_pharse(DataDir, PrivDir, "Password"),
- [{pass_phrase, {rsa_pass_phrase, "Password"}}| Config];
+ case lists:member('ssh-rsa',
+ ssh_transport:default_algorithms(public_key)) of
+ true ->
+ DataDir = proplists:get_value(data_dir, Config),
+ PrivDir = proplists:get_value(priv_dir, Config),
+ ssh_test_lib:setup_rsa_pass_pharse(DataDir, PrivDir, "Password"),
+ [{pass_phrase, {rsa_pass_phrase, "Password"}}| Config];
+ false ->
+ {skip, unsupported_pub_key}
+ end;
init_per_group(dsa_pass_key, Config) ->
- DataDir = proplists:get_value(data_dir, Config),
- PrivDir = proplists:get_value(priv_dir, Config),
- ssh_test_lib:setup_dsa_pass_pharse(DataDir, PrivDir, "Password"),
- [{pass_phrase, {dsa_pass_phrase, "Password"}}| Config];
+ case lists:member('ssh-dss',
+ ssh_transport:default_algorithms(public_key)) of
+ true ->
+ DataDir = proplists:get_value(data_dir, Config),
+ PrivDir = proplists:get_value(priv_dir, Config),
+ ssh_test_lib:setup_dsa_pass_pharse(DataDir, PrivDir, "Password"),
+ [{pass_phrase, {dsa_pass_phrase, "Password"}}| Config];
+ false ->
+ {skip, unsupported_pub_key}
+ end;
init_per_group(host_user_key_differs, Config) ->
Data = proplists:get_value(data_dir, Config),
Sys = filename:join(proplists:get_value(priv_dir, Config), system_rsa),
@@ -220,10 +244,16 @@ init_per_group(host_user_key_differs, Config) ->
ssh_test_lib:setup_rsa_known_host(Sys, Usr),
Config;
init_per_group(key_cb, Config) ->
- DataDir = proplists:get_value(data_dir, Config),
- PrivDir = proplists:get_value(priv_dir, Config),
- ssh_test_lib:setup_dsa(DataDir, PrivDir),
- Config;
+ case lists:member('ssh-rsa',
+ ssh_transport:default_algorithms(public_key)) of
+ true ->
+ DataDir = proplists:get_value(data_dir, Config),
+ PrivDir = proplists:get_value(priv_dir, Config),
+ ssh_test_lib:setup_rsa(DataDir, PrivDir),
+ Config;
+ false ->
+ {skip, unsupported_pub_key}
+ end;
init_per_group(internal_error, Config) ->
DataDir = proplists:get_value(data_dir, Config),
PrivDir = proplists:get_value(priv_dir, Config),
@@ -293,7 +323,7 @@ end_per_group(rsa_pass_key, Config) ->
Config;
end_per_group(key_cb, Config) ->
PrivDir = proplists:get_value(priv_dir, Config),
- ssh_test_lib:clean_dsa(PrivDir),
+ ssh_test_lib:clean_rsa(PrivDir),
Config;
end_per_group(internal_error, Config) ->
PrivDir = proplists:get_value(priv_dir, Config),
@@ -750,7 +780,7 @@ key_callback_options(Config) when is_list(Config) ->
{user_dir, UserDir},
{failfun, fun ssh_test_lib:failfun/2}]),
- {ok, PrivKey} = file:read_file(filename:join(UserDir, "id_dsa")),
+ {ok, PrivKey} = file:read_file(filename:join(UserDir, "id_rsa")),
ConnectOpts = [{silently_accept_hosts, true},
{user_dir, NoPubKeyDir},
@@ -1206,7 +1236,7 @@ check_error("Invalid state") ->
ok;
check_error("Connection closed") ->
ok;
-check_error("Selection of key exchange algorithm failed") ->
+check_error("Selection of key exchange algorithm failed"++_) ->
ok;
check_error(Error) ->
ct:fail(Error).
diff --git a/lib/ssh/test/ssh_benchmark_SUITE.erl b/lib/ssh/test/ssh_benchmark_SUITE.erl
index c2bfc48449..85750f8fbd 100644
--- a/lib/ssh/test/ssh_benchmark_SUITE.erl
+++ b/lib/ssh/test/ssh_benchmark_SUITE.erl
@@ -30,7 +30,7 @@
suite() -> [{ct_hooks,[{ts_install_cth,[{nodenames,2}]}]},
- {timetrap,{minutes,3}}
+ {timetrap,{minutes,6}}
].
%%suite() -> [{ct_hooks,[ts_install_cth]}].
@@ -70,9 +70,12 @@ init_per_group(opensshc_erld, Config) ->
ssh_test_lib:setup_dsa(DataDir, UserDir),
ssh_test_lib:setup_rsa(DataDir, UserDir),
ssh_test_lib:setup_ecdsa("256", DataDir, UserDir),
+ AlgsD = ssh:default_algorithms(),
+ AlgsC = ssh_test_lib:default_algorithms(sshc),
Common = ssh_test_lib:intersect_bi_dir(
- ssh_test_lib:intersection(ssh:default_algorithms(),
- ssh_test_lib:default_algorithms(sshc))),
+ ssh_test_lib:intersection(AlgsD, AlgsC)),
+ ct:pal("~p~n~nErld:~n~p~n~nOpenSSHc:~n~p~n~nCommon:~n~p",
+ [inet:gethostname(), AlgsD, AlgsC, Common]),
[{c_kexs, ssh_test_lib:sshc(kex)},
{c_ciphers, ssh_test_lib:sshc(cipher)},
{common_algs, Common}
@@ -427,13 +430,20 @@ function_algs_times_sizes(EncDecs, L) ->
|| {Alg,Size,Time} <- lists:foldl(fun increment/2, [], Raw)].
function_ats_result({ssh_transport,encrypt,2}, #call{args=[S,Data]}) ->
- {{encrypt,S#ssh.encrypt}, size(Data)};
+ {{encrypt,S#ssh.encrypt}, binsize(Data)};
function_ats_result({ssh_transport,decrypt,2}, #call{args=[S,Data]}) ->
- {{decrypt,S#ssh.decrypt}, size(Data)};
+ {{decrypt,S#ssh.decrypt}, binsize(Data)};
function_ats_result({ssh_message,encode,1}, #call{result=Data}) ->
{encode, size(Data)};
function_ats_result({ssh_message,decode,1}, #call{args=[Data]}) ->
{decode, size(Data)}.
+
+binsize(B) when is_binary(B) -> size(B);
+binsize({B1,B2}) when is_binary(B1), is_binary(B2) -> size(B1) + size(B2);
+binsize({B1,B2,_}) when is_binary(B1), is_binary(B2) -> size(B1) + size(B2).
+
+
+
increment({Alg,Sz,T}, [{Alg,SumSz,SumT}|Acc]) ->
diff --git a/lib/ssh/test/ssh_key_cb.erl b/lib/ssh/test/ssh_key_cb.erl
index 388ec2ecc1..12ff79efcd 100644
--- a/lib/ssh/test/ssh_key_cb.erl
+++ b/lib/ssh/test/ssh_key_cb.erl
@@ -33,9 +33,9 @@ add_host_key(_, _, _) ->
is_host_key(_, _, _, _) ->
true.
-user_key('ssh-dss', Opts) ->
+user_key('ssh-rsa', Opts) ->
UserDir = proplists:get_value(user_dir, Opts),
- KeyFile = filename:join(filename:dirname(UserDir), "id_dsa"),
+ KeyFile = filename:join(filename:dirname(UserDir), "id_rsa"),
{ok, KeyBin} = file:read_file(KeyFile),
[Entry] = public_key:pem_decode(KeyBin),
Key = public_key:pem_entry_decode(Entry),
diff --git a/lib/ssh/test/ssh_key_cb_options.erl b/lib/ssh/test/ssh_key_cb_options.erl
index afccb34f0f..946a1254d0 100644
--- a/lib/ssh/test/ssh_key_cb_options.erl
+++ b/lib/ssh/test/ssh_key_cb_options.erl
@@ -33,7 +33,7 @@ add_host_key(_, _, _) ->
is_host_key(_, _, _, _) ->
true.
-user_key('ssh-dss', Opts) ->
+user_key('ssh-rsa', Opts) ->
KeyCbOpts = proplists:get_value(key_cb_private, Opts),
KeyBin = proplists:get_value(priv_key, KeyCbOpts),
[Entry] = public_key:pem_decode(KeyBin),
diff --git a/lib/ssh/test/ssh_options_SUITE.erl b/lib/ssh/test/ssh_options_SUITE.erl
index 86f5cb1746..758c20e2b8 100644
--- a/lib/ssh/test/ssh_options_SUITE.erl
+++ b/lib/ssh/test/ssh_options_SUITE.erl
@@ -67,7 +67,8 @@
hostkey_fingerprint_check_sha/1,
hostkey_fingerprint_check_sha256/1,
hostkey_fingerprint_check_sha384/1,
- hostkey_fingerprint_check_sha512/1
+ hostkey_fingerprint_check_sha512/1,
+ hostkey_fingerprint_check_list/1
]).
%%% Common test callbacks
@@ -112,6 +113,7 @@ all() ->
hostkey_fingerprint_check_sha256,
hostkey_fingerprint_check_sha384,
hostkey_fingerprint_check_sha512,
+ hostkey_fingerprint_check_list,
id_string_no_opt_client,
id_string_own_string_client,
id_string_random_client,
@@ -148,6 +150,7 @@ init_per_group(hardening_tests, Config) ->
DataDir = proplists:get_value(data_dir, Config),
PrivDir = proplists:get_value(priv_dir, Config),
ssh_test_lib:setup_dsa(DataDir, PrivDir),
+ ssh_test_lib:setup_rsa(DataDir, PrivDir),
Config;
init_per_group(dir_options, Config) ->
PrivDir = proplists:get_value(priv_dir, Config),
@@ -812,6 +815,8 @@ hostkey_fingerprint_check_sha384(Config) ->
hostkey_fingerprint_check_sha512(Config) ->
do_hostkey_fingerprint_check(Config, sha512).
+hostkey_fingerprint_check_list(Config) ->
+ do_hostkey_fingerprint_check(Config, [sha,md5,sha256]).
%%%----
do_hostkey_fingerprint_check(Config, HashAlg) ->
@@ -824,9 +829,10 @@ do_hostkey_fingerprint_check(Config, HashAlg) ->
supported_hash(old) -> true;
supported_hash(HashAlg) ->
- proplists:get_value(HashAlg,
- proplists:get_value(hashs, crypto:supports(), []),
- false).
+ Hs = if is_atom(HashAlg) -> [HashAlg];
+ is_list(HashAlg) -> HashAlg
+ end,
+ [] == (Hs -- proplists:get_value(hashs, crypto:supports(), [])).
really_do_hostkey_fingerprint_check(Config, HashAlg) ->
@@ -840,7 +846,7 @@ really_do_hostkey_fingerprint_check(Config, HashAlg) ->
%% All host key fingerprints. Trust that public_key has checked the ssh_hostkey_fingerprint
%% function since that function is used by the ssh client...
- FPs = [case HashAlg of
+ FPs0 = [case HashAlg of
old -> public_key:ssh_hostkey_fingerprint(Key);
_ -> public_key:ssh_hostkey_fingerprint(HashAlg, Key)
end
@@ -856,6 +862,9 @@ really_do_hostkey_fingerprint_check(Config, HashAlg) ->
_:_ -> []
end
end],
+ FPs = if is_atom(HashAlg) -> FPs0;
+ is_list(HashAlg) -> lists:concat(FPs0)
+ end,
ct:log("Fingerprints(~p) = ~p",[HashAlg,FPs]),
%% Start daemon with the public keys that we got fingerprints from
@@ -866,8 +875,12 @@ really_do_hostkey_fingerprint_check(Config, HashAlg) ->
FP_check_fun = fun(PeerName, FP) ->
ct:pal("PeerName = ~p, FP = ~p",[PeerName,FP]),
HostCheck = (Host == PeerName),
- FPCheck = lists:member(FP, FPs),
- ct:log("check ~p == ~p (~p) and ~n~p in ~p (~p)~n",
+ FPCheck =
+ if is_atom(HashAlg) -> lists:member(FP, FPs);
+ is_list(HashAlg) -> lists:all(fun(FP1) -> lists:member(FP1,FPs) end,
+ FP)
+ end,
+ ct:log("check ~p == ~p (~p) and ~n~p~n in ~p (~p)~n",
[PeerName,Host,HostCheck,FP,FPs,FPCheck]),
HostCheck and FPCheck
end,
diff --git a/lib/ssh/test/ssh_protocol_SUITE.erl b/lib/ssh/test/ssh_protocol_SUITE.erl
index 93d0bc2eb0..2c4fa8be88 100644
--- a/lib/ssh/test/ssh_protocol_SUITE.erl
+++ b/lib/ssh/test/ssh_protocol_SUITE.erl
@@ -34,6 +34,12 @@
-define(NEWLINE, <<"\r\n">>).
-define(REKEY_DATA_TMO, 65000).
+%%-define(DEFAULT_KEX, 'diffie-hellman-group1-sha1').
+-define(DEFAULT_KEX, 'diffie-hellman-group14-sha256').
+
+-define(CIPHERS, ['aes256-ctr','aes192-ctr','aes128-ctr','aes128-cbc','3des-cbc']).
+-define(DEFAULT_CIPHERS, [{client2server,?CIPHERS}, {server2client,?CIPHERS}]).
+
-define(v(Key, Config), proplists:get_value(Key, Config)).
-define(v(Key, Config, Default), proplists:get_value(Key, Config, Default)).
@@ -97,7 +103,9 @@ end_per_suite(Config) ->
init_per_testcase(no_common_alg_server_disconnects, Config) ->
- start_std_daemon(Config, [{preferred_algorithms,[{public_key,['ssh-rsa']}]}]);
+ start_std_daemon(Config, [{preferred_algorithms,[{public_key,['ssh-rsa']},
+ {cipher,?DEFAULT_CIPHERS}
+ ]}]);
init_per_testcase(TC, Config) when TC == gex_client_init_option_groups ;
TC == gex_client_init_option_groups_moduli_file ;
@@ -107,7 +115,10 @@ init_per_testcase(TC, Config) when TC == gex_client_init_option_groups ;
TC == gex_client_old_request_noexact ->
Opts = case TC of
gex_client_init_option_groups ->
- [{dh_gex_groups, [{2345, 3, 41}]}];
+ [{dh_gex_groups,
+ [{1023, 5,
+ 16#D9277DAA27DB131C03B108D41A76B4DA8ACEECCCAE73D2E48CEDAAA70B09EF9F04FB020DCF36C51B8E485B26FABE0337E24232BE4F4E693548310244937433FB1A5758195DC73B84ADEF8237472C46747D79DC0A2CF8A57CE8DBD8F466A20F8551E7B1B824B2E4987A8816D9BC0741C2798F3EBAD3ADEBCC78FCE6A770E2EC9F
+ }]}];
gex_client_init_option_groups_file ->
DataDir = proplists:get_value(data_dir, Config),
F = filename:join(DataDir, "dh_group_test"),
@@ -119,16 +130,19 @@ init_per_testcase(TC, Config) when TC == gex_client_init_option_groups ;
_ when TC == gex_server_gex_limit ;
TC == gex_client_old_request_exact ;
TC == gex_client_old_request_noexact ->
- [{dh_gex_groups, [{ 500, 3, 17},
- {1000, 7, 91},
- {3000, 5, 61}]},
- {dh_gex_limits,{500,1500}}
+ [{dh_gex_groups,
+ [{1023, 2, 16#D9277DAA27DB131C03B108D41A76B4DA8ACEECCCAE73D2E48CEDAAA70B09EF9F04FB020DCF36C51B8E485B26FABE0337E24232BE4F4E693548310244937433FB1A5758195DC73B84ADEF8237472C46747D79DC0A2CF8A57CE8DBD8F466A20F8551E7B1B824B2E4987A8816D9BC0741C2798F3EBAD3ADEBCC78FCE6A771225323},
+ {1535, 5, 16#D1391174233D315398FE2830AC6B2B66BCCD01B0A634899F339B7879F1DB85712E9DC4E4B1C6C8355570C1D2DCB53493DF18175A9C53D1128B592B4C72D97136F5542FEB981CBFE8012FDD30361F288A42BD5EBB08BAB0A5640E1AC48763B2ABD1945FEE36B2D55E1D50A1C86CED9DD141C4E7BE2D32D9B562A0F8E2E927020E91F58B57EB9ACDDA106A59302D7E92AD5F6E851A45FA1CFE86029A0F727F65A8F475F33572E2FDAB6073F0C21B8B54C3823DB2EF068927E5D747498F96E1E827},
+ {3071, 2, 16#DFAA35D35531E0F524F0099877A482D2AC8D589F374394A262A8E81A8A4FB2F65FADBAB395E05D147B29D486DFAA41F41597A256DA82A8B6F76401AED53D0253F956CEC610D417E42E3B287F7938FC24D8821B40BFA218A956EB7401BED6C96C68C7FD64F8170A8A76B953DD2F05420118F6B144D8FE48060A2BCB85056B478EDEF96DBC70427053ECD2958C074169E9550DD877779A3CF17C5AC850598C7586BEEA9DCFE9DD2A5FB62DF5F33EA7BC00CDA31B9D2DD721F979EA85B6E63F0C4E30BDDCD3A335522F9004C4ED50B15DC537F55324DD4FA119FB3F101467C6D7E1699DE4B3E3C478A8679B8EB3FA5C9B826B44530FD3BE9AD3063B240B0C853EBDDBD68DD940332D98F148D5D9E1DC977D60A0D23D0CA1198637FEAE4E7FAAC173AF2B84313A666CFB4EE6972811921D0AD867CE57F3BBC8D6CB057E3B66757BB46C9F72662624D44E14528327E3A7100E81A12C43C4E236118318CD90C8AA185BBB0C764826DAEAEE8DD245C5B451B4944E6122CC522D1C335C2EEF9429825A2B}
+ ]},
+ {dh_gex_limits, {1023,2000}}
];
_ ->
[]
end,
start_std_daemon(Config,
- [{preferred_algorithms, ssh:default_algorithms()}
+ [{preferred_algorithms,[{cipher,?DEFAULT_CIPHERS}
+ ]}
| Opts]);
init_per_testcase(_TestCase, Config) ->
check_std_daemon_works(Config, ?LINE).
@@ -237,7 +251,10 @@ lib_works_as_server(Config) ->
%% and finally connect to it with a regular Erlang SSH client:
{ok,_} = std_connect(HostPort, Config,
- [{preferred_algorithms,[{kex,['diffie-hellman-group1-sha1']}]}]
+ [{preferred_algorithms,[{kex,[?DEFAULT_KEX]},
+ {cipher,?DEFAULT_CIPHERS}
+ ]}
+ ]
).
%%--------------------------------------------------------------------
@@ -277,7 +294,9 @@ no_common_alg_server_disconnects(Config) ->
[{silently_accept_hosts, true},
{user_dir, user_dir(Config)},
{user_interaction, false},
- {preferred_algorithms,[{public_key,['ssh-dss']}]}
+ {preferred_algorithms,[{public_key,['ssh-dss']},
+ {cipher,?DEFAULT_CIPHERS}
+ ]}
]},
receive_hello,
{send, hello},
@@ -311,7 +330,7 @@ no_common_alg_client_disconnects(Config) ->
{match, #ssh_msg_kexinit{_='_'}, receive_msg},
{send, #ssh_msg_kexinit{ % with unsupported "SOME-UNSUPPORTED"
cookie = <<80,158,95,51,174,35,73,130,246,141,200,49,180,190,82,234>>,
- kex_algorithms = ["diffie-hellman-group1-sha1"],
+ kex_algorithms = [atom_to_list(?DEFAULT_KEX)],
server_host_key_algorithms = ["SOME-UNSUPPORTED"], % SIC!
encryption_algorithms_client_to_server = ["aes128-ctr"],
encryption_algorithms_server_to_client = ["aes128-ctr"],
@@ -332,7 +351,9 @@ no_common_alg_client_disconnects(Config) ->
%% and finally connect to it with a regular Erlang SSH client
%% which of course does not support SOME-UNSUPPORTED as pub key algo:
- Result = std_connect(HostPort, Config, [{preferred_algorithms,[{public_key,['ssh-dss']}]}]),
+ Result = std_connect(HostPort, Config, [{preferred_algorithms,[{public_key,['ssh-dss']},
+ {cipher,?DEFAULT_CIPHERS}
+ ]}]),
ct:log("Result of connect is ~p",[Result]),
receive
@@ -351,20 +372,25 @@ no_common_alg_client_disconnects(Config) ->
%%%--------------------------------------------------------------------
gex_client_init_option_groups(Config) ->
- do_gex_client_init(Config, {2000, 2048, 4000},
- {3,41}).
+ do_gex_client_init(Config, {512, 2048, 4000},
+ {5,16#D9277DAA27DB131C03B108D41A76B4DA8ACEECCCAE73D2E48CEDAAA70B09EF9F04FB020DCF36C51B8E485B26FABE0337E24232BE4F4E693548310244937433FB1A5758195DC73B84ADEF8237472C46747D79DC0A2CF8A57CE8DBD8F466A20F8551E7B1B824B2E4987A8816D9BC0741C2798F3EBAD3ADEBCC78FCE6A770E2EC9F}
+ ).
gex_client_init_option_groups_file(Config) ->
do_gex_client_init(Config, {2000, 2048, 4000},
- {5,61}).
+ {5, 16#DFAA35D35531E0F524F0099877A482D2AC8D589F374394A262A8E81A8A4FB2F65FADBAB395E05D147B29D486DFAA41F41597A256DA82A8B6F76401AED53D0253F956CEC610D417E42E3B287F7938FC24D8821B40BFA218A956EB7401BED6C96C68C7FD64F8170A8A76B953DD2F05420118F6B144D8FE48060A2BCB85056B478EDEF96DBC70427053ECD2958C074169E9550DD877779A3CF17C5AC850598C7586BEEA9DCFE9DD2A5FB62DF5F33EA7BC00CDA31B9D2DD721F979EA85B6E63F0C4E30BDDCD3A335522F9004C4ED50B15DC537F55324DD4FA119FB3F101467C6D7E1699DE4B3E3C478A8679B8EB3FA5C9B826B44530FD3BE9AD3063B240B0C853EBDDBD68DD940332D98F148D5D9E1DC977D60A0D23D0CA1198637FEAE4E7FAAC173AF2B84313A666CFB4EE6972811921D0AD867CE57F3BBC8D6CB057E3B66757BB46C9F72662624D44E14528327E3A7100E81A12C43C4E236118318CD90C8AA185BBB0C764826DAEAEE8DD245C5B451B4944E6122CC522D1C335C2EEF9424273F1F}
+ ).
gex_client_init_option_groups_moduli_file(Config) ->
do_gex_client_init(Config, {2000, 2048, 4000},
- {5,16#B7}).
+ {5, 16#DD2047CBDBB6F8E919BC63DE885B34D0FD6E3DB2887D8B46FE249886ACED6B46DFCD5553168185FD376122171CD8927E60120FA8D01F01D03E58281FEA9A1ABE97631C828E41815F34FDCDF787419FE13A3137649AA93D2584230DF5F24B5C00C88B7D7DE4367693428C730376F218A53E853B0851BAB7C53C15DA7839CBE1285DB63F6FA45C1BB59FE1C5BB918F0F8459D7EF60ACFF5C0FA0F3FCAD1C5F4CE4416D4F4B36B05CDCEBE4FB879E95847EFBC6449CD190248843BC7EDB145FBFC4EDBB1A3C959298F08F3BA2CFBE231BBE204BE6F906209D28BD4820AB3E7BE96C26AE8A809ADD8D1A5A0B008E9570FA4C4697E116B8119892C604293683A9635F}
+ ).
gex_server_gex_limit(Config) ->
do_gex_client_init(Config, {1000, 3000, 4000},
- {7,91}).
+ %% {7,91}).
+ {5, 16#D1391174233D315398FE2830AC6B2B66BCCD01B0A634899F339B7879F1DB85712E9DC4E4B1C6C8355570C1D2DCB53493DF18175A9C53D1128B592B4C72D97136F5542FEB981CBFE8012FDD30361F288A42BD5EBB08BAB0A5640E1AC48763B2ABD1945FEE36B2D55E1D50A1C86CED9DD141C4E7BE2D32D9B562A0F8E2E927020E91F58B57EB9ACDDA106A59302D7E92AD5F6E851A45FA1CFE86029A0F727F65A8F475F33572E2FDAB6073F0C21B8B54C3823DB2EF068927E5D747498F96E1E827}
+ ).
do_gex_client_init(Config, {Min,N,Max}, {G,P}) ->
@@ -376,7 +402,9 @@ do_gex_client_init(Config, {Min,N,Max}, {G,P}) ->
[{silently_accept_hosts, true},
{user_dir, user_dir(Config)},
{user_interaction, false},
- {preferred_algorithms,[{kex,['diffie-hellman-group-exchange-sha1']}]}
+ {preferred_algorithms,[{kex,['diffie-hellman-group-exchange-sha1']},
+ {cipher,?DEFAULT_CIPHERS}
+ ]}
]},
receive_hello,
{send, hello},
@@ -390,8 +418,15 @@ do_gex_client_init(Config, {Min,N,Max}, {G,P}) ->
).
%%%--------------------------------------------------------------------
-gex_client_old_request_exact(Config) -> do_gex_client_init_old(Config, 500, {3,17}).
-gex_client_old_request_noexact(Config) -> do_gex_client_init_old(Config, 800, {7,91}).
+gex_client_old_request_exact(Config) ->
+ do_gex_client_init_old(Config, 1023,
+ {2, 16#D9277DAA27DB131C03B108D41A76B4DA8ACEECCCAE73D2E48CEDAAA70B09EF9F04FB020DCF36C51B8E485B26FABE0337E24232BE4F4E693548310244937433FB1A5758195DC73B84ADEF8237472C46747D79DC0A2CF8A57CE8DBD8F466A20F8551E7B1B824B2E4987A8816D9BC0741C2798F3EBAD3ADEBCC78FCE6A771225323}
+ ).
+
+gex_client_old_request_noexact(Config) ->
+ do_gex_client_init_old(Config, 1400,
+ {5, 16#D1391174233D315398FE2830AC6B2B66BCCD01B0A634899F339B7879F1DB85712E9DC4E4B1C6C8355570C1D2DCB53493DF18175A9C53D1128B592B4C72D97136F5542FEB981CBFE8012FDD30361F288A42BD5EBB08BAB0A5640E1AC48763B2ABD1945FEE36B2D55E1D50A1C86CED9DD141C4E7BE2D32D9B562A0F8E2E927020E91F58B57EB9ACDDA106A59302D7E92AD5F6E851A45FA1CFE86029A0F727F65A8F475F33572E2FDAB6073F0C21B8B54C3823DB2EF068927E5D747498F96E1E827}
+ ).
do_gex_client_init_old(Config, N, {G,P}) ->
{ok,_} =
@@ -402,7 +437,9 @@ do_gex_client_init_old(Config, N, {G,P}) ->
[{silently_accept_hosts, true},
{user_dir, user_dir(Config)},
{user_interaction, false},
- {preferred_algorithms,[{kex,['diffie-hellman-group-exchange-sha1']}]}
+ {preferred_algorithms,[{kex,['diffie-hellman-group-exchange-sha1']},
+ {cipher,?DEFAULT_CIPHERS}
+ ]}
]},
receive_hello,
{send, hello},
@@ -572,7 +609,9 @@ client_handles_keyboard_interactive_0_pwds(Config) ->
%% and finally connect to it with a regular Erlang SSH client:
{ok,_} = std_connect(HostPort, Config,
- [{preferred_algorithms,[{kex,['diffie-hellman-group1-sha1']}]}]
+ [{preferred_algorithms,[{kex,[?DEFAULT_KEX]},
+ {cipher,?DEFAULT_CIPHERS}
+ ]}]
).
@@ -623,6 +662,7 @@ stop_apps(_Config) ->
setup_dirs(Config) ->
DataDir = proplists:get_value(data_dir, Config),
PrivDir = proplists:get_value(priv_dir, Config),
+ ssh_test_lib:setup_dsa(DataDir, PrivDir),
ssh_test_lib:setup_rsa(DataDir, PrivDir),
Config.
@@ -708,7 +748,9 @@ connect_and_kex(Config, InitialState) ->
ssh_trpt_test_lib:exec(
[{connect,
server_host(Config),server_port(Config),
- [{preferred_algorithms,[{kex,['diffie-hellman-group1-sha1']}]},
+ [{preferred_algorithms,[{kex,[?DEFAULT_KEX]},
+ {cipher,?DEFAULT_CIPHERS}
+ ]},
{silently_accept_hosts, true},
{user_dir, user_dir(Config)},
{user_interaction, false}]},
diff --git a/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test b/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test
index 2887bb4b60..87c4b4afc8 100644
--- a/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test
+++ b/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test
@@ -1,3 +1,3 @@
-{2222, 5, 61}.
-{1111, 7, 91}.
+{1023, 5, 16#D9277DAA27DB131C03B108D41A76B4DA8ACEECCCAE73D2E48CEDAAA70B09EF9F04FB020DCF36C51B8E485B26FABE0337E24232BE4F4E693548310244937433FB1A5758195DC73B84ADEF8237472C46747D79DC0A2CF8A57CE8DBD8F466A20F8551E7B1B824B2E4987A8816D9BC0741C2798F3EBAD3ADEBCC78FCE6A770E2EC9F}.
+{3071, 5, 16#DFAA35D35531E0F524F0099877A482D2AC8D589F374394A262A8E81A8A4FB2F65FADBAB395E05D147B29D486DFAA41F41597A256DA82A8B6F76401AED53D0253F956CEC610D417E42E3B287F7938FC24D8821B40BFA218A956EB7401BED6C96C68C7FD64F8170A8A76B953DD2F05420118F6B144D8FE48060A2BCB85056B478EDEF96DBC70427053ECD2958C074169E9550DD877779A3CF17C5AC850598C7586BEEA9DCFE9DD2A5FB62DF5F33EA7BC00CDA31B9D2DD721F979EA85B6E63F0C4E30BDDCD3A335522F9004C4ED50B15DC537F55324DD4FA119FB3F101467C6D7E1699DE4B3E3C478A8679B8EB3FA5C9B826B44530FD3BE9AD3063B240B0C853EBDDBD68DD940332D98F148D5D9E1DC977D60A0D23D0CA1198637FEAE4E7FAAC173AF2B84313A666CFB4EE6972811921D0AD867CE57F3BBC8D6CB057E3B66757BB46C9F72662624D44E14528327E3A7100E81A12C43C4E236118318CD90C8AA185BBB0C764826DAEAEE8DD245C5B451B4944E6122CC522D1C335C2EEF9424273F1F}.
diff --git a/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test.moduli b/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test.moduli
index f6995ba4c9..6d2b4bcb59 100644
--- a/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test.moduli
+++ b/lib/ssh/test/ssh_protocol_SUITE_data/dh_group_test.moduli
@@ -1,3 +1,2 @@
-20151021104105 2 6 100 2222 5 B7
-20151021104106 2 6 100 1111 5 4F
-
+20120821044046 2 6 100 1023 2 D9277DAA27DB131C03B108D41A76B4DA8ACEECCCAE73D2E48CEDAAA70B09EF9F04FB020DCF36C51B8E485B26FABE0337E24232BE4F4E693548310244937433FB1A5758195DC73B84ADEF8237472C46747D79DC0A2CF8A57CE8DBD8F466A20F8551E7B1B824B2E4987A8816D9BC0741C2798F3EBAD3ADEBCC78FCE6A7711F2C6B
+20120821050554 2 6 100 2047 5 DD2047CBDBB6F8E919BC63DE885B34D0FD6E3DB2887D8B46FE249886ACED6B46DFCD5553168185FD376122171CD8927E60120FA8D01F01D03E58281FEA9A1ABE97631C828E41815F34FDCDF787419FE13A3137649AA93D2584230DF5F24B5C00C88B7D7DE4367693428C730376F218A53E853B0851BAB7C53C15DA7839CBE1285DB63F6FA45C1BB59FE1C5BB918F0F8459D7EF60ACFF5C0FA0F3FCAD1C5F4CE4416D4F4B36B05CDCEBE4FB879E95847EFBC6449CD190248843BC7EDB145FBFC4EDBB1A3C959298F08F3BA2CFBE231BBE204BE6F906209D28BD4820AB3E7BE96C26AE8A809ADD8D1A5A0B008E9570FA4C4697E116B8119892C604293683A9635F
diff --git a/lib/ssh/test/ssh_sftp_SUITE_data/ssh_host_rsa_key b/lib/ssh/test/ssh_sftp_SUITE_data/ssh_host_rsa_key
new file mode 100644
index 0000000000..79968bdd7d
--- /dev/null
+++ b/lib/ssh/test/ssh_sftp_SUITE_data/ssh_host_rsa_key
@@ -0,0 +1,16 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIICXQIBAAKBgQDCZX+4FBDwZIh9y/Uxee1VJnEXlowpz2yDKwj8semM4q843337
+zbNfxHmladB1lpz2NqyxI175xMIJuDxogyZdsOxGnFAzAnthR4dqL/RWRWzjaxSB
+6IAO9SPYVVlrpZ+1hsjLW79fwXK/yc8VdhRuWTeQiRgYY2ek8+OKbOqz4QIDAQAB
+AoGANmvJzJO5hkLuvyDZHKfAnGTtpifcR1wtSa9DjdKUyn8vhKF0mIimnbnYQEmW
+NUUb3gXCZLi9PvkpRSVRrASDOZwcjoU/Kvww163vBUVb2cOZfFhyn6o2Sk88Tt++
+udH3hdjpf9i7jTtUkUe+QYPsia+wgvvrmn4QrahLAH86+kECQQDx5gFeXTME3cnW
+WMpFz3PPumduzjqgqMMWEccX4FtQkMX/gyGa5UC7OHFyh0N/gSWvPbRHa8A6YgIt
+n8DO+fh5AkEAzbqX4DOn8NY6xJIi42q7l/2jIA0RkB6P7YugW5NblhqBZ0XDnpA5
+sMt+rz+K07u9XZtxgh1xi7mNfwY6lEAMqQJBAJBEauCKmRj35Z6OyeQku59SPsnY
++SJEREVvSNw2lH9SOKQQ4wPsYlTGbvKtNVZgAcen91L5MmYfeckYE/fdIZECQQCt
+64zxsTnM1I8iFxj/gP/OYlJBikrKt8udWmjaghzvLMEw+T2DExJyb9ZNeT53+UMB
+m6O+B/4xzU/djvp+0hbhAkAemIt+rA5kTmYlFndhpvzkSSM8a2EXsO4XIPgGWCTT
+tQKS/tTly0ADMjN/TVy11+9d6zcqadNVuHXHGtR4W0GR
+-----END RSA PRIVATE KEY-----
+
diff --git a/lib/ssh/test/ssh_sftp_SUITE_data/ssh_host_rsa_key.pub b/lib/ssh/test/ssh_sftp_SUITE_data/ssh_host_rsa_key.pub
new file mode 100644
index 0000000000..75d2025c71
--- /dev/null
+++ b/lib/ssh/test/ssh_sftp_SUITE_data/ssh_host_rsa_key.pub
@@ -0,0 +1,5 @@
+---- BEGIN SSH2 PUBLIC KEY ----
+AAAAB3NzaC1yc2EAAAADAQABAAAAgQDCZX+4FBDwZIh9y/Uxee1VJnEXlowpz2yDKwj8
+semM4q843337zbNfxHmladB1lpz2NqyxI175xMIJuDxogyZdsOxGnFAzAnthR4dqL/RW
+RWzjaxSB6IAO9SPYVVlrpZ+1hsjLW79fwXK/yc8VdhRuWTeQiRgYY2ek8+OKbOqz4Q==
+---- END SSH2 PUBLIC KEY ----
diff --git a/lib/ssh/test/ssh_sftpd_SUITE.erl b/lib/ssh/test/ssh_sftpd_SUITE.erl
index 52a26110c4..6d18a980ee 100644
--- a/lib/ssh/test/ssh_sftpd_SUITE.erl
+++ b/lib/ssh/test/ssh_sftpd_SUITE.erl
@@ -65,7 +65,12 @@ all() ->
ver3_open_flags,
relpath,
sshd_read_file,
- ver6_basic].
+ ver6_basic,
+ access_outside_root,
+ root_with_cwd,
+ relative_path,
+ open_file_dir_v5,
+ open_file_dir_v6].
groups() ->
[].
@@ -117,6 +122,31 @@ init_per_testcase(TestCase, Config) ->
ver6_basic ->
SubSystems = [ssh_sftpd:subsystem_spec([{sftpd_vsn, 6}])],
ssh:daemon(0, [{subsystems, SubSystems}|Options]);
+ access_outside_root ->
+ %% Build RootDir/access_outside_root/a/b and set Root and CWD
+ BaseDir = filename:join(PrivDir, access_outside_root),
+ RootDir = filename:join(BaseDir, a),
+ CWD = filename:join(RootDir, b),
+ %% Make the directory chain:
+ ok = filelib:ensure_dir(filename:join(CWD, tmp)),
+ SubSystems = [ssh_sftpd:subsystem_spec([{root, RootDir},
+ {cwd, CWD}])],
+ ssh:daemon(0, [{subsystems, SubSystems}|Options]);
+ root_with_cwd ->
+ RootDir = filename:join(PrivDir, root_with_cwd),
+ CWD = filename:join(RootDir, home),
+ SubSystems = [ssh_sftpd:subsystem_spec([{root, RootDir}, {cwd, CWD}])],
+ ssh:daemon(0, [{subsystems, SubSystems}|Options]);
+ relative_path ->
+ SubSystems = [ssh_sftpd:subsystem_spec([{cwd, PrivDir}])],
+ ssh:daemon(0, [{subsystems, SubSystems}|Options]);
+ open_file_dir_v5 ->
+ SubSystems = [ssh_sftpd:subsystem_spec([{cwd, PrivDir}])],
+ ssh:daemon(0, [{subsystems, SubSystems}|Options]);
+ open_file_dir_v6 ->
+ SubSystems = [ssh_sftpd:subsystem_spec([{cwd, PrivDir},
+ {sftpd_vsn, 6}])],
+ ssh:daemon(0, [{subsystems, SubSystems}|Options]);
_ ->
SubSystems = [ssh_sftpd:subsystem_spec([])],
ssh:daemon(0, [{subsystems, SubSystems}|Options])
@@ -646,6 +676,133 @@ ver6_basic(Config) when is_list(Config) ->
open_file(PrivDir, Cm, Channel, ReqId,
?ACE4_READ_DATA bor ?ACE4_READ_ATTRIBUTES,
?SSH_FXF_OPEN_EXISTING).
+
+%%--------------------------------------------------------------------
+access_outside_root() ->
+    [{doc, "Try to access files outside the tree below RootDir"}].
+access_outside_root(Config) when is_list(Config) ->
+ PrivDir = proplists:get_value(priv_dir, Config),
+ BaseDir = filename:join(PrivDir, access_outside_root),
+ %% A file outside the tree below RootDir which is BaseDir/a
+ %% Make the file BaseDir/bad :
+ BadFilePath = filename:join([BaseDir, bad]),
+ ok = file:write_file(BadFilePath, <<>>),
+ {Cm, Channel} = proplists:get_value(sftp, Config),
+    %% Try to access a file parallel to the RootDir:
+ try_access("/../bad", Cm, Channel, 0),
+ %% Try to access the same file via the CWD which is /b relative to the RootDir:
+ try_access("../../bad", Cm, Channel, 1).
+
+
+try_access(Path, Cm, Channel, ReqId) ->
+ Return =
+ open_file(Path, Cm, Channel, ReqId,
+ ?ACE4_READ_DATA bor ?ACE4_READ_ATTRIBUTES,
+ ?SSH_FXF_OPEN_EXISTING),
+ ct:log("Try open ~p -> ~p",[Path,Return]),
+ case Return of
+ {ok, <<?SSH_FXP_HANDLE, ?UINT32(ReqId), _Handle0/binary>>, _} ->
+ ct:fail("Could open a file outside the root tree!");
+ {ok, <<?SSH_FXP_STATUS, ?UINT32(ReqId), ?UINT32(Code), Rest/binary>>, <<>>} ->
+ case Code of
+ ?SSH_FX_FILE_IS_A_DIRECTORY ->
+ ct:pal("Got the expected SSH_FX_FILE_IS_A_DIRECTORY status",[]),
+ ok;
+ ?SSH_FX_FAILURE ->
+ ct:pal("Got the expected SSH_FX_FAILURE status",[]),
+ ok;
+ _ ->
+ case Rest of
+ <<?UINT32(Len), Txt:Len/binary, _/binary>> ->
+ ct:fail("Got unexpected SSH_FX_code: ~p (~p)",[Code,Txt]);
+ _ ->
+ ct:fail("Got unexpected SSH_FX_code: ~p",[Code])
+ end
+ end;
+ _ ->
+            ct:fail("Completely unexpected return: ~p", [Return])
+ end.
+
+%%--------------------------------------------------------------------
+root_with_cwd() ->
+    [{doc, "Check that files are found when both root and cwd are specified"}].
+root_with_cwd(Config) when is_list(Config) ->
+ PrivDir = proplists:get_value(priv_dir, Config),
+ RootDir = filename:join(PrivDir, root_with_cwd),
+ CWD = filename:join(RootDir, home),
+ FileName = "root_with_cwd.txt",
+ FilePath = filename:join(CWD, FileName),
+ ok = filelib:ensure_dir(FilePath),
+ ok = file:write_file(FilePath ++ "0", <<>>),
+ ok = file:write_file(FilePath ++ "1", <<>>),
+ ok = file:write_file(FilePath ++ "2", <<>>),
+ {Cm, Channel} = proplists:get_value(sftp, Config),
+ ReqId0 = 0,
+ {ok, <<?SSH_FXP_HANDLE, ?UINT32(ReqId0), _Handle0/binary>>, _} =
+ open_file(FileName ++ "0", Cm, Channel, ReqId0,
+ ?ACE4_READ_DATA bor ?ACE4_READ_ATTRIBUTES,
+ ?SSH_FXF_OPEN_EXISTING),
+ ReqId1 = 1,
+ {ok, <<?SSH_FXP_HANDLE, ?UINT32(ReqId1), _Handle1/binary>>, _} =
+ open_file("./" ++ FileName ++ "1", Cm, Channel, ReqId1,
+ ?ACE4_READ_DATA bor ?ACE4_READ_ATTRIBUTES,
+ ?SSH_FXF_OPEN_EXISTING),
+ ReqId2 = 2,
+ {ok, <<?SSH_FXP_HANDLE, ?UINT32(ReqId2), _Handle2/binary>>, _} =
+ open_file("/home/" ++ FileName ++ "2", Cm, Channel, ReqId2,
+ ?ACE4_READ_DATA bor ?ACE4_READ_ATTRIBUTES,
+ ?SSH_FXF_OPEN_EXISTING).
+
+%%--------------------------------------------------------------------
+relative_path() ->
+ [{doc, "Test paths relative to CWD when opening a file handle."}].
+relative_path(Config) when is_list(Config) ->
+ PrivDir = proplists:get_value(priv_dir, Config),
+ FileName = "test_relative_path.txt",
+ FilePath = filename:join(PrivDir, FileName),
+ ok = filelib:ensure_dir(FilePath),
+ ok = file:write_file(FilePath, <<>>),
+ {Cm, Channel} = proplists:get_value(sftp, Config),
+ ReqId = 0,
+ {ok, <<?SSH_FXP_HANDLE, ?UINT32(ReqId), _Handle/binary>>, _} =
+ open_file(FileName, Cm, Channel, ReqId,
+ ?ACE4_READ_DATA bor ?ACE4_READ_ATTRIBUTES,
+ ?SSH_FXF_OPEN_EXISTING).
+
+%%--------------------------------------------------------------------
+open_file_dir_v5() ->
+ [{doc, "Test if open_file fails when opening existing directory."}].
+open_file_dir_v5(Config) when is_list(Config) ->
+ PrivDir = proplists:get_value(priv_dir, Config),
+ FileName = "open_file_dir_v5",
+ FilePath = filename:join(PrivDir, FileName),
+ ok = filelib:ensure_dir(FilePath),
+ ok = file:make_dir(FilePath),
+ {Cm, Channel} = proplists:get_value(sftp, Config),
+ ReqId = 0,
+ {ok, <<?SSH_FXP_STATUS, ?UINT32(ReqId),
+ ?UINT32(?SSH_FX_FAILURE), _/binary>>, _} =
+ open_file(FileName, Cm, Channel, ReqId,
+ ?ACE4_READ_DATA bor ?ACE4_READ_ATTRIBUTES,
+ ?SSH_FXF_OPEN_EXISTING).
+
+%%--------------------------------------------------------------------
+open_file_dir_v6() ->
+ [{doc, "Test if open_file fails when opening existing directory."}].
+open_file_dir_v6(Config) when is_list(Config) ->
+ PrivDir = proplists:get_value(priv_dir, Config),
+ FileName = "open_file_dir_v6",
+ FilePath = filename:join(PrivDir, FileName),
+ ok = filelib:ensure_dir(FilePath),
+ ok = file:make_dir(FilePath),
+ {Cm, Channel} = proplists:get_value(sftp, Config),
+ ReqId = 0,
+ {ok, <<?SSH_FXP_STATUS, ?UINT32(ReqId),
+ ?UINT32(?SSH_FX_FILE_IS_A_DIRECTORY), _/binary>>, _} =
+ open_file(FileName, Cm, Channel, ReqId,
+ ?ACE4_READ_DATA bor ?ACE4_READ_ATTRIBUTES,
+ ?SSH_FXF_OPEN_EXISTING).
+
%%--------------------------------------------------------------------
%% Internal functions ------------------------------------------------
%%--------------------------------------------------------------------
@@ -688,9 +845,7 @@ reply(Cm, Channel, RBuf) ->
30000 -> ct:fail("timeout ~p:~p",[?MODULE,?LINE])
end.
-
open_file(File, Cm, Channel, ReqId, Access, Flags) ->
-
Data = list_to_binary([?uint32(ReqId),
?binary(list_to_binary(File)),
?uint32(Access),
diff --git a/lib/ssh/test/ssh_sftpd_erlclient_SUITE.erl b/lib/ssh/test/ssh_sftpd_erlclient_SUITE.erl
index 56a33d6349..fd5157d603 100644
--- a/lib/ssh/test/ssh_sftpd_erlclient_SUITE.erl
+++ b/lib/ssh/test/ssh_sftpd_erlclient_SUITE.erl
@@ -65,6 +65,7 @@ init_per_suite(Config) ->
{ok, FileInfo} = file:read_file_info(FileName),
ok = file:write_file_info(FileName,
FileInfo#file_info{mode = 8#400}),
+ ssh_test_lib:setup_rsa(DataDir, PrivDir),
ssh_test_lib:setup_dsa(DataDir, PrivDir),
Config
end).
@@ -73,6 +74,7 @@ end_per_suite(Config) ->
UserDir = filename:join(proplists:get_value(priv_dir, Config), nopubkey),
file:del_dir(UserDir),
SysDir = proplists:get_value(priv_dir, Config),
+ ssh_test_lib:clean_rsa(SysDir),
ssh_test_lib:clean_dsa(SysDir),
ok.
diff --git a/lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/id_rsa b/lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/id_rsa
new file mode 100644
index 0000000000..9d7e0dd5fb
--- /dev/null
+++ b/lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/id_rsa
@@ -0,0 +1,15 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIICXAIBAAKBgQD1OET+3O/Bvj/dtjxDTXmj1oiJt4sIph5kGy0RfjoPrZfaS+CU
+DhakCmS6t2ivxWFgtpKWaoGMZMJqWj6F6ZsumyFl3FPBtujwY/35cgifrI9Ns4Tl
+zR1uuengNBmV+WRQ5cd9F2qS6Z8aDQihzt0r8JUqLcK+VQbrmNzboCCQQwIDAQAB
+AoGAPQEyqPTt8JUT7mRXuaacjFXiweAXhp9NEDpyi9eLOjtFe9lElZCrsUOkq47V
+TGUeRKEm9qSodfTbKPoqc8YaBJGJPhUaTAcha+7QcDdfHBvIsgxvU7ePVnlpXRp3
+CCUEMPhlnx6xBoTYP+fRU0e3+xJIPVyVCqX1jAdUMkzfRoECQQD6ux7B1QJAIWyK
+SGkbDUbBilNmzCFNgIpOP6PA+bwfi5d16diTpra5AX09keQABAo/KaP1PdV8Vg0p
+z4P3A7G3AkEA+l+AKG6m0kQTTBMJDqOdVPYwe+5GxunMaqmhokpEbuGsrZBl5Dvd
+WpcBjR7jmenrhKZRIuA+Fz5HPo/UQJPl1QJBAKxstDkeED8j/S2XoFhPKAJ+6t39
+sUVICVTIZQeXdmzHJXCcUSkw8+WEhakqw/3SyW0oaK2FSWQJFWJUZ+8eJj8CQEh3
+xeduB5kKnS9CvzdeghZqX6QvVosSdtlUmfUYW/BgH5PpHKTP8wTaeld3XldZTpMJ
+dKiMkUw2+XYROVUrubUCQD+Na1LhULlpn4ISEtIEfqpdlUhxDgO15Wg8USmsng+x
+ICliVOSQtwaZjm8kwaFt0W7XnpnDxbRs37vIEbIMWak=
+-----END RSA PRIVATE KEY-----
diff --git a/lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/ssh_host_rsa_key b/lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/ssh_host_rsa_key
new file mode 100644
index 0000000000..79968bdd7d
--- /dev/null
+++ b/lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/ssh_host_rsa_key
@@ -0,0 +1,16 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIICXQIBAAKBgQDCZX+4FBDwZIh9y/Uxee1VJnEXlowpz2yDKwj8semM4q843337
+zbNfxHmladB1lpz2NqyxI175xMIJuDxogyZdsOxGnFAzAnthR4dqL/RWRWzjaxSB
+6IAO9SPYVVlrpZ+1hsjLW79fwXK/yc8VdhRuWTeQiRgYY2ek8+OKbOqz4QIDAQAB
+AoGANmvJzJO5hkLuvyDZHKfAnGTtpifcR1wtSa9DjdKUyn8vhKF0mIimnbnYQEmW
+NUUb3gXCZLi9PvkpRSVRrASDOZwcjoU/Kvww163vBUVb2cOZfFhyn6o2Sk88Tt++
+udH3hdjpf9i7jTtUkUe+QYPsia+wgvvrmn4QrahLAH86+kECQQDx5gFeXTME3cnW
+WMpFz3PPumduzjqgqMMWEccX4FtQkMX/gyGa5UC7OHFyh0N/gSWvPbRHa8A6YgIt
+n8DO+fh5AkEAzbqX4DOn8NY6xJIi42q7l/2jIA0RkB6P7YugW5NblhqBZ0XDnpA5
+sMt+rz+K07u9XZtxgh1xi7mNfwY6lEAMqQJBAJBEauCKmRj35Z6OyeQku59SPsnY
++SJEREVvSNw2lH9SOKQQ4wPsYlTGbvKtNVZgAcen91L5MmYfeckYE/fdIZECQQCt
+64zxsTnM1I8iFxj/gP/OYlJBikrKt8udWmjaghzvLMEw+T2DExJyb9ZNeT53+UMB
+m6O+B/4xzU/djvp+0hbhAkAemIt+rA5kTmYlFndhpvzkSSM8a2EXsO4XIPgGWCTT
+tQKS/tTly0ADMjN/TVy11+9d6zcqadNVuHXHGtR4W0GR
+-----END RSA PRIVATE KEY-----
+
diff --git a/lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/ssh_host_rsa_key.pub b/lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/ssh_host_rsa_key.pub
new file mode 100644
index 0000000000..75d2025c71
--- /dev/null
+++ b/lib/ssh/test/ssh_sftpd_erlclient_SUITE_data/ssh_host_rsa_key.pub
@@ -0,0 +1,5 @@
+---- BEGIN SSH2 PUBLIC KEY ----
+AAAAB3NzaC1yc2EAAAADAQABAAAAgQDCZX+4FBDwZIh9y/Uxee1VJnEXlowpz2yDKwj8
+semM4q843337zbNfxHmladB1lpz2NqyxI175xMIJuDxogyZdsOxGnFAzAnthR4dqL/RW
+RWzjaxSB6IAO9SPYVVlrpZ+1hsjLW79fwXK/yc8VdhRuWTeQiRgYY2ek8+OKbOqz4Q==
+---- END SSH2 PUBLIC KEY ----
diff --git a/lib/ssh/test/ssh_test_lib.erl b/lib/ssh/test/ssh_test_lib.erl
index 286ac6e882..1673f52821 100644
--- a/lib/ssh/test/ssh_test_lib.erl
+++ b/lib/ssh/test/ssh_test_lib.erl
@@ -690,13 +690,16 @@ ssh_type() ->
ssh_type1() ->
try
+ ct:log("~p:~p os:find_executable(\"ssh\")",[?MODULE,?LINE]),
case os:find_executable("ssh") of
false ->
ct:log("~p:~p Executable \"ssh\" not found",[?MODULE,?LINE]),
not_found;
- _ ->
+ Path ->
+ ct:log("~p:~p Found \"ssh\" at ~p",[?MODULE,?LINE,Path]),
case os:cmd("ssh -V") of
- "OpenSSH" ++ _ ->
+ Version = "OpenSSH" ++ _ ->
+ ct:log("~p:~p Found OpenSSH ~p",[?MODULE,?LINE,Version]),
openSSH;
Str ->
ct:log("ssh client ~p is unknown",[Str]),
diff --git a/lib/ssh/test/ssh_to_openssh_SUITE.erl b/lib/ssh/test/ssh_to_openssh_SUITE.erl
index 86c3d5de26..4d5e5be2a1 100644
--- a/lib/ssh/test/ssh_to_openssh_SUITE.erl
+++ b/lib/ssh/test/ssh_to_openssh_SUITE.erl
@@ -36,7 +36,7 @@
%%--------------------------------------------------------------------
suite() ->
- [{timetrap,{seconds,20}}].
+ [{timetrap,{seconds,60}}].
all() ->
case os:find_executable("ssh") of
@@ -442,7 +442,7 @@ erlang_server_openssh_client_renegotiate(Config) ->
ssh_test_lib:rcv_expected(Expect, OpenSsh, ?TIMEOUT)
of
_ ->
- %% Unfortunatly we can't check that there has been a renegotiation, just trust OpenSSH.
+ %% Unfortunately we can't check that there has been a renegotiation, just trust OpenSSH.
ssh:stop_daemon(Pid)
catch
throw:{skip,R} -> {skip,R}
diff --git a/lib/ssh/test/ssh_trpt_test_lib.erl b/lib/ssh/test/ssh_trpt_test_lib.erl
index bc86000d81..0fa0f0c0e4 100644
--- a/lib/ssh/test/ssh_trpt_test_lib.erl
+++ b/lib/ssh/test/ssh_trpt_test_lib.erl
@@ -93,7 +93,10 @@ exec(Op, S0=#s{}) ->
exit:Exit ->
report_trace(exit, Exit, S1),
- exit(Exit)
+ exit(Exit);
+ Cls:Err ->
+ ct:pal("Class=~p, Error=~p", [Cls,Err]),
+ error("fooooooO")
end;
exec(Op, {ok,S=#s{}}) -> exec(Op, S);
exec(_, Error) -> Error.
diff --git a/lib/ssl/src/Makefile b/lib/ssl/src/Makefile
index 3dda1a3316..2e7df9792e 100644
--- a/lib/ssl/src/Makefile
+++ b/lib/ssl/src/Makefile
@@ -48,9 +48,17 @@ MODULES= \
dtls \
ssl_alert \
ssl_app \
- ssl_dist_sup\
ssl_sup \
+ ssl_admin_sup\
+ tls_connection_sup \
+ ssl_connection_sup \
+ ssl_listen_tracker_sup\
+ dtls_connection_sup \
+ dtls_udp_listener\
dtls_udp_sup \
+ ssl_dist_sup\
+ ssl_dist_admin_sup\
+ ssl_dist_connection_sup\
inet_tls_dist \
inet6_tls_dist \
ssl_certificate\
@@ -61,21 +69,18 @@ MODULES= \
dtls_connection \
ssl_config \
ssl_connection \
- tls_connection_sup \
- dtls_connection_sup \
tls_handshake \
dtls_handshake\
ssl_handshake\
ssl_manager \
ssl_session \
ssl_session_cache \
+ ssl_pem_cache \
ssl_crl\
ssl_crl_cache \
ssl_crl_hash_dir \
tls_socket \
dtls_socket \
- dtls_udp_listener\
- ssl_listen_tracker_sup \
tls_record \
dtls_record \
ssl_record \
diff --git a/lib/ssl/src/dtls_record.erl b/lib/ssl/src/dtls_record.erl
index f447897d59..0ee51c24b6 100644
--- a/lib/ssl/src/dtls_record.erl
+++ b/lib/ssl/src/dtls_record.erl
@@ -393,7 +393,7 @@ init_connection_state_seq(_, ConnnectionStates) ->
integer().
%%
%% Description: Returns the epoch the connection_state record
-%% that is currently defined as the current conection state.
+%% that is currently defined as the current connection state.
%%--------------------------------------------------------------------
current_connection_state_epoch(#{current_read := #{epoch := Epoch}},
read) ->
diff --git a/lib/ssl/src/ssl.app.src b/lib/ssl/src/ssl.app.src
index 9c5d795848..148989174d 100644
--- a/lib/ssl/src/ssl.app.src
+++ b/lib/ssl/src/ssl.app.src
@@ -10,12 +10,14 @@
tls_v1,
ssl_v3,
ssl_v2,
+ tls_connection_sup,
%% DTLS
dtls_connection,
dtls_handshake,
dtls_record,
dtls_socket,
dtls_v1,
+ dtls_connection_sup,
dtls_udp_listener,
dtls_udp_sup,
%% API
@@ -31,16 +33,19 @@
ssl_cipher,
ssl_srp_primes,
ssl_alert,
- ssl_listen_tracker_sup,
+ ssl_listen_tracker_sup, %% may be used by DTLS over SCTP
%% Erlang Distribution over SSL/TLS
inet_tls_dist,
inet6_tls_dist,
ssl_tls_dist_proxy,
ssl_dist_sup,
- %% SSL/TLS session handling
+ ssl_dist_connection_sup,
+ ssl_dist_admin_sup,
+ %% SSL/TLS session and cert handling
ssl_session,
ssl_session_cache,
ssl_manager,
+ ssl_pem_cache,
ssl_pkix_db,
ssl_certificate,
%% CRL handling
@@ -51,8 +56,8 @@
%% App structure
ssl_app,
ssl_sup,
- tls_connection_sup,
- dtls_connection_sup
+ ssl_admin_sup,
+ ssl_connection_sup
]},
{registered, [ssl_sup, ssl_manager]},
{applications, [crypto, public_key, kernel, stdlib]},
diff --git a/lib/ssl/src/ssl.erl b/lib/ssl/src/ssl.erl
index 0b7229b67e..4a5a7e25ea 100644
--- a/lib/ssl/src/ssl.erl
+++ b/lib/ssl/src/ssl.erl
@@ -577,7 +577,7 @@ prf(#sslsocket{pid = {Listen,_}}, _,_,_,_) when is_port(Listen) ->
%% Description: Clear the PEM cache
%%--------------------------------------------------------------------
clear_pem_cache() ->
- ssl_manager:clear_pem_cache().
+ ssl_pem_cache:clear().
%%---------------------------------------------------------------
-spec format_error({error, term()}) -> list().
diff --git a/lib/ssl/src/ssl_admin_sup.erl b/lib/ssl/src/ssl_admin_sup.erl
new file mode 100644
index 0000000000..9c96435753
--- /dev/null
+++ b/lib/ssl/src/ssl_admin_sup.erl
@@ -0,0 +1,95 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1998-2016. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+
+-module(ssl_admin_sup).
+
+-behaviour(supervisor).
+
+%% API
+-export([start_link/0, manager_opts/0]).
+
+%% Supervisor callback
+-export([init/1]).
+
+%%%=========================================================================
+%%% API
+%%%=========================================================================
+
+-spec start_link() -> {ok, pid()} | ignore | {error, term()}.
+
+start_link() ->
+ supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+%%%=========================================================================
+%%% Supervisor callback
+%%%=========================================================================
+
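+%% The PEM cache is started before the session/cert manager. With the
+%% rest_for_one strategy a crash of the cache also restarts the manager
+%% that references it, while a crash of the manager leaves the cache intact.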
+init([]) ->
+ PEMCache = pem_cache_child_spec(),
+ SessionCertManager = session_and_cert_manager_child_spec(),
+ {ok, {{rest_for_one, 10, 3600}, [PEMCache, SessionCertManager]}}.
+
+manager_opts() ->
+ CbOpts = case application:get_env(ssl, session_cb) of
+ {ok, Cb} when is_atom(Cb) ->
+ InitArgs = session_cb_init_args(),
+ [{session_cb, Cb}, {session_cb_init_args, InitArgs}];
+ _ ->
+ []
+ end,
+ case application:get_env(ssl, session_lifetime) of
+ {ok, Time} when is_integer(Time) ->
+ [{session_lifetime, Time}| CbOpts];
+ _ ->
+ CbOpts
+ end.
+
+%%--------------------------------------------------------------------
+%%% Internal functions
+%%--------------------------------------------------------------------
+
+pem_cache_child_spec() ->
+ Name = ssl_pem_cache,
+ StartFunc = {ssl_pem_cache, start_link, [[]]},
+ Restart = permanent,
+ Shutdown = 4000,
+ Modules = [ssl_pem_cache],
+ Type = worker,
+ {Name, StartFunc, Restart, Shutdown, Type, Modules}.
+
+session_and_cert_manager_child_spec() ->
+ Opts = manager_opts(),
+ Name = ssl_manager,
+ StartFunc = {ssl_manager, start_link, [Opts]},
+ Restart = permanent,
+ Shutdown = 4000,
+ Modules = [ssl_manager],
+ Type = worker,
+ {Name, StartFunc, Restart, Shutdown, Type, Modules}.
+
+session_cb_init_args() ->
+ case application:get_env(ssl, session_cb_init_args) of
+ {ok, Args} when is_list(Args) ->
+ Args;
+ _ ->
+ []
+ end.
diff --git a/lib/ssl/src/ssl_certificate.erl b/lib/ssl/src/ssl_certificate.erl
index f359655d85..8aa2aa4081 100644
--- a/lib/ssl/src/ssl_certificate.erl
+++ b/lib/ssl/src/ssl_certificate.erl
@@ -125,21 +125,21 @@ file_to_crls(File, DbHandle) ->
%% Description: Validates ssl/tls specific extensions
%%--------------------------------------------------------------------
validate(_,{extension, #'Extension'{extnID = ?'id-ce-extKeyUsage',
- extnValue = KeyUse}}, {Role, _,_, _, _}) ->
+ extnValue = KeyUse}}, UserState = {Role, _,_, _, _}) ->
case is_valid_extkey_usage(KeyUse, Role) of
true ->
- {valid, Role};
+ {valid, UserState};
false ->
{fail, {bad_cert, invalid_ext_key_usage}}
end;
-validate(_, {extension, _}, Role) ->
- {unknown, Role};
+validate(_, {extension, _}, UserState) ->
+ {unknown, UserState};
validate(_, {bad_cert, _} = Reason, _) ->
{fail, Reason};
-validate(_, valid, Role) ->
- {valid, Role};
-validate(_, valid_peer, Role) ->
- {valid, Role}.
+validate(_, valid, UserState) ->
+ {valid, UserState};
+validate(_, valid_peer, UserState) ->
+ {valid, UserState}.
%%--------------------------------------------------------------------
-spec is_valid_key_usage(list(), term()) -> boolean().
diff --git a/lib/ssl/src/ssl_config.erl b/lib/ssl/src/ssl_config.erl
index 0652d029c3..09d4c3e678 100644
--- a/lib/ssl/src/ssl_config.erl
+++ b/lib/ssl/src/ssl_config.erl
@@ -32,18 +32,20 @@ init(SslOpts, Role) ->
init_manager_name(SslOpts#ssl_options.erl_dist),
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbHandle, OwnCert}
+ {ok, #{pem_cache := PemCache} = Config}
= init_certificates(SslOpts, Role),
PrivateKey =
- init_private_key(PemCacheHandle, SslOpts#ssl_options.key, SslOpts#ssl_options.keyfile,
+ init_private_key(PemCache, SslOpts#ssl_options.key, SslOpts#ssl_options.keyfile,
SslOpts#ssl_options.password, Role),
- DHParams = init_diffie_hellman(PemCacheHandle, SslOpts#ssl_options.dh, SslOpts#ssl_options.dhfile, Role),
- {ok, CertDbRef, CertDbHandle, FileRefHandle, CacheHandle, CRLDbHandle, OwnCert, PrivateKey, DHParams}.
+ DHParams = init_diffie_hellman(PemCache, SslOpts#ssl_options.dh, SslOpts#ssl_options.dhfile, Role),
+ {ok, Config#{private_key => PrivateKey, dh_params => DHParams}}.
init_manager_name(false) ->
- put(ssl_manager, ssl_manager:manager_name(normal));
+ put(ssl_manager, ssl_manager:name(normal)),
+ put(ssl_pem_cache, ssl_pem_cache:name(normal));
init_manager_name(true) ->
- put(ssl_manager, ssl_manager:manager_name(dist)).
+ put(ssl_manager, ssl_manager:name(dist)),
+ put(ssl_pem_cache, ssl_pem_cache:name(dist)).
init_certificates(#ssl_options{cacerts = CaCerts,
cacertfile = CACertFile,
@@ -51,7 +53,7 @@ init_certificates(#ssl_options{cacerts = CaCerts,
cert = Cert,
crl_cache = CRLCache
}, Role) ->
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo} =
+ {ok, Config} =
try
Certs = case CaCerts of
undefined ->
@@ -59,41 +61,37 @@ init_certificates(#ssl_options{cacerts = CaCerts,
_ ->
{der, CaCerts}
end,
- {ok, _, _, _, _, _, _} = ssl_manager:connection_init(Certs, Role, CRLCache)
+ {ok,_} = ssl_manager:connection_init(Certs, Role, CRLCache)
catch
_:Reason ->
file_error(CACertFile, {cacertfile, Reason})
end,
- init_certificates(Cert, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle,
- CacheHandle, CRLDbInfo, CertFile, Role).
+ init_certificates(Cert, Config, CertFile, Role).
-init_certificates(undefined, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle,
- CRLDbInfo, <<>>, _) ->
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo, undefined};
+init_certificates(undefined, Config, <<>>, _) ->
+ {ok, Config#{own_certificate => undefined}};
-init_certificates(undefined, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle,
- CacheHandle, CRLDbInfo, CertFile, client) ->
+init_certificates(undefined, #{pem_cache := PemCache} = Config, CertFile, client) ->
try
%% Ignoring potential proxy-certificates see:
%% http://dev.globus.org/wiki/Security/ProxyFileFormat
- [OwnCert|_] = ssl_certificate:file_to_certificats(CertFile, PemCacheHandle),
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo, OwnCert}
+ [OwnCert|_] = ssl_certificate:file_to_certificats(CertFile, PemCache),
+ {ok, Config#{own_certificate => OwnCert}}
catch _Error:_Reason ->
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheHandle, CRLDbInfo, undefined}
- end;
+ {ok, Config#{own_certificate => undefined}}
+ end;
-init_certificates(undefined, CertDbRef, CertDbHandle, FileRefHandle,
- PemCacheHandle, CacheRef, CRLDbInfo, CertFile, server) ->
+init_certificates(undefined, #{pem_cache := PemCache} = Config, CertFile, server) ->
try
- [OwnCert|_] = ssl_certificate:file_to_certificats(CertFile, PemCacheHandle),
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, CRLDbInfo, OwnCert}
+ [OwnCert|_] = ssl_certificate:file_to_certificats(CertFile, PemCache),
+ {ok, Config#{own_certificate => OwnCert}}
catch
_:Reason ->
file_error(CertFile, {certfile, Reason})
end;
-init_certificates(Cert, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, CRLDbInfo, _, _) ->
- {ok, CertDbRef, CertDbHandle, FileRefHandle, PemCacheHandle, CacheRef, CRLDbInfo, Cert}.
-
+init_certificates(Cert, Config, _, _) ->
+ {ok, Config#{own_certificate => Cert}}.
+
init_private_key(_, undefined, <<>>, _Password, _Client) ->
undefined;
init_private_key(DbHandle, undefined, KeyFile, Password, _) ->
@@ -135,6 +133,8 @@ file_error(File, Throw) ->
case Throw of
{Opt,{badmatch, {error, {badmatch, Error}}}} ->
throw({options, {Opt, binary_to_list(File), Error}});
+ {Opt, {badmatch, Error}} ->
+ throw({options, {Opt, binary_to_list(File), Error}});
_ ->
throw(Throw)
end.
diff --git a/lib/ssl/src/ssl_connection.erl b/lib/ssl/src/ssl_connection.erl
index 6ed2fc83da..0c17891fbc 100644
--- a/lib/ssl/src/ssl_connection.erl
+++ b/lib/ssl/src/ssl_connection.erl
@@ -323,8 +323,14 @@ handle_session(#server_hello{cipher_suite = CipherSuite,
-spec ssl_config(#ssl_options{}, client | server, #state{}) -> #state{}.
%%--------------------------------------------------------------------
ssl_config(Opts, Role, State) ->
- {ok, Ref, CertDbHandle, FileRefHandle, CacheHandle, CRLDbInfo,
- OwnCert, Key, DHParams} =
+ {ok, #{cert_db_ref := Ref,
+ cert_db_handle := CertDbHandle,
+ fileref_db_handle := FileRefHandle,
+ session_cache := CacheHandle,
+ crl_db_info := CRLDbHandle,
+ private_key := Key,
+ dh_params := DHParams,
+ own_certificate := OwnCert}} =
ssl_config:init(Opts, Role),
Handshake = ssl_handshake:init_handshake_history(),
TimeStamp = erlang:monotonic_time(),
@@ -335,7 +341,7 @@ ssl_config(Opts, Role, State) ->
file_ref_db = FileRefHandle,
cert_db_ref = Ref,
cert_db = CertDbHandle,
- crl_db = CRLDbInfo,
+ crl_db = CRLDbHandle,
session_cache = CacheHandle,
private_key = Key,
diffie_hellman_params = DHParams,
@@ -1011,7 +1017,7 @@ terminate(_, _, #state{terminated = true}) ->
%% Happens when user closes the connection using ssl:close/1
%% we want to guarantee that Transport:close has been called
%% when ssl:close/1 returns unless it is a downgrade where
- %% we want to guarantee that close alert is recived before
+ %% we want to guarantee that close alert is received before
%% returning. In both cases terminate has been run manually
%% before run by gen_statem which will end up here
ok;
@@ -2428,16 +2434,23 @@ handle_sni_extension(#sni{hostname = Hostname}, State0) ->
undefined ->
State0;
_ ->
- {ok, Ref, CertDbHandle, FileRefHandle, CacheHandle, CRLDbHandle, OwnCert, Key, DHParams} =
- ssl_config:init(NewOptions, State0#state.role),
- State0#state{
- session = State0#state.session#session{own_certificate = OwnCert},
- file_ref_db = FileRefHandle,
- cert_db_ref = Ref,
- cert_db = CertDbHandle,
- crl_db = CRLDbHandle,
- session_cache = CacheHandle,
- private_key = Key,
+ {ok, #{cert_db_ref := Ref,
+ cert_db_handle := CertDbHandle,
+ fileref_db_handle := FileRefHandle,
+ session_cache := CacheHandle,
+ crl_db_info := CRLDbHandle,
+ private_key := Key,
+ dh_params := DHParams,
+ own_certificate := OwnCert}} =
+ ssl_config:init(NewOptions, State0#state.role),
+ State0#state{
+ session = State0#state.session#session{own_certificate = OwnCert},
+ file_ref_db = FileRefHandle,
+ cert_db_ref = Ref,
+ cert_db = CertDbHandle,
+ crl_db = CRLDbHandle,
+ session_cache = CacheHandle,
+ private_key = Key,
diffie_hellman_params = DHParams,
ssl_options = NewOptions,
sni_hostname = Hostname
diff --git a/lib/ssl/src/ssl_connection_sup.erl b/lib/ssl/src/ssl_connection_sup.erl
new file mode 100644
index 0000000000..1a1f43e683
--- /dev/null
+++ b/lib/ssl/src/ssl_connection_sup.erl
@@ -0,0 +1,101 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1998-2016. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+
+-module(ssl_connection_sup).
+
+-behaviour(supervisor).
+
+%% API
+-export([start_link/0]).
+
+%% Supervisor callback
+-export([init/1]).
+
+%%%=========================================================================
+%%% API
+%%%=========================================================================
+
+-spec start_link() -> {ok, pid()} | ignore | {error, term()}.
+
+start_link() ->
+ supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+%%%=========================================================================
+%%% Supervisor callback
+%%%=========================================================================
+
+init([]) ->
+
+ TLSConnectionManager = tls_connection_manager_child_spec(),
+ %% Handles emulated options so that they are inherited by the accept
+ %% socket, even when setopts is performed on the listen socket
+ ListenOptionsTracker = listen_options_tracker_child_spec(),
+
+ DTLSConnectionManager = dtls_connection_manager_child_spec(),
+ DTLSUdpListeners = dtls_udp_listeners_spec(),
+
+ {ok, {{one_for_one, 10, 3600}, [TLSConnectionManager,
+ ListenOptionsTracker,
+ DTLSConnectionManager,
+ DTLSUdpListeners
+ ]}}.
+
+
+%%--------------------------------------------------------------------
+%%% Internal functions
+%%--------------------------------------------------------------------
+
+tls_connection_manager_child_spec() ->
+ Name = tls_connection,
+ StartFunc = {tls_connection_sup, start_link, []},
+ Restart = permanent,
+ Shutdown = 4000,
+ Modules = [tls_connection_sup],
+ Type = supervisor,
+ {Name, StartFunc, Restart, Shutdown, Type, Modules}.
+
+dtls_connection_manager_child_spec() ->
+ Name = dtls_connection,
+ StartFunc = {dtls_connection_sup, start_link, []},
+ Restart = permanent,
+ Shutdown = 4000,
+ Modules = [dtls_connection_sup],
+ Type = supervisor,
+ {Name, StartFunc, Restart, Shutdown, Type, Modules}.
+
+listen_options_tracker_child_spec() ->
+ Name = tls_socket,
+ StartFunc = {ssl_listen_tracker_sup, start_link, []},
+ Restart = permanent,
+ Shutdown = 4000,
+ Modules = [tls_socket],
+ Type = supervisor,
+ {Name, StartFunc, Restart, Shutdown, Type, Modules}.
+
+dtls_udp_listeners_spec() ->
+ Name = dtls_udp_listener,
+ StartFunc = {dtls_udp_sup, start_link, []},
+ Restart = permanent,
+ Shutdown = 4000,
+ Modules = [],
+ Type = supervisor,
+ {Name, StartFunc, Restart, Shutdown, Type, Modules}.
diff --git a/lib/ssl/src/ssl_crl.erl b/lib/ssl/src/ssl_crl.erl
index fc60bdba67..33375b5e09 100644
--- a/lib/ssl/src/ssl_crl.erl
+++ b/lib/ssl/src/ssl_crl.erl
@@ -29,7 +29,7 @@
-export([trusted_cert_and_path/3]).
-trusted_cert_and_path(CRL, {SerialNumber, Issuer},{Db, DbRef} = DbHandle) ->
+trusted_cert_and_path(CRL, {SerialNumber, Issuer},{_, {Db, DbRef}} = DbHandle) ->
case ssl_pkix_db:lookup_trusted_cert(Db, DbRef, SerialNumber, Issuer) of
undefined ->
trusted_cert_and_path(CRL, issuer_not_found, DbHandle);
@@ -37,17 +37,34 @@ trusted_cert_and_path(CRL, {SerialNumber, Issuer},{Db, DbRef} = DbHandle) ->
{ok, Root, Chain} = ssl_certificate:certificate_chain(OtpCert, Db, DbRef),
{ok, Root, lists:reverse(Chain)}
end;
-
-trusted_cert_and_path(CRL, issuer_not_found, {Db, DbRef} = DbHandle) ->
- case find_issuer(CRL, DbHandle) of
+trusted_cert_and_path(CRL, issuer_not_found, {CertPath, {Db, DbRef}}) ->
+ case find_issuer(CRL, {certpath,
+ [{Der, public_key:pkix_decode_cert(Der,otp)} || Der <- CertPath]}) of
{ok, OtpCert} ->
{ok, Root, Chain} = ssl_certificate:certificate_chain(OtpCert, Db, DbRef),
{ok, Root, lists:reverse(Chain)};
{error, issuer_not_found} ->
- {ok, unknown_crl_ca, []}
- end.
+ trusted_cert_and_path(CRL, issuer_not_found, {Db, DbRef})
+ end;
+trusted_cert_and_path(CRL, issuer_not_found, {Db, DbRef} = DbInfo) ->
+ case find_issuer(CRL, DbInfo) of
+ {ok, OtpCert} ->
+ {ok, Root, Chain} = ssl_certificate:certificate_chain(OtpCert, Db, DbRef),
+ {ok, Root, lists:reverse(Chain)};
+ {error, issuer_not_found} ->
+ {error, unknown_ca}
+ end.
-find_issuer(CRL, {Db,DbRef}) ->
+find_issuer(CRL, {certpath = Db, DbRef}) ->
+ Issuer = public_key:pkix_normalize_name(public_key:pkix_crl_issuer(CRL)),
+ IsIssuerFun =
+ fun({_Der,ErlCertCandidate}, Acc) ->
+ verify_crl_issuer(CRL, ErlCertCandidate, Issuer, Acc);
+ (_, Acc) ->
+ Acc
+ end,
+ find_issuer(IsIssuerFun, Db, DbRef);
+find_issuer(CRL, {Db, DbRef}) ->
Issuer = public_key:pkix_normalize_name(public_key:pkix_crl_issuer(CRL)),
IsIssuerFun =
fun({_Key, {_Der,ErlCertCandidate}}, Acc) ->
@@ -55,26 +72,33 @@ find_issuer(CRL, {Db,DbRef}) ->
(_, Acc) ->
Acc
end,
- if is_reference(DbRef) -> % actual DB exists
- try ssl_pkix_db:foldl(IsIssuerFun, issuer_not_found, Db) of
- issuer_not_found ->
- {error, issuer_not_found}
- catch
- {ok, _} = Result ->
- Result
- end;
- is_tuple(DbRef), element(1,DbRef) =:= extracted -> % cache bypass byproduct
- {extracted, CertsData} = DbRef,
- Certs = [Entry || {decoded, Entry} <- CertsData],
- try lists:foldl(IsIssuerFun, issuer_not_found, Certs) of
- issuer_not_found ->
- {error, issuer_not_found}
- catch
- {ok, _} = Result ->
- Result
- end
- end.
+ find_issuer(IsIssuerFun, Db, DbRef).
+find_issuer(IsIssuerFun, certpath, Certs) ->
+ try lists:foldl(IsIssuerFun, issuer_not_found, Certs) of
+ issuer_not_found ->
+ {error, issuer_not_found}
+ catch
+ {ok, _} = Result ->
+ Result
+ end;
+find_issuer(IsIssuerFun, extracted, CertsData) ->
+ Certs = [Entry || {decoded, Entry} <- CertsData],
+ try lists:foldl(IsIssuerFun, issuer_not_found, Certs) of
+ issuer_not_found ->
+ {error, issuer_not_found}
+ catch
+ {ok, _} = Result ->
+ Result
+ end;
+find_issuer(IsIssuerFun, Db, _) ->
+ try ssl_pkix_db:foldl(IsIssuerFun, issuer_not_found, Db) of
+ issuer_not_found ->
+ {error, issuer_not_found}
+ catch
+ {ok, _} = Result ->
+ Result
+ end.
verify_crl_issuer(CRL, ErlCertCandidate, Issuer, NotIssuer) ->
TBSCert = ErlCertCandidate#'OTPCertificate'.tbsCertificate,
diff --git a/lib/ssl/src/ssl_dist_admin_sup.erl b/lib/ssl/src/ssl_dist_admin_sup.erl
new file mode 100644
index 0000000000..f60806c4cb
--- /dev/null
+++ b/lib/ssl/src/ssl_dist_admin_sup.erl
@@ -0,0 +1,74 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2016-2016. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+
+-module(ssl_dist_admin_sup).
+
+-behaviour(supervisor).
+
+%% API
+-export([start_link/0]).
+
+%% Supervisor callback
+-export([init/1]).
+
+%%%=========================================================================
+%%% API
+%%%=========================================================================
+
+-spec start_link() -> {ok, pid()} | ignore | {error, term()}.
+
+start_link() ->
+ supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+%%%=========================================================================
+%%% Supervisor callback
+%%%=========================================================================
+
+init([]) ->
+ PEMCache = pem_cache_child_spec(),
+ SessionCertManager = session_and_cert_manager_child_spec(),
+ {ok, {{rest_for_one, 10, 3600}, [PEMCache, SessionCertManager]}}.
+
+
+%%--------------------------------------------------------------------
+%%% Internal functions
+%%--------------------------------------------------------------------
+
+pem_cache_child_spec() ->
+ Name = ssl_pem_cache_dist,
+ StartFunc = {ssl_pem_cache, start_link_dist, [[]]},
+ Restart = permanent,
+ Shutdown = 4000,
+ Modules = [ssl_pem_cache],
+ Type = worker,
+ {Name, StartFunc, Restart, Shutdown, Type, Modules}.
+
+session_and_cert_manager_child_spec() ->
+ Opts = ssl_admin_sup:manager_opts(),
+ Name = ssl_dist_manager,
+ StartFunc = {ssl_manager, start_link_dist, [Opts]},
+ Restart = permanent,
+ Shutdown = 4000,
+ Modules = [ssl_manager],
+ Type = worker,
+ {Name, StartFunc, Restart, Shutdown, Type, Modules}.
+
diff --git a/lib/ssl/src/ssl_dist_connection_sup.erl b/lib/ssl/src/ssl_dist_connection_sup.erl
new file mode 100644
index 0000000000..e5842c866e
--- /dev/null
+++ b/lib/ssl/src/ssl_dist_connection_sup.erl
@@ -0,0 +1,79 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 1998-2016. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%
+
+-module(ssl_dist_connection_sup).
+
+-behaviour(supervisor).
+
+%% API
+-export([start_link/0]).
+
+%% Supervisor callback
+-export([init/1]).
+
+%%%=========================================================================
+%%% API
+%%%=========================================================================
+
+-spec start_link() -> {ok, pid()} | ignore | {error, term()}.
+
+start_link() ->
+ supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+%%%=========================================================================
+%%% Supervisor callback
+%%%=========================================================================
+
+init([]) ->
+
+ TLSConnectionManager = tls_connection_manager_child_spec(),
+ %% Handles emulated options so that they are inherited by the accept
+ %% socket, even when setopts is performed on the listen socket
+ ListenOptionsTracker = listen_options_tracker_child_spec(),
+
+ {ok, {{one_for_one, 10, 3600}, [TLSConnectionManager,
+ ListenOptionsTracker
+ ]}}.
+
+
+%%--------------------------------------------------------------------
+%%% Internal functions
+%%--------------------------------------------------------------------
+
+tls_connection_manager_child_spec() ->
+ Name = dist_tls_connection,
+ StartFunc = {tls_connection_sup, start_link_dist, []},
+ Restart = permanent,
+ Shutdown = 4000,
+ Modules = [tls_connection_sup],
+ Type = supervisor,
+ {Name, StartFunc, Restart, Shutdown, Type, Modules}.
+
+listen_options_tracker_child_spec() ->
+ Name = dist_tls_socket,
+ StartFunc = {ssl_listen_tracker_sup, start_link_dist, []},
+ Restart = permanent,
+ Shutdown = 4000,
+ Modules = [tls_socket],
+ Type = supervisor,
+ {Name, StartFunc, Restart, Shutdown, Type, Modules}.
+
diff --git a/lib/ssl/src/ssl_dist_sup.erl b/lib/ssl/src/ssl_dist_sup.erl
index d47cd76bf5..690b896919 100644
--- a/lib/ssl/src/ssl_dist_sup.erl
+++ b/lib/ssl/src/ssl_dist_sup.erl
@@ -44,34 +44,29 @@ start_link() ->
%%%=========================================================================
init([]) ->
- SessionCertManager = session_and_cert_manager_child_spec(),
- ConnetionManager = connection_manager_child_spec(),
- ListenOptionsTracker = listen_options_tracker_child_spec(),
+ AdminSup = ssl_admin_child_spec(),
+ ConnectionSup = ssl_connection_sup(),
ProxyServer = proxy_server_child_spec(),
-
- {ok, {{one_for_all, 10, 3600}, [SessionCertManager, ConnetionManager,
- ListenOptionsTracker,
- ProxyServer]}}.
+ {ok, {{one_for_all, 10, 3600}, [AdminSup, ProxyServer, ConnectionSup]}}.
%%--------------------------------------------------------------------
%%% Internal functions
%%--------------------------------------------------------------------
-session_and_cert_manager_child_spec() ->
- Opts = ssl_sup:manager_opts(),
- Name = ssl_manager_dist,
- StartFunc = {ssl_manager, start_link_dist, [Opts]},
+ssl_admin_child_spec() ->
+ Name = ssl_dist_admin_sup,
+ StartFunc = {ssl_dist_admin_sup, start_link , []},
Restart = permanent,
Shutdown = 4000,
- Modules = [ssl_manager],
- Type = worker,
+ Modules = [ssl_admin_sup],
+ Type = supervisor,
{Name, StartFunc, Restart, Shutdown, Type, Modules}.
-connection_manager_child_spec() ->
- Name = ssl_connection_dist,
- StartFunc = {tls_connection_sup, start_link_dist, []},
- Restart = permanent,
- Shutdown = infinity,
- Modules = [tls_connection_sup],
+ssl_connection_sup() ->
+ Name = ssl_dist_connection_sup,
+ StartFunc = {ssl_dist_connection_sup, start_link, []},
+ Restart = permanent,
+ Shutdown = 4000,
+ Modules = [ssl_connection_sup],
Type = supervisor,
{Name, StartFunc, Restart, Shutdown, Type, Modules}.
@@ -83,12 +78,3 @@ proxy_server_child_spec() ->
Modules = [ssl_tls_dist_proxy],
Type = worker,
{Name, StartFunc, Restart, Shutdown, Type, Modules}.
-
-listen_options_tracker_child_spec() ->
- Name = tls_socket_dist,
- StartFunc = {ssl_listen_tracker_sup, start_link_dist, []},
- Restart = permanent,
- Shutdown = 4000,
- Modules = [tls_socket],
- Type = supervisor,
- {Name, StartFunc, Restart, Shutdown, Type, Modules}.
diff --git a/lib/ssl/src/ssl_handshake.erl b/lib/ssl/src/ssl_handshake.erl
index 4acc745c5f..cb61c82334 100644
--- a/lib/ssl/src/ssl_handshake.erl
+++ b/lib/ssl/src/ssl_handshake.erl
@@ -397,14 +397,13 @@ verify_signature(_, Hash, {HashAlgo, _SignAlg}, Signature,
%%--------------------------------------------------------------------
certify(#certificate{asn1_certificates = ASN1Certs}, CertDbHandle, CertDbRef,
MaxPathLen, _Verify, ValidationFunAndState0, PartialChain, CRLCheck, CRLDbHandle, Role) ->
- [PeerCert | _] = ASN1Certs,
-
- ValidationFunAndState = validation_fun_and_state(ValidationFunAndState0, Role,
- CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle),
-
+ [PeerCert | _] = ASN1Certs,
try
{TrustedCert, CertPath} =
ssl_certificate:trusted_cert_and_path(ASN1Certs, CertDbHandle, CertDbRef, PartialChain),
+ ValidationFunAndState = validation_fun_and_state(ValidationFunAndState0, Role,
+ CertDbHandle, CertDbRef,
+ CRLCheck, CRLDbHandle, CertPath),
case public_key:pkix_path_validation(TrustedCert,
CertPath,
[{max_path_length, MaxPathLen},
@@ -1541,7 +1540,8 @@ sni1(Hostname) ->
%%--------------------------------------------------------------------
%%% Internal functions
%%--------------------------------------------------------------------
-validation_fun_and_state({Fun, UserState0}, Role, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle) ->
+validation_fun_and_state({Fun, UserState0}, Role, CertDbHandle, CertDbRef,
+ CRLCheck, CRLDbHandle, CertPath) ->
{fun(OtpCert, {extension, _} = Extension, {SslState, UserState}) ->
case ssl_certificate:validate(OtpCert,
Extension,
@@ -1550,22 +1550,25 @@ validation_fun_and_state({Fun, UserState0}, Role, CertDbHandle, CertDbRef, CRLC
{valid, {NewSslState, UserState}};
{fail, Reason} ->
apply_user_fun(Fun, OtpCert, Reason, UserState,
- SslState);
+ SslState, CertPath);
{unknown, _} ->
apply_user_fun(Fun, OtpCert,
- Extension, UserState, SslState)
+ Extension, UserState, SslState, CertPath)
end;
(OtpCert, VerifyResult, {SslState, UserState}) ->
apply_user_fun(Fun, OtpCert, VerifyResult, UserState,
- SslState)
+ SslState, CertPath)
end, {{Role, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle}, UserState0}};
-validation_fun_and_state(undefined, Role, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle) ->
+validation_fun_and_state(undefined, Role, CertDbHandle, CertDbRef,
+ CRLCheck, CRLDbHandle, CertPath) ->
{fun(OtpCert, {extension, _} = Extension, SslState) ->
ssl_certificate:validate(OtpCert,
Extension,
SslState);
- (OtpCert, VerifyResult, SslState) when (VerifyResult == valid) or (VerifyResult == valid_peer) ->
- case crl_check(OtpCert, CRLCheck, CertDbHandle, CertDbRef, CRLDbHandle, VerifyResult) of
+ (OtpCert, VerifyResult, SslState) when (VerifyResult == valid) or
+ (VerifyResult == valid_peer) ->
+ case crl_check(OtpCert, CRLCheck, CertDbHandle, CertDbRef,
+ CRLDbHandle, VerifyResult, CertPath) of
valid ->
{VerifyResult, SslState};
Reason ->
@@ -1578,20 +1581,21 @@ validation_fun_and_state(undefined, Role, CertDbHandle, CertDbRef, CRLCheck, CRL
end, {Role, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle}}.
apply_user_fun(Fun, OtpCert, VerifyResult, UserState0,
- {_, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle} = SslState) when
+ {_, CertDbHandle, CertDbRef, CRLCheck, CRLDbHandle} = SslState, CertPath) when
(VerifyResult == valid) or (VerifyResult == valid_peer) ->
case Fun(OtpCert, VerifyResult, UserState0) of
{Valid, UserState} when (Valid == valid) or (Valid == valid_peer) ->
- case crl_check(OtpCert, CRLCheck, CertDbHandle, CertDbRef, CRLDbHandle, VerifyResult) of
+ case crl_check(OtpCert, CRLCheck, CertDbHandle, CertDbRef,
+ CRLDbHandle, VerifyResult, CertPath) of
valid ->
{Valid, {SslState, UserState}};
Result ->
- apply_user_fun(Fun, OtpCert, Result, UserState, SslState)
+ apply_user_fun(Fun, OtpCert, Result, UserState, SslState, CertPath)
end;
{fail, _} = Fail ->
Fail
end;
-apply_user_fun(Fun, OtpCert, ExtensionOrError, UserState0, SslState) ->
+apply_user_fun(Fun, OtpCert, ExtensionOrError, UserState0, SslState, _CertPath) ->
case Fun(OtpCert, ExtensionOrError, UserState0) of
{Valid, UserState} when (Valid == valid) or (Valid == valid_peer)->
{Valid, {SslState, UserState}};
@@ -2187,13 +2191,14 @@ handle_psk_identity(_PSKIdentity, LookupFun)
handle_psk_identity(PSKIdentity, {Fun, UserState}) ->
Fun(psk, PSKIdentity, UserState).
-crl_check(_, false, _,_,_, _) ->
+crl_check(_, false, _,_,_, _, _) ->
valid;
-crl_check(_, peer, _, _,_, valid) -> %% Do not check CAs with this option.
+crl_check(_, peer, _, _,_, valid, _) -> %% Do not check CAs with this option.
valid;
-crl_check(OtpCert, Check, CertDbHandle, CertDbRef, {Callback, CRLDbHandle}, _) ->
+crl_check(OtpCert, Check, CertDbHandle, CertDbRef, {Callback, CRLDbHandle}, _, CertPath) ->
Options = [{issuer_fun, {fun(_DP, CRL, Issuer, DBInfo) ->
- ssl_crl:trusted_cert_and_path(CRL, Issuer, DBInfo)
+ ssl_crl:trusted_cert_and_path(CRL, Issuer, {CertPath,
+ DBInfo})
end, {CertDbHandle, CertDbRef}}},
{update_crl, fun(DP, CRL) -> Callback:fresh_crl(DP, CRL) end}
],
@@ -2229,7 +2234,8 @@ dps_and_crls(OtpCert, Callback, CRLDbHandle, ext) ->
no_dps;
DistPoints ->
Issuer = OtpCert#'OTPCertificate'.tbsCertificate#'OTPTBSCertificate'.issuer,
- distpoints_lookup(DistPoints, Issuer, Callback, CRLDbHandle)
+ CRLs = distpoints_lookup(DistPoints, Issuer, Callback, CRLDbHandle),
+ dps_and_crls(DistPoints, CRLs, [])
end;
dps_and_crls(OtpCert, Callback, CRLDbHandle, same_issuer) ->
@@ -2242,7 +2248,13 @@ dps_and_crls(OtpCert, Callback, CRLDbHandle, same_issuer) ->
end, GenNames),
[{DP, {CRL, public_key:der_decode('CertificateList', CRL)}} || CRL <- CRLs].
-distpoints_lookup([], _, _, _) ->
+dps_and_crls([], _, Acc) ->
+ Acc;
+dps_and_crls([DP | Rest], CRLs, Acc) ->
+ DpCRL = [{DP, {CRL, public_key:der_decode('CertificateList', CRL)}} || CRL <- CRLs],
+ dps_and_crls(Rest, CRLs, DpCRL ++ Acc).
+
+distpoints_lookup([],_, _, _) ->
[];
distpoints_lookup([DistPoint | Rest], Issuer, Callback, CRLDbHandle) ->
Result =
@@ -2257,7 +2269,7 @@ distpoints_lookup([DistPoint | Rest], Issuer, Callback, CRLDbHandle) ->
not_available ->
distpoints_lookup(Rest, Issuer, Callback, CRLDbHandle);
CRLs ->
- [{DistPoint, {CRL, public_key:der_decode('CertificateList', CRL)}} || CRL <- CRLs]
+ CRLs
end.
sign_algo(?rsaEncryption) ->
diff --git a/lib/ssl/src/ssl_manager.erl b/lib/ssl/src/ssl_manager.erl
index 5bd9521de7..2b82f18bb5 100644
--- a/lib/ssl/src/ssl_manager.erl
+++ b/lib/ssl/src/ssl_manager.erl
@@ -32,10 +32,9 @@
new_session_id/1, clean_cert_db/2,
register_session/2, register_session/3, invalidate_session/2,
insert_crls/2, insert_crls/3, delete_crls/1, delete_crls/2,
- invalidate_session/3, invalidate_pem/1, clear_pem_cache/0, manager_name/1]).
+ invalidate_session/3, name/1]).
-% Spawn export
--export([init_session_validator/1, init_pem_cache_validator/1]).
+-export([init_session_validator/1]).
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
@@ -52,9 +51,7 @@
session_lifetime :: integer(),
certificate_db :: db_handle(),
session_validation_timer :: reference(),
- last_delay_timer = {undefined, undefined},%% Keep for testing purposes
- last_pem_check :: erlang:timestamp(),
- clear_pem_cache :: integer(),
+ last_delay_timer = {undefined, undefined},%% Keep for testing purposes
session_cache_client_max :: integer(),
session_cache_server_max :: integer(),
session_server_invalidator :: undefined | pid(),
@@ -63,7 +60,6 @@
-define(GEN_UNIQUE_ID_MAX_TRIES, 10).
-define(SESSION_VALIDATION_INTERVAL, 60000).
--define(CLEAR_PEM_CACHE, 120000).
-define(CLEAN_SESSION_DB, 60000).
-define(CLEAN_CERT_DB, 500).
-define(DEFAULT_MAX_SESSION_CACHE, 1000).
@@ -74,14 +70,14 @@
%%====================================================================
%%--------------------------------------------------------------------
--spec manager_name(normal | dist) -> atom().
+-spec name(normal | dist) -> atom().
%%
%% Description: Returns the registered name of the ssl manager process
%% in the operation modes 'normal' and 'dist'.
%%--------------------------------------------------------------------
-manager_name(normal) ->
+name(normal) ->
?MODULE;
-manager_name(dist) ->
+name(dist) ->
list_to_atom(atom_to_list(?MODULE) ++ "dist").
%%--------------------------------------------------------------------
@@ -91,9 +87,10 @@ manager_name(dist) ->
%% and certificate caching.
%%--------------------------------------------------------------------
start_link(Opts) ->
- DistMangerName = manager_name(normal),
- gen_server:start_link({local, DistMangerName},
- ?MODULE, [DistMangerName, Opts], []).
+ ManagerName = name(normal),
+ CacheName = ssl_pem_cache:name(normal),
+ gen_server:start_link({local, ManagerName},
+ ?MODULE, [ManagerName, CacheName, Opts], []).
%%--------------------------------------------------------------------
-spec start_link_dist(list()) -> {ok, pid()} | ignore | {error, term()}.
@@ -102,38 +99,23 @@ start_link(Opts) ->
%% be used by the erlang distribution. Note disables soft upgrade!
%%--------------------------------------------------------------------
start_link_dist(Opts) ->
- DistMangerName = manager_name(dist),
+ DistMangerName = name(dist),
+ DistCacheName = ssl_pem_cache:name(dist),
gen_server:start_link({local, DistMangerName},
- ?MODULE, [DistMangerName, Opts], []).
+ ?MODULE, [DistMangerName, DistCacheName, Opts], []).
%%--------------------------------------------------------------------
-spec connection_init(binary()| {der, list()}, client | server,
{Cb :: atom(), Handle:: term()}) ->
- {ok, certdb_ref(), db_handle(), db_handle(),
- db_handle(), db_handle(), CRLInfo::term()}.
+ {ok, map()}.
%%
%% Description: Do necessary initializations for a new connection.
%%--------------------------------------------------------------------
connection_init({der, _} = Trustedcerts, Role, CRLCache) ->
- case bypass_pem_cache() of
- true ->
- {ok, Extracted} = ssl_pkix_db:extract_trusted_certs(Trustedcerts),
- call({connection_init, Extracted, Role, CRLCache});
- false ->
- call({connection_init, Trustedcerts, Role, CRLCache})
- end;
-
-connection_init(<<>> = Trustedcerts, Role, CRLCache) ->
- call({connection_init, Trustedcerts, Role, CRLCache});
-
+ {ok, Extracted} = ssl_pkix_db:extract_trusted_certs(Trustedcerts),
+ call({connection_init, Extracted, Role, CRLCache});
connection_init(Trustedcerts, Role, CRLCache) ->
- case bypass_pem_cache() of
- true ->
- {ok, Extracted} = ssl_pkix_db:extract_trusted_certs(Trustedcerts),
- call({connection_init, Extracted, Role, CRLCache});
- false ->
- call({connection_init, Trustedcerts, Role, CRLCache})
- end.
+ call({connection_init, Trustedcerts, Role, CRLCache}).
%%--------------------------------------------------------------------
-spec cache_pem_file(binary(), term()) -> {ok, term()} | {error, reason()}.
@@ -141,31 +123,14 @@ connection_init(Trustedcerts, Role, CRLCache) ->
%% Description: Cache a pem file and return its content.
%%--------------------------------------------------------------------
cache_pem_file(File, DbHandle) ->
- case bypass_pem_cache() of
- true ->
- ssl_pkix_db:decode_pem_file(File);
- false ->
- case ssl_pkix_db:lookup_cached_pem(DbHandle, File) of
- [{Content,_}] ->
- {ok, Content};
- [Content] ->
- {ok, Content};
- undefined ->
- call({cache_pem, File})
- end
+ case ssl_pkix_db:lookup(File, DbHandle) of
+ [Content] ->
+ {ok, Content};
+ undefined ->
+ ssl_pem_cache:insert(File)
end.
%%--------------------------------------------------------------------
--spec clear_pem_cache() -> ok.
-%%
-%% Description: Clear the PEM cache
-%%--------------------------------------------------------------------
-clear_pem_cache() ->
- %% Not supported for distribution at the moement, should it be?
- put(ssl_manager, manager_name(normal)),
- call(unconditionally_clear_pem_cache).
-
-%%--------------------------------------------------------------------
-spec lookup_trusted_cert(term(), reference(), serialnumber(), issuer()) ->
undefined |
{ok, {der_cert(), #'OTPCertificate'{}}}.
@@ -222,26 +187,22 @@ invalidate_session(Port, Session) ->
load_mitigation(),
cast({invalidate_session, Port, Session}).
--spec invalidate_pem(File::binary()) -> ok.
-invalidate_pem(File) ->
- cast({invalidate_pem, File}).
-
insert_crls(Path, CRLs)->
insert_crls(Path, CRLs, normal).
insert_crls(?NO_DIST_POINT_PATH = Path, CRLs, ManagerType)->
- put(ssl_manager, manager_name(ManagerType)),
+ put(ssl_manager, name(ManagerType)),
cast({insert_crls, Path, CRLs});
insert_crls(Path, CRLs, ManagerType)->
- put(ssl_manager, manager_name(ManagerType)),
+ put(ssl_manager, name(ManagerType)),
call({insert_crls, Path, CRLs}).
delete_crls(Path)->
delete_crls(Path, normal).
delete_crls(?NO_DIST_POINT_PATH = Path, ManagerType)->
- put(ssl_manager, manager_name(ManagerType)),
+ put(ssl_manager, name(ManagerType)),
cast({delete_crls, Path});
delete_crls(Path, ManagerType)->
- put(ssl_manager, manager_name(ManagerType)),
+ put(ssl_manager, name(ManagerType)),
call({delete_crls, Path}).
%%====================================================================
@@ -255,13 +216,14 @@ delete_crls(Path, ManagerType)->
%%
%% Description: Initiates the server
%%--------------------------------------------------------------------
-init([Name, Opts]) ->
- put(ssl_manager, Name),
+init([ManagerName, PemCacheName, Opts]) ->
+ put(ssl_manager, ManagerName),
+ put(ssl_pem_cache, PemCacheName),
process_flag(trap_exit, true),
CacheCb = proplists:get_value(session_cb, Opts, ssl_session_cache),
SessionLifeTime =
proplists:get_value(session_lifetime, Opts, ?'24H_in_sec'),
- CertDb = ssl_pkix_db:create(),
+ CertDb = ssl_pkix_db:create(PemCacheName),
ClientSessionCache =
CacheCb:init([{role, client} |
proplists:get_value(session_cb_init_args, Opts, [])]),
@@ -270,16 +232,12 @@ init([Name, Opts]) ->
proplists:get_value(session_cb_init_args, Opts, [])]),
Timer = erlang:send_after(SessionLifeTime * 1000 + 5000,
self(), validate_sessions),
- Interval = pem_check_interval(),
- erlang:send_after(Interval, self(), clear_pem_cache),
{ok, #state{certificate_db = CertDb,
session_cache_client = ClientSessionCache,
session_cache_server = ServerSessionCache,
session_cache_cb = CacheCb,
session_lifetime = SessionLifeTime,
session_validation_timer = Timer,
- last_pem_check = os:timestamp(),
- clear_pem_cache = Interval,
session_cache_client_max =
max_session_cache_size(session_cache_client_max),
session_cache_server_max =
@@ -302,18 +260,25 @@ init([Name, Opts]) ->
handle_call({{connection_init, <<>>, Role, {CRLCb, UserCRLDb}}, _Pid}, _From,
#state{certificate_db = [CertDb, FileRefDb, PemChace | _] = Db} = State) ->
Ref = make_ref(),
- Result = {ok, Ref, CertDb, FileRefDb, PemChace,
- session_cache(Role, State), {CRLCb, crl_db_info(Db, UserCRLDb)}},
- {reply, Result, State#state{certificate_db = Db}};
+ {reply, {ok, #{cert_db_ref => Ref,
+ cert_db_handle => CertDb,
+ fileref_db_handle => FileRefDb,
+ pem_cache => PemChace,
+ session_cache => session_cache(Role, State),
+ crl_db_info => {CRLCb, crl_db_info(Db, UserCRLDb)}}}, State};
handle_call({{connection_init, Trustedcerts, Role, {CRLCb, UserCRLDb}}, Pid}, _From,
#state{certificate_db = [CertDb, FileRefDb, PemChace | _] = Db} = State) ->
case add_trusted_certs(Pid, Trustedcerts, Db) of
{ok, Ref} ->
- {reply, {ok, Ref, CertDb, FileRefDb, PemChace, session_cache(Role, State),
- {CRLCb, crl_db_info(Db, UserCRLDb)}}, State};
- {error, _} = Error ->
- {reply, Error, State}
+ {reply, {ok, #{cert_db_ref => Ref,
+ cert_db_handle => CertDb,
+ fileref_db_handle => FileRefDb,
+ pem_cache => PemChace,
+ session_cache => session_cache(Role, State),
+ crl_db_info => {CRLCb, crl_db_info(Db, UserCRLDb)}}}, State};
+ {error, _} = Error ->
+ {reply, Error, State}
end;
handle_call({{insert_crls, Path, CRLs}, _}, _From,
@@ -330,21 +295,7 @@ handle_call({{new_session_id, Port}, _},
_, #state{session_cache_cb = CacheCb,
session_cache_server = Cache} = State) ->
Id = new_id(Port, ?GEN_UNIQUE_ID_MAX_TRIES, Cache, CacheCb),
- {reply, Id, State};
-
-handle_call({{cache_pem,File}, _Pid}, _,
- #state{certificate_db = Db} = State) ->
- try ssl_pkix_db:cache_pem_file(File, Db) of
- Result ->
- {reply, Result, State}
- catch
- _:Reason ->
- {reply, {error, Reason}, State}
- end;
-handle_call({unconditionally_clear_pem_cache, _},_,
- #state{certificate_db = [_,_,PemChace | _]} = State) ->
- ssl_pkix_db:clear(PemChace),
- {reply, ok, State}.
+ {reply, Id, State}.
%%--------------------------------------------------------------------
-spec handle_cast(msg(), #state{}) -> {noreply, #state{}}.
@@ -382,11 +333,6 @@ handle_cast({insert_crls, Path, CRLs},
handle_cast({delete_crls, CRLsOrPath},
#state{certificate_db = Db} = State) ->
ssl_pkix_db:remove_crls(Db, CRLsOrPath),
- {noreply, State};
-
-handle_cast({invalidate_pem, File},
- #state{certificate_db = [_, _, PemCache | _]} = State) ->
- ssl_pkix_db:remove(File, PemCache),
{noreply, State}.
%%--------------------------------------------------------------------
@@ -418,22 +364,14 @@ handle_info({delayed_clean_session, Key, Cache}, #state{session_cache_cb = Cache
CacheCb:delete(Cache, Key),
{noreply, State};
-handle_info(clear_pem_cache, #state{certificate_db = [_,_,PemChace | _],
- clear_pem_cache = Interval,
- last_pem_check = CheckPoint} = State) ->
- NewCheckPoint = os:timestamp(),
- start_pem_cache_validator(PemChace, CheckPoint),
- erlang:send_after(Interval, self(), clear_pem_cache),
- {noreply, State#state{last_pem_check = NewCheckPoint}};
-
handle_info({clean_cert_db, Ref, File},
- #state{certificate_db = [CertDb,RefDb, PemCache | _]} = State) ->
+ #state{certificate_db = [CertDb, {RefDb, FileMapDb} | _]} = State) ->
case ssl_pkix_db:lookup(Ref, RefDb) of
undefined -> %% Alredy cleaned
ok;
_ ->
- clean_cert_db(Ref, CertDb, RefDb, PemCache, File)
+ clean_cert_db(Ref, CertDb, RefDb, FileMapDb, File)
end,
{noreply, State};
@@ -523,14 +461,6 @@ delay_time() ->
?CLEAN_SESSION_DB
end.
-bypass_pem_cache() ->
- case application:get_env(ssl, bypass_pem_cache) of
- {ok, Bool} when is_boolean(Bool) ->
- Bool;
- _ ->
- false
- end.
-
max_session_cache_size(CacheType) ->
case application:get_env(ssl, CacheType) of
{ok, Size} when is_integer(Size) ->
@@ -594,16 +524,11 @@ new_id(Port, Tries, Cache, CacheCb) ->
new_id(Port, Tries - 1, Cache, CacheCb)
end.
-clean_cert_db(Ref, CertDb, RefDb, PemCache, File) ->
+clean_cert_db(Ref, CertDb, RefDb, FileMapDb, File) ->
case ssl_pkix_db:ref_count(Ref, RefDb, 0) of
0 ->
- case ssl_pkix_db:lookup_cached_pem(PemCache, File) of
- [{Content, Ref}] ->
- ssl_pkix_db:insert(File, Content, PemCache);
- _ ->
- ok
- end,
ssl_pkix_db:remove(Ref, RefDb),
+ ssl_pkix_db:remove(File, FileMapDb),
ssl_pkix_db:remove_trusted_certs(Ref, CertDb);
_ ->
ok
@@ -687,42 +612,6 @@ exists_equivalent(#session{
exists_equivalent(Session, [ _ | Rest]) ->
exists_equivalent(Session, Rest).
-start_pem_cache_validator(PemCache, CheckPoint) ->
- spawn_link(?MODULE, init_pem_cache_validator,
- [[get(ssl_manager), PemCache, CheckPoint]]).
-
-init_pem_cache_validator([SslManagerName, PemCache, CheckPoint]) ->
- put(ssl_manager, SslManagerName),
- ssl_pkix_db:foldl(fun pem_cache_validate/2,
- CheckPoint, PemCache).
-
-pem_cache_validate({File, _}, CheckPoint) ->
- case file:read_file_info(File, []) of
- {ok, #file_info{mtime = Time}} ->
- case is_before_checkpoint(Time, CheckPoint) of
- true ->
- ok;
- false ->
- invalidate_pem(File)
- end;
- _ ->
- invalidate_pem(File)
- end,
- CheckPoint.
-
-pem_check_interval() ->
- case application:get_env(ssl, ssl_pem_cache_clean) of
- {ok, Interval} when is_integer(Interval) ->
- Interval;
- _ ->
- ?CLEAR_PEM_CACHE
- end.
-
-is_before_checkpoint(Time, CheckPoint) ->
- calendar:datetime_to_gregorian_seconds(
- calendar:now_to_datetime(CheckPoint)) -
- calendar:datetime_to_gregorian_seconds(Time) > 0.
-
add_trusted_certs(Pid, Trustedcerts, Db) ->
try
ssl_pkix_db:add_trusted_certs(Pid, Trustedcerts, Db)
diff --git a/lib/ssl/src/ssl_pem_cache.erl b/lib/ssl/src/ssl_pem_cache.erl
new file mode 100644
index 0000000000..f63a301f69
--- /dev/null
+++ b/lib/ssl/src/ssl_pem_cache.erl
@@ -0,0 +1,266 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2016-2016. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+
+%%----------------------------------------------------------------------
+%% Purpose: Manages the ssl PEM file cache
+%%----------------------------------------------------------------------
+
+-module(ssl_pem_cache).
+-behaviour(gen_server).
+
+%% Internal application API
+-export([start_link/1,
+ start_link_dist/1,
+ name/1,
+ insert/1,
+ clear/0]).
+
+% Spawn export
+-export([init_pem_cache_validator/1]).
+
+%% gen_server callbacks
+-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
+ terminate/2, code_change/3]).
+
+-include("ssl_handshake.hrl").
+-include("ssl_internal.hrl").
+-include_lib("kernel/include/file.hrl").
+
+-record(state, {
+ pem_cache,
+ last_pem_check :: erlang:timestamp(),
+ clear :: integer()
+ }).
+
+-define(CLEAR_PEM_CACHE, 120000).
+-define(DEFAULT_MAX_SESSION_CACHE, 1000).
+
+%%====================================================================
+%% API
+%%====================================================================
+
+%%--------------------------------------------------------------------
+-spec name(normal | dist) -> atom().
+%%
+%% Description: Returns the registered name of the ssl cache process
+%% in the operation modes 'normal' and 'dist'.
+%%--------------------------------------------------------------------
+name(normal) ->
+ ?MODULE;
+name(dist) ->
+ list_to_atom(atom_to_list(?MODULE) ++ "dist").
+
+%%--------------------------------------------------------------------
+-spec start_link(list()) -> {ok, pid()} | ignore | {error, term()}.
+%%
+%% Description: Starts the ssl pem cache handler
+%%--------------------------------------------------------------------
+start_link(_) ->
+ CacheName = name(normal),
+ gen_server:start_link({local, CacheName},
+ ?MODULE, [CacheName], []).
+
+%%--------------------------------------------------------------------
+-spec start_link_dist(list()) -> {ok, pid()} | ignore | {error, term()}.
+%%
+%% Description: Starts a special instance of the ssl pem cache to
+%% be used by the erlang distribution. Note: disables soft upgrade!
+%%--------------------------------------------------------------------
+start_link_dist(_) ->
+ DistCacheName = name(dist),
+ gen_server:start_link({local, DistCacheName},
+ ?MODULE, [DistCacheName], []).
+
+
+%%--------------------------------------------------------------------
+-spec insert(binary()) -> {ok, term()} | {error, reason()}.
+%%
+%% Description: Cache a pem file and return its content.
+%%--------------------------------------------------------------------
+insert(File) ->
+ {ok, PemBin} = file:read_file(File),
+ Content = public_key:pem_decode(PemBin),
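+ %% When the cache is bypassed the decoded content is returned
+ %% without being stored (see bypass_cache/0).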
+ case bypass_cache() of
+ true ->
+ {ok, Content};
+ false ->
+ cast({cache_pem, File, Content}),
+ {ok, Content}
+ end.
+
+%%--------------------------------------------------------------------
+-spec clear() -> ok.
+%%
+%% Description: Clear the PEM cache
+%%--------------------------------------------------------------------
+clear() ->
+ %% Not supported for distribution at the moment, should it be?
+ put(ssl_pem_cache, name(normal)),
+ call(unconditionally_clear_pem_cache).
+
+-spec invalidate_pem(File::binary()) -> ok.
+invalidate_pem(File) ->
+ cast({invalidate_pem, File}).
+
+%%====================================================================
+%% gen_server callbacks
+%%====================================================================
+
+%%--------------------------------------------------------------------
+-spec init(list()) -> {ok, #state{}}.
+%% Possible return values not used now.
+%% | {ok, #state{}, timeout()} | ignore | {stop, term()}.
+%%
+%% Description: Initiates the server
+%%--------------------------------------------------------------------
+init([Name]) ->
+ put(ssl_pem_cache, Name),
+ process_flag(trap_exit, true),
+ PemCache = ssl_pkix_db:create_pem_cache(Name),
+ Interval = pem_check_interval(),
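+ %% Schedule periodic revalidation of the cache; handled in
+ %% handle_info(clear_pem_cache, ...) below.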
+ erlang:send_after(Interval, self(), clear_pem_cache),
+ {ok, #state{pem_cache = PemCache,
+ last_pem_check = os:timestamp(),
+ clear = Interval
+ }}.
+
+%%--------------------------------------------------------------------
+-spec handle_call(msg(), from(), #state{}) -> {reply, reply(), #state{}}.
+%% Possible return values not used now.
+%% {reply, reply(), #state{}, timeout()} |
+%% {noreply, #state{}} |
+%% {noreply, #state{}, timeout()} |
+%% {stop, reason(), reply(), #state{}} |
+%% {stop, reason(), #state{}}.
+%%
+%% Description: Handling call messages
+%%--------------------------------------------------------------------
+handle_call({unconditionally_clear_pem_cache, _},_,
+ #state{pem_cache = PemCache} = State) ->
+ ssl_pkix_db:clear(PemCache),
+ {reply, ok, State}.
+
+%%--------------------------------------------------------------------
+-spec handle_cast(msg(), #state{}) -> {noreply, #state{}}.
+%% Possible return values not used now.
+%% | {noreply, #state{}, timeout()} |
+%% {stop, reason(), #state{}}.
+%%
+%% Description: Handling cast messages
+%%--------------------------------------------------------------------
+handle_cast({cache_pem, File, Content}, #state{pem_cache = Db} = State) ->
+ ssl_pkix_db:insert(File, Content, Db),
+ {noreply, State};
+
+handle_cast({invalidate_pem, File}, #state{pem_cache = Db} = State) ->
+ ssl_pkix_db:remove(File, Db),
+ {noreply, State}.
+
+
+%%--------------------------------------------------------------------
+-spec handle_info(msg(), #state{}) -> {noreply, #state{}}.
+%% Possible return values not used now.
+%% |{noreply, #state{}, timeout()} |
+%% {stop, reason(), #state{}}.
+%%
+%% Description: Handling all non call/cast messages
+%%-------------------------------------------------------------------
+handle_info(clear_pem_cache, #state{pem_cache = PemCache,
+ clear = Interval,
+ last_pem_check = CheckPoint} = State) ->
+ NewCheckPoint = os:timestamp(),
+ start_pem_cache_validator(PemCache, CheckPoint),
+ erlang:send_after(Interval, self(), clear_pem_cache),
+ {noreply, State#state{last_pem_check = NewCheckPoint}};
+
+handle_info(_Info, State) ->
+ {noreply, State}.
+
+%%--------------------------------------------------------------------
+-spec terminate(reason(), #state{}) -> ok.
+%%
+%% Description: This function is called by a gen_server when it is about to
+%% terminate. It should be the opposite of Module:init/1 and do any necessary
+%% cleaning up. When it returns, the gen_server terminates with Reason.
+%% The return value is ignored.
+%%--------------------------------------------------------------------
+terminate(_Reason, #state{}) ->
+ ok.
+
+%%--------------------------------------------------------------------
+-spec code_change(term(), #state{}, list()) -> {ok, #state{}}.
+%%
+%% Description: Convert process state when code is changed
+%%--------------------------------------------------------------------
+code_change(_OldVsn, State, _Extra) ->
+ {ok, State}.
+
+%%--------------------------------------------------------------------
+%%% Internal functions
+%%--------------------------------------------------------------------
+call(Msg) ->
+ gen_server:call(get(ssl_pem_cache), {Msg, self()}, infinity).
+
+cast(Msg) ->
+ gen_server:cast(get(ssl_pem_cache), Msg).
+
+start_pem_cache_validator(PemCache, CheckPoint) ->
+ spawn_link(?MODULE, init_pem_cache_validator,
+ [[get(ssl_pem_cache), PemCache, CheckPoint]]).
+
+init_pem_cache_validator([CacheName, PemCache, CheckPoint]) ->
+ put(ssl_pem_cache, CacheName),
+ ssl_pkix_db:foldl(fun pem_cache_validate/2,
+ CheckPoint, PemCache).
+
+pem_cache_validate({File, _}, CheckPoint) ->
+ case file:read_file_info(File, []) of
+ {ok, #file_info{mtime = Time}} ->
+ case is_before_checkpoint(Time, CheckPoint) of
+ true ->
+ ok;
+ false ->
+ invalidate_pem(File)
+ end;
+ _ ->
+ invalidate_pem(File)
+ end,
+ CheckPoint.
+
+is_before_checkpoint(Time, CheckPoint) ->
+ calendar:datetime_to_gregorian_seconds(
+ calendar:now_to_datetime(CheckPoint)) -
+ calendar:datetime_to_gregorian_seconds(Time) > 0.
+
+pem_check_interval() ->
+ case application:get_env(ssl, ssl_pem_cache_clean) of
+ {ok, Interval} when is_integer(Interval) ->
+ Interval;
+ _ ->
+ ?CLEAR_PEM_CACHE
+ end.
+
+bypass_cache() ->
+ case application:get_env(ssl, bypass_pem_cache) of
+ {ok, Bool} when is_boolean(Bool) ->
+ Bool;
+ _ ->
+ false
+ end.
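%% Illustrative sketch of how the handler above is driven (file path is
%% hypothetical): insert/1 is what ssl_pkix_db:new_trusted_cert_entry/2 calls
%% to read and decode a PEM file, and the two application environment keys
%% read by bypass_cache/0 and pem_check_interval/0 are the tuning knobs:
%%
%%   %% Skip caching of decoded PEM files altogether:
%%   ok = application:set_env(ssl, bypass_pem_cache, true),
%%   %% Revalidate cached files every two minutes (the ?CLEAR_PEM_CACHE default):
%%   ok = application:set_env(ssl, ssl_pem_cache_clean, 120000),
%%   %% Read and decode a PEM file through the cache:
%%   {ok, Entries} = ssl_pem_cache:insert(<<"/etc/ssl/certs/ca.pem">>).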
diff --git a/lib/ssl/src/ssl_pkix_db.erl b/lib/ssl/src/ssl_pkix_db.erl
index b4299969e4..cde05bb16f 100644
--- a/lib/ssl/src/ssl_pkix_db.erl
+++ b/lib/ssl/src/ssl_pkix_db.erl
@@ -28,11 +28,11 @@
-include_lib("public_key/include/public_key.hrl").
-include_lib("kernel/include/file.hrl").
--export([create/0, add_crls/3, remove_crls/2, remove/1, add_trusted_certs/3,
+-export([create/1, create_pem_cache/1,
+ add_crls/3, remove_crls/2, remove/1, add_trusted_certs/3,
extract_trusted_certs/1,
remove_trusted_certs/2, insert/3, remove/2, clear/1, db_size/1,
ref_count/3, lookup_trusted_cert/4, foldl/3, select_cert_by_issuer/2,
- lookup_cached_pem/2, cache_pem_file/2, cache_pem_file/3,
decode_pem_file/1, lookup/2]).
%%====================================================================
@@ -40,25 +40,31 @@
%%====================================================================
%%--------------------------------------------------------------------
--spec create() -> [db_handle(),...].
+-spec create(atom()) -> [db_handle(),...].
%%
%% Description: Creates a new certificate db.
%% Note: lookup_trusted_cert/4 may be called from any process but only
%% the process that called create may call the other functions.
%%--------------------------------------------------------------------
-create() ->
+create(PEMCacheName) ->
[%% Let connection process delete trusted certs
%% that can only belong to one connection. (Supplied directly
%% on DER format to ssl:connect/listen.)
ets:new(ssl_otp_cacertificate_db, [set, public]),
%% Let connection processes call ref_count/3 directly
- ets:new(ssl_otp_ca_file_ref, [set, public]),
- ets:new(ssl_otp_pem_cache, [set, protected]),
+ {ets:new(ssl_otp_ca_file_ref, [set, public]),
+ ets:new(ssl_otp_ca_ref_file_mapping, [set, protected])
+ },
+ %% Lookups are done in a named table owned by the ssl_pem_cache process
+ PEMCacheName,
%% Default cache
{ets:new(ssl_otp_crl_cache, [set, protected]),
ets:new(ssl_otp_crl_issuer_mapping, [bag, protected])}
].
+create_pem_cache(Name) ->
+ ets:new(Name, [named_table, set, protected]).
+
%%--------------------------------------------------------------------
-spec remove([db_handle()]) -> ok.
%%
@@ -70,6 +76,10 @@ remove(Dbs) ->
true = ets:delete(Db1);
(undefined) ->
ok;
+ (ssl_pem_cache) ->
+ ok;
+ (ssl_pem_cache_dist) ->
+ ok;
(Db) ->
true = ets:delete(Db)
end, Dbs).
@@ -101,11 +111,6 @@ lookup_trusted_cert(_DbHandle, {extracted,Certs}, SerialNumber, Issuer) ->
{ok, Cert}
end.
-lookup_cached_pem([_, _, PemChache | _], File) ->
- lookup_cached_pem(PemChache, File);
-lookup_cached_pem(PemChache, File) ->
- lookup(File, PemChache).
-
%%--------------------------------------------------------------------
-spec add_trusted_certs(pid(), {erlang:timestamp(), string()} |
{der, list()}, [db_handle()]) -> {ok, [db_handle()]}.
@@ -122,17 +127,11 @@ add_trusted_certs(_Pid, {der, DerList}, [CertDb, _,_ | _]) ->
add_certs_from_der(DerList, NewRef, CertDb),
{ok, NewRef};
-add_trusted_certs(_Pid, File, [CertsDb, RefDb, PemChache | _] = Db) ->
- case lookup_cached_pem(Db, File) of
- [{_Content, Ref}] ->
+add_trusted_certs(_Pid, File, [ _, {RefDb, FileMapDb} | _] = Db) ->
+ case lookup(File, FileMapDb) of
+ [Ref] ->
ref_count(Ref, RefDb, 1),
{ok, Ref};
- [Content] ->
- Ref = make_ref(),
- update_counter(Ref, 1, RefDb),
- insert(File, {Content, Ref}, PemChache),
- add_certs_from_pem(Content, Ref, CertsDb),
- {ok, Ref};
undefined ->
new_trusted_cert_entry(File, Db)
end.
@@ -151,25 +150,6 @@ extract_trusted_certs(File) ->
{error, {badmatch, Error}}
end.
-%%--------------------------------------------------------------------
-%%
-%% Description: Cache file as binary in DB
-%%--------------------------------------------------------------------
--spec cache_pem_file(binary(), [db_handle()]) -> {ok, term()}.
-cache_pem_file(File, [_CertsDb, _RefDb, PemChache | _]) ->
- {ok, PemBin} = file:read_file(File),
- Content = public_key:pem_decode(PemBin),
- insert(File, Content, PemChache),
- {ok, Content}.
-
-
--spec cache_pem_file(reference(), binary(), [db_handle()]) -> {ok, term()}.
-cache_pem_file(Ref, File, [_CertsDb, _RefDb, PemChache| _]) ->
- {ok, PemBin} = file:read_file(File),
- Content = public_key:pem_decode(PemBin),
- insert(File, {Content, Ref}, PemChache),
- {ok, Content}.
-
-spec decode_pem_file(binary()) -> {ok, term()}.
decode_pem_file(File) ->
case file:read_file(File) of
@@ -246,6 +226,8 @@ select_cert_by_issuer(Cache, Issuer) ->
%%--------------------------------------------------------------------
ref_count({extracted, _}, _Db, _N) ->
not_cached;
+ref_count(Key, {Db, _}, N) ->
+ ref_count(Key, Db, N);
ref_count(Key, Db, N) ->
ets:update_counter(Db,Key,N).
@@ -278,9 +260,9 @@ insert(Key, Data, Db) ->
%%--------------------------------------------------------------------
%%% Internal functions
%%--------------------------------------------------------------------
-update_counter(Key, Count, Db) ->
- true = ets:insert(Db, {Key, Count}),
- ok.
+init_ref_db(Ref, File, {RefDb, FileMapDb}) ->
+ true = ets:insert(RefDb, {Ref, 1}),
+ true = ets:insert(FileMapDb, {File, Ref}).
remove_certs(Ref, CertsDb) ->
true = ets:match_delete(CertsDb, {{Ref, '_', '_'}, '_'}),
@@ -326,10 +308,10 @@ decode_certs(Ref, Cert) ->
undefined
end.
-new_trusted_cert_entry(File, [CertsDb, RefDb, _ | _] = Db) ->
+new_trusted_cert_entry(File, [CertsDb, RefsDb, _ | _]) ->
Ref = make_ref(),
- update_counter(Ref, 1, RefDb),
- {ok, Content} = cache_pem_file(Ref, File, Db),
+ init_ref_db(Ref, File, RefsDb),
+ {ok, Content} = ssl_pem_cache:insert(File),
add_certs_from_pem(Content, Ref, CertsDb),
{ok, Ref}.
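%% Illustrative sketch of the new reference bookkeeping above: the cached-PEM
%% entry is replaced by a {RefDb, FileMapDb} pair, where FileMapDb maps a file
%% name to its reference and RefDb holds the per-reference use counter, so a
%% second user of the same cacertfile only bumps the counter:
%%
%%   Ref = make_ref(),
%%   init_ref_db(Ref, File, {RefDb, FileMapDb}),   %% stores {Ref,1} and {File,Ref}
%%   [Ref] = lookup(File, FileMapDb),              %% later lookup by file name
%%   2 = ref_count(Ref, {RefDb, FileMapDb}, 1).    %% second reference to File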
diff --git a/lib/ssl/src/ssl_record.erl b/lib/ssl/src/ssl_record.erl
index b10069c3cb..539e189c4f 100644
--- a/lib/ssl/src/ssl_record.erl
+++ b/lib/ssl/src/ssl_record.erl
@@ -67,7 +67,7 @@
connection_state().
%%
%% Description: Returns the instance of the connection_state map
-%% that is currently defined as the current conection state.
+%% that is currently defined as the current connection state.
%%--------------------------------------------------------------------
current_connection_state(ConnectionStates, read) ->
maps:get(current_read, ConnectionStates);
@@ -79,7 +79,7 @@ current_connection_state(ConnectionStates, write) ->
connection_state().
%%
%% Description: Returns the instance of the connection_state map
-%% that is pendingly defined as the pending conection state.
+%% that is pendingly defined as the pending connection state.
%%--------------------------------------------------------------------
pending_connection_state(ConnectionStates, read) ->
maps:get(pending_read, ConnectionStates);
diff --git a/lib/ssl/src/ssl_sup.erl b/lib/ssl/src/ssl_sup.erl
index 8245801139..05a7aaaa82 100644
--- a/lib/ssl/src/ssl_sup.erl
+++ b/lib/ssl/src/ssl_sup.erl
@@ -25,7 +25,7 @@
-behaviour(supervisor).
%% API
--export([start_link/0, manager_opts/0]).
+-export([start_link/0]).
%% Supervisor callback
-export([init/1]).
@@ -44,90 +44,28 @@ start_link() ->
%%%=========================================================================
init([]) ->
- SessionCertManager = session_and_cert_manager_child_spec(),
- TLSConnetionManager = tls_connection_manager_child_spec(),
- %% Handles emulated options so that they inherited by the accept
- %% socket, even when setopts is performed on the listen socket
- ListenOptionsTracker = listen_options_tracker_child_spec(),
-
- DTLSConnetionManager = dtls_connection_manager_child_spec(),
- DTLSUdpListeners = dtls_udp_listeners_spec(),
+ {ok, {{rest_for_one, 10, 3600}, [ssl_admin_child_spec(),
+ ssl_connection_sup()
+ ]}}.
- {ok, {{one_for_all, 10, 3600}, [SessionCertManager, TLSConnetionManager,
- ListenOptionsTracker,
- DTLSConnetionManager, DTLSUdpListeners
- ]}}.
-
-
-manager_opts() ->
- CbOpts = case application:get_env(ssl, session_cb) of
- {ok, Cb} when is_atom(Cb) ->
- InitArgs = session_cb_init_args(),
- [{session_cb, Cb}, {session_cb_init_args, InitArgs}];
- _ ->
- []
- end,
- case application:get_env(ssl, session_lifetime) of
- {ok, Time} when is_integer(Time) ->
- [{session_lifetime, Time}| CbOpts];
- _ ->
- CbOpts
- end.
-
%%--------------------------------------------------------------------
%%% Internal functions
%%--------------------------------------------------------------------
-
-session_and_cert_manager_child_spec() ->
- Opts = manager_opts(),
- Name = ssl_manager,
- StartFunc = {ssl_manager, start_link, [Opts]},
+ssl_admin_child_spec() ->
+ Name = ssl_admin_sup,
+ StartFunc = {ssl_admin_sup, start_link, []},
Restart = permanent,
Shutdown = 4000,
- Modules = [ssl_manager],
- Type = worker,
- {Name, StartFunc, Restart, Shutdown, Type, Modules}.
-
-tls_connection_manager_child_spec() ->
- Name = tls_connection,
- StartFunc = {tls_connection_sup, start_link, []},
- Restart = permanent,
- Shutdown = 4000,
- Modules = [tls_connection_sup],
+ Modules = [ssl_admin_sup],
Type = supervisor,
{Name, StartFunc, Restart, Shutdown, Type, Modules}.
-dtls_connection_manager_child_spec() ->
- Name = dtls_connection,
- StartFunc = {dtls_connection_sup, start_link, []},
+ssl_connection_sup() ->
+ Name = ssl_connection_sup,
+ StartFunc = {ssl_connection_sup, start_link, []},
Restart = permanent,
Shutdown = 4000,
- Modules = [dtls_connection_sup],
- Type = supervisor,
- {Name, StartFunc, Restart, Shutdown, Type, Modules}.
-
-listen_options_tracker_child_spec() ->
- Name = tls_socket,
- StartFunc = {ssl_listen_tracker_sup, start_link, []},
- Restart = permanent,
- Shutdown = 4000,
- Modules = [tls_socket],
- Type = supervisor,
- {Name, StartFunc, Restart, Shutdown, Type, Modules}.
-
-dtls_udp_listeners_spec() ->
- Name = dtls_udp_listener,
- StartFunc = {dtls_udp_sup, start_link, []},
- Restart = permanent,
- Shutdown = 4000,
- Modules = [],
+ Modules = [ssl_connection_sup],
Type = supervisor,
{Name, StartFunc, Restart, Shutdown, Type, Modules}.
-session_cb_init_args() ->
- case application:get_env(ssl, session_cb_init_args) of
- {ok, Args} when is_list(Args) ->
- Args;
- _ ->
- []
- end.
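%% For reference, an illustrative map form of the supervisor configuration
%% that init/1 above now returns; with rest_for_one, a crash in ssl_admin_sup
%% also restarts ssl_connection_sup (started after it), but not vice versa:
%%
%%   SupFlags = #{strategy => rest_for_one, intensity => 10, period => 3600},
%%   Children = [#{id => ssl_admin_sup,
%%                 start => {ssl_admin_sup, start_link, []},
%%                 type => supervisor, shutdown => 4000},
%%               #{id => ssl_connection_sup,
%%                 start => {ssl_connection_sup, start_link, []},
%%                 type => supervisor, shutdown => 4000}],
%%   {ok, {SupFlags, Children}}.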
diff --git a/lib/ssl/src/tls_handshake.erl b/lib/ssl/src/tls_handshake.erl
index 2800ee6537..5726561865 100644
--- a/lib/ssl/src/tls_handshake.erl
+++ b/lib/ssl/src/tls_handshake.erl
@@ -88,7 +88,7 @@ client_hello(Host, Port, ConnectionStates,
#hello_extensions{}, {ssl_cipher:hash(), ssl_cipher:sign_algo()} | undefined} |
#alert{}.
%%
-%% Description: Handles a recieved hello message
+%% Description: Handles a received hello message
%%--------------------------------------------------------------------
hello(#server_hello{server_version = Version, random = Random,
cipher_suite = CipherSuite,
@@ -192,7 +192,8 @@ handle_client_hello(Version, #client_hello{session_id = SugesstedId,
end.
get_tls_handshake_aux(Version, <<?BYTE(Type), ?UINT24(Length),
- Body:Length/binary,Rest/binary>>, #ssl_options{v2_hello_compatible = V2Hello} = Opts, Acc) ->
+ Body:Length/binary,Rest/binary>>,
+ #ssl_options{v2_hello_compatible = V2Hello} = Opts, Acc) ->
Raw = <<?BYTE(Type), ?UINT24(Length), Body/binary>>,
try decode_handshake(Version, Type, Body, V2Hello) of
Handshake ->
@@ -207,27 +208,17 @@ get_tls_handshake_aux(_Version, Data, _, Acc) ->
decode_handshake(_, ?HELLO_REQUEST, <<>>, _) ->
#hello_request{};
-%% Client hello v2.
-%% The server must be able to receive such messages, from clients that
-%% are willing to use ssl v3 or higher, but have ssl v2 compatibility.
-decode_handshake(_Version, ?CLIENT_HELLO, <<?BYTE(Major), ?BYTE(Minor),
- ?UINT16(CSLength), ?UINT16(0),
- ?UINT16(CDLength),
- CipherSuites:CSLength/binary,
- ChallengeData:CDLength/binary>>, true) ->
- #client_hello{client_version = {Major, Minor},
- random = ssl_v2:client_random(ChallengeData, CDLength),
- session_id = 0,
- cipher_suites = ssl_handshake:decode_suites('3_bytes', CipherSuites),
- compression_methods = [?NULL],
- extensions = #hello_extensions{}
- };
-decode_handshake(_Version, ?CLIENT_HELLO, <<?BYTE(_), ?BYTE(_),
- ?UINT16(CSLength), ?UINT16(0),
- ?UINT16(CDLength),
- _CipherSuites:CSLength/binary,
- _ChallengeData:CDLength/binary>>, false) ->
- throw(?ALERT_REC(?FATAL, ?PROTOCOL_VERSION, ssl_v2_client_hello_no_supported));
+decode_handshake(_Version, ?CLIENT_HELLO, Bin, true) ->
+ try decode_hello(Bin) of
+ Hello ->
+ Hello
+ catch
+ _:_ ->
+ decode_v2_hello(Bin)
+ end;
+decode_handshake(_Version, ?CLIENT_HELLO, Bin, false) ->
+ decode_hello(Bin);
+
decode_handshake(_Version, ?CLIENT_HELLO, <<?BYTE(Major), ?BYTE(Minor), Random:32/binary,
?BYTE(SID_length), Session_ID:SID_length/binary,
?UINT16(Cs_length), CipherSuites:Cs_length/binary,
@@ -244,10 +235,40 @@ decode_handshake(_Version, ?CLIENT_HELLO, <<?BYTE(Major), ?BYTE(Minor), Random:3
compression_methods = Comp_methods,
extensions = DecodedExtensions
};
-
decode_handshake(Version, Tag, Msg, _) ->
ssl_handshake:decode_handshake(Version, Tag, Msg).
+
+decode_hello(<<?BYTE(Major), ?BYTE(Minor), Random:32/binary,
+ ?BYTE(SID_length), Session_ID:SID_length/binary,
+ ?UINT16(Cs_length), CipherSuites:Cs_length/binary,
+ ?BYTE(Cm_length), Comp_methods:Cm_length/binary,
+ Extensions/binary>>) ->
+ DecodedExtensions = ssl_handshake:decode_hello_extensions({client, Extensions}),
+
+ #client_hello{
+ client_version = {Major,Minor},
+ random = Random,
+ session_id = Session_ID,
+ cipher_suites = ssl_handshake:decode_suites('2_bytes', CipherSuites),
+ compression_methods = Comp_methods,
+ extensions = DecodedExtensions
+ }.
+%% The server must be able to receive such messages from clients that
+%% are willing to use ssl v3 or higher, but have ssl v2 compatibility.
+decode_v2_hello(<<?BYTE(Major), ?BYTE(Minor),
+ ?UINT16(CSLength), ?UINT16(0),
+ ?UINT16(CDLength),
+ CipherSuites:CSLength/binary,
+ ChallengeData:CDLength/binary>>) ->
+ #client_hello{client_version = {Major, Minor},
+ random = ssl_v2:client_random(ChallengeData, CDLength),
+ session_id = 0,
+ cipher_suites = ssl_handshake:decode_suites('3_bytes', CipherSuites),
+ compression_methods = [?NULL],
+ extensions = #hello_extensions{}
+ }.
+
enc_handshake(#hello_request{}, _Version) ->
{?HELLO_REQUEST, <<>>};
enc_handshake(#client_hello{client_version = {Major, Minor},
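%% Illustrative note on the rewritten v2-compatibility handling above: the
%% payload is now always parsed as an ordinary (SSL 3.0 and later) client
%% hello first and only falls back to the SSLv2 layout when that parse fails,
%% so the same packet decodes identically with or without the
%% v2_hello_compatible option (cf. decode_hello_handshake_version_confusion
%% in ssl_handshake_SUITE further down):
%%
%%   ClientHello = 1,
%%   Hello = tls_handshake:decode_handshake({3,3}, ClientHello, Packet, false),
%%   Hello = tls_handshake:decode_handshake({3,3}, ClientHello, Packet, true).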
diff --git a/lib/ssl/src/tls_v1.erl b/lib/ssl/src/tls_v1.erl
index 7f24ce5192..e2e224ab0c 100644
--- a/lib/ssl/src/tls_v1.erl
+++ b/lib/ssl/src/tls_v1.erl
@@ -407,7 +407,7 @@ is_pair(Hash, rsa, Hashs) ->
AtLeastMd5 = Hashs -- [md2,md4],
lists:member(Hash, AtLeastMd5).
-%% list ECC curves in prefered order
+%% list ECC curves in preferred order
-spec ecc_curves(1..3 | all) -> [named_curve()].
ecc_curves(all) ->
[sect571r1,sect571k1,secp521r1,brainpoolP512r1,
diff --git a/lib/ssl/test/make_certs.erl b/lib/ssl/test/make_certs.erl
index d85be6c69e..e14f7f60c4 100644
--- a/lib/ssl/test/make_certs.erl
+++ b/lib/ssl/test/make_certs.erl
@@ -172,8 +172,8 @@ revoke(Root, CA, User, C) ->
gencrl(Root, CA, C).
gencrl(Root, CA, C) ->
- %% By default, the CRL is valid for 24 hours from now.
- gencrl(Root, CA, C, 24).
+ %% By default, the CRL is valid for a week from now.
+ gencrl(Root, CA, C, 24*7).
gencrl(Root, CA, C, CrlHours) ->
CACnfFile = filename:join([Root, CA, "ca.cnf"]),
diff --git a/lib/ssl/test/ssl_basic_SUITE.erl b/lib/ssl/test/ssl_basic_SUITE.erl
index de5895d7ba..f0a3c42e8d 100644
--- a/lib/ssl/test/ssl_basic_SUITE.erl
+++ b/lib/ssl/test/ssl_basic_SUITE.erl
@@ -961,9 +961,9 @@ clear_pem_cache(Config) when is_list(Config) ->
{status, _, _, StatusInfo} = sys:get_status(whereis(ssl_manager)),
[_, _,_, _, Prop] = StatusInfo,
State = ssl_test_lib:state(Prop),
- [_,FilRefDb |_] = element(6, State),
+ [_,{FilRefDb, _} |_] = element(6, State),
{Server, Client} = basic_verify_test_no_close(Config),
- CountReferencedFiles = fun({_,-1}, Acc) ->
+ CountReferencedFiles = fun({_, -1}, Acc) ->
Acc;
({_, N}, Acc) ->
N + Acc
diff --git a/lib/ssl/test/ssl_bench_SUITE.erl b/lib/ssl/test/ssl_bench_SUITE.erl
index 21989f8d99..70fd0af9b4 100644
--- a/lib/ssl/test/ssl_bench_SUITE.erl
+++ b/lib/ssl/test/ssl_bench_SUITE.erl
@@ -88,7 +88,6 @@ end_per_testcase(_Func, _Conf) ->
-define(FPROF_SERVER, false).
-define(EPROF_CLIENT, false).
-define(EPROF_SERVER, false).
--define(PERCEPT_SERVER, false).
%% Current numbers gives roughly a testcase per minute on todays hardware..
@@ -190,7 +189,6 @@ server_init(ssl, setup_connection, _, _, Server) ->
?FPROF_SERVER andalso start_profile(fprof, [whereis(ssl_manager), new]),
%%?EPROF_SERVER andalso start_profile(eprof, [ssl_connection_sup, ssl_manager]),
?EPROF_SERVER andalso start_profile(eprof, [ssl_manager]),
- ?PERCEPT_SERVER andalso percept:profile("/tmp/ssl_server.percept"),
Server ! {self(), {init, Host, Port}},
Test = fun(TSocket) ->
ok = ssl:ssl_accept(TSocket),
@@ -247,7 +245,6 @@ setup_server_connection(LSocket, Test) ->
receive quit ->
?FPROF_SERVER andalso stop_profile(fprof, "test_server_res.fprof"),
?EPROF_SERVER andalso stop_profile(eprof, "test_server_res.eprof"),
- ?PERCEPT_SERVER andalso stop_profile(percept, "/tmp/ssl_server.percept"),
ok
after 0 ->
case ssl:transport_accept(LSocket, 2000) of
@@ -388,13 +385,6 @@ start_profile(fprof, Procs) ->
fprof:trace([start, {procs, Procs}]),
io:format("(F)Profiling ...",[]).
-stop_profile(percept, File) ->
- percept:stop_profile(),
- percept:analyze(File),
- {started, _Host, Port} = percept:start_webserver(),
- wx:new(),
- wx_misc:launchDefaultBrowser("http://" ++ net_adm:localhost() ++ ":" ++ integer_to_list(Port)),
- ok;
stop_profile(eprof, File) ->
profiling_stopped = eprof:stop_profiling(),
eprof:log(File),
diff --git a/lib/ssl/test/ssl_handshake_SUITE.erl b/lib/ssl/test/ssl_handshake_SUITE.erl
index 74b14145dd..0a50c98a28 100644
--- a/lib/ssl/test/ssl_handshake_SUITE.erl
+++ b/lib/ssl/test/ssl_handshake_SUITE.erl
@@ -33,6 +33,7 @@
%% Common Test interface functions -----------------------------------
%%--------------------------------------------------------------------
all() -> [decode_hello_handshake,
+ decode_hello_handshake_version_confusion,
decode_single_hello_extension_correctly,
decode_supported_elliptic_curves_hello_extension_correctly,
decode_unknown_hello_extension_correctly,
@@ -106,6 +107,14 @@ decode_hello_handshake(_Config) ->
#renegotiation_info{renegotiated_connection = <<0>>}
= (Hello#server_hello.extensions)#hello_extensions.renegotiation_info.
+
+decode_hello_handshake_version_confusion(_) ->
+ HelloPacket = <<3,3,0,0,0,0,0,63,210,235,149,6,244,140,108,13,177,74,16,218,33,108,219,41,73,228,3,82,132,123,73,144,118,100,0,0,32,192,4,0,10,192,45,192,38,0,47,192,18,0,163,0,22,0,165,192,29,192,18,192,30,0,103,0,57,192,48,0,47,1,0>>,
+ Version = {3,3},
+ ClientHello = 1,
+ Hello = tls_handshake:decode_handshake({3,3}, ClientHello, HelloPacket, false),
+ Hello = tls_handshake:decode_handshake({3,3}, ClientHello, HelloPacket, true).
+
decode_single_hello_extension_correctly(_Config) ->
Renegotiation = <<?UINT16(?RENEGOTIATION_EXT), ?UINT16(1), 0>>,
Extensions = ssl_handshake:decode_hello_extensions(Renegotiation),
diff --git a/lib/ssl/test/ssl_pem_cache_SUITE.erl b/lib/ssl/test/ssl_pem_cache_SUITE.erl
index f10d27fbc6..96b15d9b51 100644
--- a/lib/ssl/test/ssl_pem_cache_SUITE.erl
+++ b/lib/ssl/test/ssl_pem_cache_SUITE.erl
@@ -82,8 +82,8 @@ pem_cleanup() ->
[{doc, "Test pem cache invalidate mechanism"}].
pem_cleanup(Config)when is_list(Config) ->
process_flag(trap_exit, true),
- ClientOpts = proplists:get_value(client_opts, Config),
- ServerOpts = proplists:get_value(server_opts, Config),
+ ClientOpts = proplists:get_value(client_verification_opts, Config),
+ ServerOpts = proplists:get_value(server_verification_opts, Config),
{ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
Server =
diff --git a/lib/ssl/test/ssl_test_lib.erl b/lib/ssl/test/ssl_test_lib.erl
index 9632103696..49d2b5c1b8 100644
--- a/lib/ssl/test/ssl_test_lib.erl
+++ b/lib/ssl/test/ssl_test_lib.erl
@@ -278,8 +278,11 @@ check_result(Server, ServerMsg, Client, ClientMsg) ->
check_result(Server, ServerMsg);
{Port, {data,Debug}} when is_port(Port) ->
- ct:log("~p:~p~nopenssl ~s~n",[?MODULE,?LINE, Debug]),
+ ct:log("~p:~p~n Openssl ~s~n",[?MODULE,?LINE, Debug]),
check_result(Server, ServerMsg, Client, ClientMsg);
+ {Port,closed} when is_port(Port) ->
+ ct:log("~p:~p~n Openssl port closed ~n",[?MODULE,?LINE]),
+ check_result(Server, ServerMsg, Client, ClientMsg);
Unexpected ->
Reason = {{expected, {Client, ClientMsg}},
{expected, {Server, ServerMsg}}, {got, Unexpected}},
@@ -291,11 +294,11 @@ check_result(Pid, Msg) ->
{Pid, Msg} ->
ok;
{Port, {data,Debug}} when is_port(Port) ->
- ct:log("~p:~p~nopenssl ~s~n",[?MODULE,?LINE, Debug]),
+ ct:log("~p:~p~n Openssl ~s~n",[?MODULE,?LINE, Debug]),
check_result(Pid,Msg);
- %% {Port, {exit_status, Status}} when is_port(Port) ->
- %% ct:log("~p:~p Exit status: ~p~n",[?MODULE,?LINE, Status]),
- %% check_result(Pid, Msg);
+ {Port,closed} when is_port(Port)->
+ ct:log("~p:~p Openssl port closed ~n",[?MODULE,?LINE]),
+ check_result(Pid, Msg);
Unexpected ->
Reason = {{expected, {Pid, Msg}},
{got, Unexpected}},
diff --git a/lib/stdlib/doc/src/c.xml b/lib/stdlib/doc/src/c.xml
index 55a77d1bc5..7666699183 100644
--- a/lib/stdlib/doc/src/c.xml
+++ b/lib/stdlib/doc/src/c.xml
@@ -52,13 +52,27 @@
<func>
<name name="c" arity="1"/>
<name name="c" arity="2"/>
- <fsummary>Compile and load code in a file.</fsummary>
+ <name name="c" arity="3"/>
+ <fsummary>Compile and load a file or module.</fsummary>
<desc>
- <p>Compiles and then purges and loads the code for a file.
- <c><anno>Options</anno></c> defaults to <c>[]</c>. Compilation is
- equivalent to:</p>
- <code type="none">
-compile:file(<anno>File</anno>, <anno>Options</anno> ++ [report_errors, report_warnings])</code>
+ <p>Compiles and then purges and loads the code for a module.
+ <c><anno>Module</anno></c> can be either a module name or a source
+ file path, with or without <c>.erl</c> extension.
+ <c><anno>Options</anno></c> defaults to <c>[]</c>.</p>
+ <p>If <c><anno>Module</anno></c> is an atom and is not the path of a
+ source file, then the code path is searched to locate the object
+ file for the module and extract its original compiler options and
+ source path. If the source file is not found in the original
+ location, <seealso
+ marker="filelib#find_source/1"><c>filelib:find_source/1</c></seealso>
+ is used to search for it relative to the directory of the object
+ file.</p>
+ <p>The source file is compiled with the original
+ options appended to the given <c><anno>Options</anno></c>, the
+ output replacing the old object file if and only if compilation
+ succeeds. A function <c><anno>Filter</anno></c> can be specified
+ for removing elements from the original compiler options
+ before the new options are added.</p>
<p>Notice that purging the code means that any processes
lingering in old code for the module are killed without
warning. For more information, see <c>code/3</c>.</p>
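%% Illustrative shell usage of c/1,2,3 as described above (module and file
%% names are hypothetical):
%%
%%   c(my_module).                 %% recompile from the original source with
%%                                 %% the original compiler options
%%   c("src/my_module.erl", [debug_info]).
%%   c(my_module, [], fun(Opt) -> Opt =/= native end).
%%                                 %% drop 'native' from the old options before
%%                                 %% recompiling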
diff --git a/lib/stdlib/doc/src/erl_tar.xml b/lib/stdlib/doc/src/erl_tar.xml
index 24e7b64b9e..f28d8b425b 100644
--- a/lib/stdlib/doc/src/erl_tar.xml
+++ b/lib/stdlib/doc/src/erl_tar.xml
@@ -37,12 +37,13 @@
</modulesummary>
<description>
<p>This module archives and extract files to and from
- a tar file. This module supports the <c>ustar</c> format
- (IEEE Std 1003.1 and ISO/IEC&nbsp;9945-1). All modern <c>tar</c>
- programs (including GNU tar) can read this format. To ensure that
- that GNU tar produces a tar file that <c>erl_tar</c> can read,
- specify option <c>--format=ustar</c> to GNU tar.</p>
-
+ a tar file. This module supports reading the most common tar formats,
+ namely v7, STAR, USTAR, and PAX, as well as some of GNU tar's extensions
+ to the USTAR format (most notably sparse files). It produces tar archives
+ in USTAR format, unless the files being archived require PAX format due to
+ restrictions in USTAR (such as Unicode metadata and filename length).
+ As such, <c>erl_tar</c> supports tar archives produced by most modern
+ tar utilities, and produces tarballs which should be similarly portable.</p>
<p>By convention, the name of a tar file is to end in "<c>.tar</c>".
To abide to the convention, add "<c>.tar</c>" to the name.</p>
@@ -83,6 +84,8 @@
<p>If <seealso marker="kernel:file#native_name_encoding/0">
<c>file:native_name_encoding/0</c></seealso>
returns <c>latin1</c>, no translation of path names is done.</p>
+
+ <p>Unicode metadata stored in PAX headers is preserved.</p>
</section>
<section>
@@ -104,21 +107,20 @@
<title>Limitations</title>
<list type="bulleted">
<item>
- <p>For maximum compatibility, it is safe to archive files with names
- up to 100 characters in length. Such tar files can generally be
- extracted by any <c>tar</c> program.</p>
- </item>
- <item>
- <p>For filenames exceeding 100 characters in length, the resulting tar
- file can only be correctly extracted by a POSIX-compatible <c>tar</c>
- program (such as Solaris <c>tar</c> or a modern GNU <c>tar</c>).</p>
- </item>
- <item>
- <p>Files with longer names than 256 bytes cannot be stored.</p>
+ <p>If you must remain compatible with the USTAR tar format, you must ensure file paths being
+ stored are less than 255 bytes in total, with a maximum filename component
+ length of 100 bytes. USTAR uses a header field (prefix) in addition to the name field, and
+ splits file paths longer than 100 bytes into two parts. This split is done on a directory boundary,
+ and is done in such a way as to make the best use of the space available in those two fields, but in practice
+ this will often mean that you have less than 255 bytes for a path. <c>erl_tar</c> will
+ automatically upgrade the format to PAX to handle longer filenames, so this is only an issue if you
+ need to extract the archive with an older implementation of <c>erl_tar</c> or <c>tar</c> which does
+ not support PAX. In this case, the PAX headers will be extracted as regular files, and you will need to
+ apply them manually.</p>
</item>
<item>
- <p>The file name a symbolic link points is always limited
- to 100 characters.</p>
+ <p>Like the above, if you must remain USTAR compatible, you must also ensure that paths for
+ symbolic/hard links are no more than 100 bytes, otherwise PAX headers will be used.</p>
</item>
</list>
</section>
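%% Illustrative sketch of the limitation described above (file names are
%% hypothetical): an entry whose path fits the USTAR name/prefix fields is
%% written as USTAR, while a longer path makes erl_tar fall back to PAX
%% automatically:
%%
%%   LongName = lists:flatten(lists:duplicate(30, "directory/")) ++ "file.txt",
%%   ok = erl_tar:create("example.tar",
%%                       [{"short.txt", <<"short entry">>},
%%                        {LongName, <<"long path entry">>}]).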
@@ -129,7 +131,9 @@
<fsummary>Add a file to an open tar file.</fsummary>
<type>
<v>TarDescriptor = term()</v>
- <v>Filename = filename()</v>
+ <v>FilenameOrBin = filename()|binary()</v>
+ <v>NameInArchive = filename()</v>
+ <v>Filename = filename()|{NameInArchive,FilenameOrBin}</v>
<v>Options = [Option]</v>
<v>Option = dereference|verbose|{chunks,ChunkSize}</v>
<v>ChunkSize = positive_integer()</v>
@@ -139,6 +143,9 @@
<desc>
<p>Adds a file to a tar file that has been opened for writing by
<seealso marker="#open/2"><c>open/1</c></seealso>.</p>
+ <p><c>NameInArchive</c> is the name under which the file is
+ stored in the tar file. The file gets this name when it is
+ extracted from the tar file.</p>
<p>Options:</p>
<taglist>
<tag><c>dereference</c></tag>
@@ -183,9 +190,6 @@
<seealso marker="#open/2"><c>open/2</c></seealso>. This function
accepts the same options as
<seealso marker="#add/3"><c>add/3</c></seealso>.</p>
- <p><c>NameInArchive</c> is the name under which the file becomes
- stored in the tar file. The file gets this name when it is
- extracted from the tar file.</p>
</desc>
</func>
@@ -206,8 +210,8 @@
<fsummary>Create a tar archive.</fsummary>
<type>
<v>Name = filename()</v>
- <v>FileList = [Filename|{NameInArchive, binary()},{NameInArchive,
- Filename}]</v>
+ <v>FileList = [Filename|{NameInArchive, FilenameOrBin}]</v>
+ <v>FilenameOrBin = filename()|binary()</v>
<v>Filename = filename()</v>
<v>NameInArchive = filename()</v>
<v>RetValue = ok|{error,{Name,Reason}}</v>
@@ -225,8 +229,8 @@
<fsummary>Create a tar archive with options.</fsummary>
<type>
<v>Name = filename()</v>
- <v>FileList = [Filename|{NameInArchive, binary()},{NameInArchive,
- Filename}]</v>
+ <v>FileList = [Filename|{NameInArchive, FilenameOrBin}]</v>
+ <v>FilenameOrBin = filename()|binary()</v>
<v>Filename = filename()</v>
<v>NameInArchive = filename()</v>
<v>OptionList = [Option]</v>
@@ -275,7 +279,8 @@
<name>extract(Name) -> RetValue</name>
<fsummary>Extract all files from a tar file.</fsummary>
<type>
- <v>Name = filename()</v>
+ <v>Name = filename() | {binary,binary()} | {file,Fd}</v>
+ <v>Fd = file_descriptor()</v>
<v>RetValue = ok|{error,{Name,Reason}}</v>
<v>Reason = term()</v>
</type>
@@ -294,8 +299,7 @@
<name>extract(Name, OptionList)</name>
<fsummary>Extract files from a tar file.</fsummary>
<type>
- <v>Name = filename() | {binary,Binary} | {file,Fd}</v>
- <v>Binary = binary()</v>
+ <v>Name = filename() | {binary,binary()} | {file,Fd}</v>
<v>Fd = file_descriptor()</v>
<v>OptionList = [Option]</v>
<v>Option = {cwd,Cwd}|{files,FileList}|keep_old_files|verbose|memory</v>
@@ -521,7 +525,7 @@ erl_tar:close(TarDesc)</code>
<name>table(Name) -> RetValue</name>
<fsummary>Retrieve the name of all files in a tar file.</fsummary>
<type>
- <v>Name = filename()</v>
+ <v>Name = filename()|{binary,binary()}|{file,file_descriptor()}</v>
<v>RetValue = {ok,[string()]}|{error,{Name,Reason}}</v>
<v>Reason = term()</v>
</type>
@@ -535,7 +539,7 @@ erl_tar:close(TarDesc)</code>
<fsummary>Retrieve name and information of all files in a tar file.
</fsummary>
<type>
- <v>Name = filename()</v>
+ <v>Name = filename()|{binary,binary()}|{file,file_descriptor()}</v>
</type>
<desc>
<p>Retrieves the names of all files in the tar file <c>Name</c>.</p>
@@ -546,7 +550,7 @@ erl_tar:close(TarDesc)</code>
<name>t(Name)</name>
<fsummary>Print the name of each file in a tar file.</fsummary>
<type>
- <v>Name = filename()</v>
+ <v>Name = filename()|{binary,binary()}|{file,file_descriptor()}</v>
</type>
<desc>
<p>Prints the names of all files in the tar file <c>Name</c> to the
@@ -559,7 +563,7 @@ erl_tar:close(TarDesc)</code>
<fsummary>Print name and information for each file in a tar file.
</fsummary>
<type>
- <v>Name = filename()</v>
+ <v>Name = filename()|{binary,binary()}|{file,file_descriptor()}</v>
</type>
<desc>
<p>Prints names and information about all files in the tar file
diff --git a/lib/stdlib/doc/src/filelib.xml b/lib/stdlib/doc/src/filelib.xml
index 7c6380ce28..ad73fc254a 100644
--- a/lib/stdlib/doc/src/filelib.xml
+++ b/lib/stdlib/doc/src/filelib.xml
@@ -60,6 +60,12 @@
<datatype>
<name name="filename_all"/>
</datatype>
+ <datatype>
+ <name name="find_file_rule"/>
+ </datatype>
+ <datatype>
+ <name name="find_source_rule"/>
+ </datatype>
</datatypes>
<funcs>
@@ -226,7 +232,51 @@ filelib:wildcard("lib/**/*.{erl,hrl}")</code>
directory.</p>
</desc>
</func>
+
+ <func>
+ <name name="find_file" arity="2"/>
+ <name name="find_file" arity="3"/>
+ <fsummary>Find a file relative to a given directory.</fsummary>
+ <desc>
+ <p>Looks for a file of the given name by applying suffix rules to
+ the given directory path. For example, a rule <c>{"ebin", "src"}</c>
+ means that if the directory path ends with <c>"ebin"</c>, the
+ corresponding path ending in <c>"src"</c> should be searched.</p>
+ <p>If <c><anno>Rules</anno></c> is left out or is an empty list, the
+ default system rules are used. See also the Kernel application
+ parameter <seealso
+ marker="kernel:kernel_app#source_search_rules"><c>source_search_rules</c></seealso>.</p>
+ </desc>
+ </func>
+ <func>
+ <name name="find_source" arity="1"/>
+ <fsummary>Find the source file for a given object file.</fsummary>
+ <desc>
+ <p>Equivalent to <c>find_source(Base, Dir)</c>, where <c>Dir</c> is
+ <c>filename:dirname(<anno>FilePath</anno>)</c> and <c>Base</c> is
+ <c>filename:basename(<anno>FilePath</anno>)</c>.</p>
+ </desc>
+ </func>
+ <func>
+ <name name="find_source" arity="2"/>
+ <name name="find_source" arity="3"/>
+ <fsummary>Find a source file relative to a given directory.</fsummary>
+ <desc>
+ <p>Applies file extension specific rules to find the source file for
+ a given object file relative to the object directory. For example,
+ for a file with the extension <c>.beam</c>, the default rule is to
+ look for a file with a corresponding extension <c>.erl</c> by
+ replacing the suffix <c>"ebin"</c> of the object directory path with
+ <c>"src"</c>.
+ The file search is done through <seealso
+ marker="#find_file/3"><c>find_file/3</c></seealso>. The directory of
+ the object file is always tried before any other directory specified
+ by the rules.</p>
+ <p>If <c><anno>Rules</anno></c> is left out or is an empty list, the
+ default system rules are used. See also the Kernel application
+ parameter <seealso
+ marker="kernel:kernel_app#source_search_rules"><c>source_search_rules</c></seealso>.</p>
+ </desc>
+ </func>
</funcs>
</erlref>
-
-
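%% Illustrative use of find_source/1 as described above (paths are
%% hypothetical), relying on the default rule that maps an "ebin" directory
%% to "src" and a .beam suffix to .erl:
%%
%%   {ok, SrcFile} = filelib:find_source("/opt/app/ebin/my_mod.beam"),
%%   %% SrcFile is expected to be "/opt/app/src/my_mod.erl" if that file exists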
diff --git a/lib/stdlib/doc/src/filename.xml b/lib/stdlib/doc/src/filename.xml
index 2a413835d0..7acef51ca1 100644
--- a/lib/stdlib/doc/src/filename.xml
+++ b/lib/stdlib/doc/src/filename.xml
@@ -356,10 +356,12 @@ true
<p>Finds the source filename and compiler options for a module.
The result can be fed to <seealso marker="compiler:compile#file/2">
<c>compile:file/2</c></seealso> to compile the file again.</p>
- <warning><p>It is not recommended to use this function. If possible,
- use the <seealso marker="beam_lib"><c>beam_lib(3)</c></seealso>
- module to extract the abstract code format from the Beam file and
- compile that instead.</p></warning>
+ <warning>
+ <p>This function is deprecated. Use <seealso marker="filelib#find_source/1">
+ <c>filelib:find_source/1</c></seealso> instead for finding source files.</p>
+ <p>If possible, use the <seealso marker="beam_lib"><c>beam_lib(3)</c></seealso>
+ module to extract the compiler options and the abstract code
+ format from the Beam file and compile that instead.</p></warning>
<p>Argument <c><anno>Beam</anno></c>, which can be a string or an atom,
specifies either the module name or the path to the source
code, with or without extension <c>".erl"</c>. In either
diff --git a/lib/stdlib/doc/src/shell.xml b/lib/stdlib/doc/src/shell.xml
index d6e8036d4e..f52bc39deb 100644
--- a/lib/stdlib/doc/src/shell.xml
+++ b/lib/stdlib/doc/src/shell.xml
@@ -165,12 +165,12 @@
<item>
<p>Evaluates <c>shell_default:help()</c>.</p>
</item>
- <tag><c>c(File)</c></tag>
+ <tag><c>c(Mod)</c></tag>
<item>
- <p>Evaluates <c>shell_default:c(File)</c>. This compiles
- and loads code in <c>File</c> and purges old versions of
- code, if necessary. Assumes that the file and module names
- are the same.</p>
+ <p>Evaluates <c>shell_default:c(Mod)</c>. This compiles and
+ loads the module <c>Mod</c> and purges old versions of the
+ code, if necessary. <c>Mod</c> can be either a module name or a
+ source file path, with or without <c>.erl</c> extension.</p>
</item>
<tag><c>catch_exception(Bool)</c></tag>
<item>
diff --git a/lib/stdlib/src/Makefile b/lib/stdlib/src/Makefile
index d6c0ff8d8d..ed3dfb342c 100644
--- a/lib/stdlib/src/Makefile
+++ b/lib/stdlib/src/Makefile
@@ -130,7 +130,7 @@ HRL_FILES= \
../include/qlc.hrl \
../include/zip.hrl
-INTERNAL_HRL_FILES= dets.hrl
+INTERNAL_HRL_FILES= dets.hrl erl_tar.hrl
ERL_FILES= $(MODULES:%=%.erl)
@@ -228,7 +228,7 @@ $(EBIN)/dets_v9.beam: dets.hrl
$(EBIN)/erl_bits.beam: ../include/erl_bits.hrl
$(EBIN)/erl_compile.beam: ../include/erl_compile.hrl ../../kernel/include/file.hrl
$(EBIN)/erl_lint.beam: ../include/erl_bits.hrl
-$(EBIN)/erl_tar.beam: ../../kernel/include/file.hrl
+$(EBIN)/erl_tar.beam: ../../kernel/include/file.hrl erl_tar.hrl
$(EBIN)/file_sorter.beam: ../../kernel/include/file.hrl
$(EBIN)/filelib.beam: ../../kernel/include/file.hrl
$(EBIN)/filename.beam: ../../kernel/include/file.hrl
diff --git a/lib/stdlib/src/beam_lib.erl b/lib/stdlib/src/beam_lib.erl
index d7ee5c1f5d..461acf03be 100644
--- a/lib/stdlib/src/beam_lib.erl
+++ b/lib/stdlib/src/beam_lib.erl
@@ -63,7 +63,7 @@
-type label() :: integer().
-type chunkid() :: nonempty_string(). % approximation of the strings below
-%% "Abst" | "Attr" | "CInf" | "ExpT" | "ImpT" | "LocT" | "Atom".
+%% "Abst" | "Attr" | "CInf" | "ExpT" | "ImpT" | "LocT" | "Atom" | "AtU8".
-type chunkname() :: 'abstract_code' | 'attributes' | 'compile_info'
| 'exports' | 'labeled_exports'
| 'imports' | 'indexed_imports'
@@ -520,6 +520,8 @@ read_chunk_data(File0, ChunkNames0, Options)
end.
%% -> {ok, list()} | throw(Error)
+check_chunks([atoms | Ids], File, IL, L) ->
+ check_chunks(Ids, File, ["Atom", "AtU8" | IL], [{atom_chunk, atoms} | L]);
check_chunks([ChunkName | Ids], File, IL, L) when is_atom(ChunkName) ->
ChunkId = chunk_name_to_id(ChunkName, File),
check_chunks(Ids, File, [ChunkId | IL], [{ChunkId, ChunkName} | L]);
@@ -537,6 +539,10 @@ scan_beam(File, What0, AllowMissingChunks) ->
case scan_beam1(File, What0) of
{missing, _FD, Mod, Data, What} when AllowMissingChunks ->
{ok, Mod, [{Id, missing_chunk} || Id <- What] ++ Data};
+ {missing, _FD, Mod, Data, ["Atom"]} ->
+ {ok, Mod, Data};
+ {missing, _FD, Mod, Data, ["AtU8"]} ->
+ {ok, Mod, Data};
{missing, FD, _Mod, _Data, What} ->
error({missing_chunk, filename(FD), hd(What)});
R ->
@@ -581,18 +587,23 @@ scan_beam(FD, Pos, What, Mod, Data) ->
error({invalid_beam_file, filename(FD), Pos})
end.
-get_data(Cs, "Atom"=Id, FD, Size, Pos, Pos2, _Mod, Data) ->
+get_atom_data(Cs, Id, FD, Size, Pos, Pos2, Data, Encoding) ->
NewCs = del_chunk(Id, Cs),
{NFD, Chunk} = get_chunk(Id, Pos, Size, FD),
<<_Num:32, Chunk2/binary>> = Chunk,
- {Module, _} = extract_atom(Chunk2),
+ {Module, _} = extract_atom(Chunk2, Encoding),
C = case Cs of
info ->
{Id, Pos, Size};
_ ->
{Id, Chunk}
end,
- scan_beam(NFD, Pos2, NewCs, Module, [C | Data]);
+ scan_beam(NFD, Pos2, NewCs, Module, [C | Data]).
+
+get_data(Cs, "Atom" = Id, FD, Size, Pos, Pos2, _Mod, Data) ->
+ get_atom_data(Cs, Id, FD, Size, Pos, Pos2, Data, latin1);
+get_data(Cs, "AtU8" = Id, FD, Size, Pos, Pos2, _Mod, Data) ->
+ get_atom_data(Cs, Id, FD, Size, Pos, Pos2, Data, utf8);
get_data(info, Id, FD, Size, Pos, Pos2, Mod, Data) ->
scan_beam(FD, Pos2, info, Mod, [{Id, Pos, Size} | Data]);
get_data(Chunks, Id, FD, Size, Pos, Pos2, Mod, Data) ->
@@ -624,6 +635,9 @@ get_chunk(Id, Pos, Size, FD) ->
{NFD, Chunk}
end.
+chunks_to_data([{atom_chunk, Name} | CNs], Chunks, File, Cs, Module, Atoms, L) ->
+ {NewAtoms, Ret} = chunk_to_data(Name, <<"">>, File, Cs, Atoms, Module),
+ chunks_to_data(CNs, Chunks, File, Cs, Module, NewAtoms, [Ret | L]);
chunks_to_data([{Id, Name} | CNs], Chunks, File, Cs, Module, Atoms, L) ->
{_Id, Chunk} = lists:keyfind(Id, 1, Chunks),
{NewAtoms, Ret} = chunk_to_data(Name, Chunk, File, Cs, Atoms, Module),
@@ -651,7 +665,7 @@ chunk_to_data(abstract_code=Id, Chunk, File, _Cs, AtomTable, Mod) ->
<<>> ->
{AtomTable, {Id, no_abstract_code}};
<<0:8,N:8,Mode0:N/binary,Rest/binary>> ->
- Mode = list_to_atom(binary_to_list(Mode0)),
+ Mode = binary_to_atom(Mode0, utf8),
decrypt_abst(Mode, Mod, File, Id, AtomTable, Rest);
_ ->
case catch binary_to_term(Chunk) of
@@ -683,7 +697,6 @@ chunk_to_data(ChunkId, Chunk, _File,
_Cs, AtomTable, _Module) when is_list(ChunkId) ->
{AtomTable, {ChunkId, Chunk}}. % Chunk is a binary
-chunk_name_to_id(atoms, _) -> "Atom";
chunk_name_to_id(indexed_imports, _) -> "ImpT";
chunk_name_to_id(imports, _) -> "ImpT";
chunk_name_to_id(exports, _) -> "ExpT";
@@ -738,25 +751,30 @@ atm(AT, N) ->
%% AT is updated.
ensure_atoms({empty, AT}, Cs) ->
- {_Id, AtomChunk} = lists:keyfind("Atom", 1, Cs),
- extract_atoms(AtomChunk, AT),
+ case lists:keyfind("AtU8", 1, Cs) of
+ {_Id, AtomChunk} when is_binary(AtomChunk) ->
+ extract_atoms(AtomChunk, AT, utf8);
+ _ ->
+ {_Id, AtomChunk} = lists:keyfind("Atom", 1, Cs),
+ extract_atoms(AtomChunk, AT, latin1)
+ end,
AT;
ensure_atoms(AT, _Cs) ->
AT.
-extract_atoms(<<_Num:32, B/binary>>, AT) ->
- extract_atoms(B, 1, AT).
+extract_atoms(<<_Num:32, B/binary>>, AT, Encoding) ->
+ extract_atoms(B, 1, AT, Encoding).
-extract_atoms(<<>>, _I, _AT) ->
+extract_atoms(<<>>, _I, _AT, _Encoding) ->
true;
-extract_atoms(B, I, AT) ->
- {Atom, B1} = extract_atom(B),
+extract_atoms(B, I, AT, Encoding) ->
+ {Atom, B1} = extract_atom(B, Encoding),
true = ets:insert(AT, {I, Atom}),
- extract_atoms(B1, I+1, AT).
+ extract_atoms(B1, I+1, AT, Encoding).
-extract_atom(<<Len, B/binary>>) ->
+extract_atom(<<Len, B/binary>>, Encoding) ->
<<SB:Len/binary, Tail/binary>> = B,
- {list_to_atom(binary_to_list(SB)), Tail}.
+ {binary_to_atom(SB, Encoding), Tail}.
%%% Utils.
@@ -856,12 +874,12 @@ significant_chunks() ->
%% for a module. They are listed in the order that they should be MD5:ed.
md5_chunks() ->
- ["Atom", "Code", "StrT", "ImpT", "ExpT", "FunT", "LitT"].
+ ["Atom", "AtU8", "Code", "StrT", "ImpT", "ExpT", "FunT", "LitT"].
%% The following chunks are mandatory in every Beam file.
mandatory_chunks() ->
- ["Code", "ExpT", "ImpT", "StrT", "Atom"].
+ ["Code", "ExpT", "ImpT", "StrT"].
%%% ====================================================================
%%% The rest of the file handles encrypted debug info.
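%% Illustrative sketch (hypothetical beam file): the 'atoms' chunk request now
%% works for beam files carrying either the old latin-1 "Atom" chunk or the
%% new UTF-8 "AtU8" chunk, and either of the two may be absent:
%%
%%   {ok, {my_mod, [{atoms, Atoms}]}} = beam_lib:chunks("my_mod.beam", [atoms]),
%%   [{1, _FirstAtom} | _] = lists:keysort(1, Atoms).   %% indices start at 1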
diff --git a/lib/stdlib/src/binary.erl b/lib/stdlib/src/binary.erl
index ccc827ca2d..45666fbcb4 100644
--- a/lib/stdlib/src/binary.erl
+++ b/lib/stdlib/src/binary.erl
@@ -24,7 +24,7 @@
-export_type([cp/0]).
--opaque cp() :: {'am' | 'bm', binary()}.
+-opaque cp() :: {'am' | 'bm', reference()}.
-type part() :: {Start :: non_neg_integer(), Length :: integer()}.
%%% BIFs.
diff --git a/lib/stdlib/src/c.erl b/lib/stdlib/src/c.erl
index d36630214c..d3f9a9c7af 100644
--- a/lib/stdlib/src/c.erl
+++ b/lib/stdlib/src/c.erl
@@ -23,7 +23,7 @@
%% Avoid warning for local function error/2 clashing with autoimported BIF.
-compile({no_auto_import,[error/2]}).
--export([help/0,lc/1,c/1,c/2,nc/1,nc/2, nl/1,l/1,i/0,i/1,ni/0,
+-export([help/0,lc/1,c/1,c/2,c/3,nc/1,nc/2, nl/1,l/1,i/0,i/1,ni/0,
y/1, y/2,
lc_batch/0, lc_batch/1,
i/3,pid/3,m/0,m/1,mm/0,lm/0,
@@ -44,7 +44,7 @@
help() ->
io:put_chars(<<"bt(Pid) -- stack backtrace for a process\n"
- "c(File) -- compile and load code in <File>\n"
+ "c(Mod) -- compile and load module or file <Mod>\n"
"cd(Dir) -- change working directory\n"
"flush() -- flush any messages sent to the shell\n"
"help() -- help info\n"
@@ -72,32 +72,222 @@ help() ->
"xm(M) -- cross reference check a module\n"
"y(File) -- generate a Yecc parser\n">>).
-%% c(FileName)
-%% Compile a file/module.
+%% c(Module)
+%% Compile a module/file.
--spec c(File) -> {'ok', Module} | 'error' when
- File :: file:name(),
- Module :: module().
+-spec c(Module) -> {'ok', ModuleName} | 'error' when
+ Module :: file:name(),
+ ModuleName :: module().
-c(File) -> c(File, []).
+c(Module) -> c(Module, []).
--spec c(File, Options) -> {'ok', Module} | 'error' when
- File :: file:name(),
+-spec c(Module, Options) -> {'ok', ModuleName} | 'error' when
+ Module :: file:name(),
Options :: [compile:option()],
- Module :: module().
+ ModuleName :: module().
+
+c(Module, Opts) when is_atom(Module) ->
+ %% either a module name or a source file name (possibly without
+ %% suffix); if such a source file exists, it is used to compile from
+ %% scratch with the given options, otherwise look for an object file
+ Suffix = case filename:extension(Module) of
+ "" -> src_suffix(Opts);
+ S -> S
+ end,
+ SrcFile = filename:rootname(Module, Suffix) ++ Suffix,
+ case filelib:is_file(SrcFile) of
+ true ->
+ compile_and_load(SrcFile, Opts);
+ false ->
+ c(Module, Opts, fun (_) -> true end)
+ end;
+c(Module, Opts) ->
+ %% we never interpret a string as a module name, only as a file
+ compile_and_load(Module, Opts).
+
+%% This tries to find an existing object file and use its compile_info and
+%% source path to recompile the module, overwriting the old object file.
+%% The Filter parameter is applied to the old compile options
+
+-spec c(Module, Options, Filter) -> {'ok', ModuleName} | 'error' when
+ Module :: atom(),
+ Options :: [compile:option()],
+ Filter :: fun ((compile:option()) -> boolean()),
+ ModuleName :: module().
+
+c(Module, Options, Filter) when is_atom(Module) ->
+ case find_beam(Module) of
+ BeamFile when is_list(BeamFile) ->
+ c(Module, Options, Filter, BeamFile);
+ Error ->
+ {error, Error}
+ end.
+
+c(Module, Options, Filter, BeamFile) ->
+ case compile_info(Module, BeamFile) of
+ Info when is_list(Info) ->
+ case find_source(BeamFile, Info) of
+ SrcFile when is_list(SrcFile) ->
+ c(SrcFile, Options, Filter, BeamFile, Info);
+ Error ->
+ Error
+ end;
+ Error ->
+ Error
+ end.
+
+c(SrcFile, NewOpts, Filter, BeamFile, Info) ->
+ %% Filter old options; also remove options that will be replaced.
+ %% Write new beam over old beam unless other outdir is specified.
+ F = fun (Opt) -> not is_outdir_opt(Opt) andalso Filter(Opt) end,
+ Options = (NewOpts ++ [{outdir,filename:dirname(BeamFile)}]
+ ++ lists:filter(F, old_options(Info))),
+ format("Recompiling ~s\n", [SrcFile]),
+ safe_recompile(SrcFile, Options, BeamFile).
+
+old_options(Info) ->
+ case lists:keyfind(options, 1, Info) of
+ {options, Opts} -> Opts;
+ false -> []
+ end.
+
+%% prefer the source path in the compile info if the file exists,
+%% otherwise do a standard source search relative to the beam file
+find_source(BeamFile, Info) ->
+ case lists:keyfind(source, 1, Info) of
+ {source, SrcFile} ->
+ case filelib:is_file(SrcFile) of
+ true -> SrcFile;
+ false -> find_source(BeamFile)
+ end;
+ _ ->
+ find_source(BeamFile)
+ end.
+
+find_source(BeamFile) ->
+ case filelib:find_source(BeamFile) of
+ {ok, SrcFile} -> SrcFile;
+ _ -> {error, no_source}
+ end.
-c(File, Opts0) when is_list(Opts0) ->
- Opts = [report_errors,report_warnings|Opts0],
+%% find the beam file for a module, preferring the path reported by code:which()
+%% if it still exists, or otherwise by searching the code path
+find_beam(Module) when is_atom(Module) ->
+ case code:which(Module) of
+ Beam when is_list(Beam), Beam =/= "" ->
+ case erlang:module_loaded(Module) of
+ false ->
+ Beam; % code:which/1 found this in the path
+ true ->
+ case filelib:is_file(Beam) of
+ true -> Beam;
+ false -> find_beam_1(Module) % file moved?
+ end
+ end;
+ Other when Other =:= ""; Other =:= cover_compiled ->
+ %% module is loaded but not compiled directly from source
+ find_beam_1(Module);
+ Error ->
+ Error
+ end.
+
+find_beam_1(Module) ->
+ File = atom_to_list(Module) ++ code:objfile_extension(),
+ case code:where_is_file(File) of
+ Beam when is_list(Beam) ->
+ Beam;
+ Error ->
+ Error
+ end.
+
+%% get the compile_info for a module
+%% -will report the info for the module in memory, if loaded
+%% -will try to find and examine the beam file if not in memory
+%% -will not cause a module to become loaded by accident
+compile_info(Module, Beam) when is_atom(Module) ->
+ case erlang:module_loaded(Module) of
+ true ->
+ %% getting the compile info for a loaded module should normally
+ %% work, but return an empty info list if it fails
+ try erlang:get_module_info(Module, compile)
+ catch _:_ -> []
+ end;
+ false ->
+ case beam_lib:chunks(Beam, [compile_info]) of
+ {ok, {_Module, [{compile_info, Info}]}} ->
+ Info;
+ Error ->
+ Error
+ end
+ end.
+
+%% compile module, backing up any existing target file and restoring the
+%% old version if compilation fails (this should only be used when we have
+%% an old beam file that we want to preserve)
+safe_recompile(File, Options, BeamFile) ->
+ %% Note that it's possible that because of options such as 'to_asm',
+ %% the compiler might not actually write a new beam file at all
+ Backup = BeamFile ++ ".bak",
+ case file:rename(BeamFile, Backup) of
+ Status when Status =:= ok; Status =:= {error,enoent} ->
+ case compile_and_load(File, Options) of
+ {ok, _} = Result ->
+ _ = if Status =:= ok -> file:delete(Backup);
+ true -> ok
+ end,
+ Result;
+ Error ->
+ _ = if Status =:= ok -> file:rename(Backup, BeamFile);
+ true -> ok
+ end,
+ Error
+ end;
+ Error ->
+ Error
+ end.
+
+%% Compile the file and load the resulting object code (if any).
+%% Automatically ensures that there is an outdir option, by default the
+%% directory of File, and that a 'from' option will be passed to match the
+%% actual source suffix if needed (unless already specified).
+compile_and_load(File, Opts0) when is_list(Opts0) ->
+ Opts = [report_errors, report_warnings
+ | ensure_from(filename:extension(File),
+ ensure_outdir(filename:dirname(File), Opts0))],
case compile:file(File, Opts) of
{ok,Mod} -> %Listing file.
- machine_load(Mod, File, Opts);
+ purge_and_load(Mod, File, Opts);
{ok,Mod,_Ws} -> %Warnings maybe turned on.
- machine_load(Mod, File, Opts);
+ purge_and_load(Mod, File, Opts);
Other -> %Errors go here
Other
end;
-c(File, Opt) ->
- c(File, [Opt]).
+compile_and_load(File, Opt) ->
+ compile_and_load(File, [Opt]).
+
+ensure_from(Suffix, Opts0) ->
+ case lists:partition(fun is_from_opt/1, Opts0++from_opt(Suffix)) of
+ {[Opt|_], Opts} -> [Opt | Opts];
+ {[], Opts} -> Opts
+ end.
+
+ensure_outdir(Dir, Opts0) ->
+ {[Opt|_], Opts} = lists:partition(fun is_outdir_opt/1,
+ Opts0++[{outdir,Dir}]),
+ [Opt | Opts].
+
+is_outdir_opt({outdir, _}) -> true;
+is_outdir_opt(_) -> false.
+
+is_from_opt(from_core) -> true;
+is_from_opt(from_asm) -> true;
+is_from_opt(from_beam) -> true;
+is_from_opt(_) -> false.
+
+from_opt(".core") -> [from_core];
+from_opt(".S") -> [from_asm];
+from_opt(".beam") -> [from_beam];
+from_opt(_) -> [].
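%% Illustrative evaluations of the helpers above: compile_and_load/2 always
%% ends up with exactly one outdir option and at most one 'from' option,
%% taken from the caller's options or derived from the source suffix:
%%
%%   [from_asm, report] = ensure_from(".S", [report]),
%%   [{outdir,"ebin"}, report] = ensure_outdir("src", [report, {outdir,"ebin"}]).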
%%% Obtain the 'outdir' option from the argument. Return "." if no
%%% such option was given.
@@ -113,18 +303,29 @@ outdir([Opt|Rest]) ->
outdir(Rest)
end.
+%% mimic how suffix is selected in compile:file().
+src_suffix([from_core|_]) -> ".core";
+src_suffix([from_asm|_]) -> ".S";
+src_suffix([from_beam|_]) -> ".beam";
+src_suffix([_|Opts]) -> src_suffix(Opts);
+src_suffix([]) -> ".erl".
+
%%% We have compiled File with options Opts. Find out where the
-%%% output file went to, and load it.
-machine_load(Mod, File, Opts) ->
+%%% output file went and load it, purging any old version.
+purge_and_load(Mod, File, Opts) ->
Dir = outdir(Opts),
- File2 = filename:join(Dir, filename:basename(File, ".erl")),
+ Base = filename:basename(File, src_suffix(Opts)),
+ OutFile = filename:join(Dir, Base),
case compile:output_generated(Opts) of
true ->
- Base = atom_to_list(Mod),
- case filename:basename(File, ".erl") of
+ case atom_to_list(Mod) of
Base ->
code:purge(Mod),
- check_load(code:load_abs(File2,Mod), Mod);
+ %% Note that load_abs() adds the object file suffix
+ case code:load_abs(OutFile, Mod) of
+ {error, _R}=Error -> Error;
+ _ -> {ok, Mod}
+ end;
_OtherMod ->
format("** Module name '~p' does not match file name '~tp' **~n",
[Mod,File]),
@@ -135,13 +336,6 @@ machine_load(Mod, File, Opts) ->
ok
end.
-%%% This function previously warned if the loaded module was
-%%% loaded from some other place than current directory.
-%%% Now, loading from other than current directory is supposed to work.
-%%% so this function does nothing special.
-check_load({error, _R} = Error, _) -> Error;
-check_load(_, Mod) -> {ok, Mod}.
-
%% Compile a list of modules
%% enables the nice unix shell cmd
%% erl -s c lc f1 f2 f3 @d c1=v1 @c2 @i IDir @o ODir -s erlang halt
diff --git a/lib/stdlib/src/dets.erl b/lib/stdlib/src/dets.erl
index 5bc9475fc8..e81383775b 100644
--- a/lib/stdlib/src/dets.erl
+++ b/lib/stdlib/src/dets.erl
@@ -1063,11 +1063,8 @@ foldl_bins([Bin | Bins], MP, Terms) ->
compile_match_spec(select, ?PATTERN_TO_OBJECT_MATCH_SPEC('_') = Spec) ->
{Spec, true};
compile_match_spec(select, Spec) ->
- case catch ets:match_spec_compile(Spec) of
- X when is_binary(X) ->
- {Spec, {match_spec, X}};
- _ ->
- badarg
+ try {Spec, {match_spec, ets:match_spec_compile(Spec)}}
+ catch error:_ -> badarg
end;
compile_match_spec(object, Pat) ->
compile_match_spec(select, ?PATTERN_TO_OBJECT_MATCH_SPEC(Pat));
diff --git a/lib/stdlib/src/edlin_expand.erl b/lib/stdlib/src/edlin_expand.erl
index 5f821caef0..a1a97af4c5 100644
--- a/lib/stdlib/src/edlin_expand.erl
+++ b/lib/stdlib/src/edlin_expand.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2005-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2005-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -101,44 +101,77 @@ match(Prefix, Alts, Extra0) ->
%% Return the list of names L in multiple columns.
format_matches(L) ->
- S = format_col(lists:sort(L), []),
+ {S1, Dots} = format_col(lists:sort(L), []),
+ S = case Dots of
+ true ->
+ {_, Prefix} = longest_common_head(vals(L)),
+ PrefixLen = length(Prefix),
+ case PrefixLen =< 3 of
+ true -> S1; % Do not replace the prefix with "...".
+ false ->
+ LeadingDotsL = leading_dots(L, PrefixLen),
+ {S2, _} = format_col(lists:sort(LeadingDotsL), []),
+ S2
+ end;
+ false -> S1
+ end,
["\n" | S].
format_col([], _) -> [];
-format_col(L, Acc) -> format_col(L, field_width(L), 0, Acc).
-
-format_col(X, Width, Len, Acc) when Width + Len > 79 ->
- format_col(X, Width, 0, ["\n" | Acc]);
-format_col([A|T], Width, Len, Acc0) ->
- H = case A of
- %% If it's a tuple {string(), integer()}, we assume it's an
- %% arity, and meant to be printed.
- {H0, I} when is_integer(I) ->
- H0 ++ "/" ++ integer_to_list(I);
- {H1, _} -> H1;
- H2 -> H2
- end,
- Acc = [io_lib:format("~-*ts", [Width,H]) | Acc0],
- format_col(T, Width, Len+Width, Acc);
-format_col([], _, _, Acc) ->
- lists:reverse(Acc, "\n").
-
-field_width(L) -> field_width(L, 0).
-
-field_width([{H,_}|T], W) ->
+format_col(L, Acc) ->
+ LL = 79,
+ format_col(L, field_width(L, LL), 0, Acc, LL, false).
+
+format_col(X, Width, Len, Acc, LL, Dots) when Width + Len > LL ->
+ format_col(X, Width, 0, ["\n" | Acc], LL, Dots);
+format_col([A|T], Width, Len, Acc0, LL, Dots) ->
+ {H0, R} = format_val(A),
+ Hmax = LL - length(R),
+ {H, NewDots} =
+ case length(H0) > Hmax of
+ true -> {io_lib:format("~-*ts", [Hmax - 3, H0]) ++ "...", true};
+ false -> {H0, Dots}
+ end,
+ Acc = [io_lib:format("~-*ts", [Width, H ++ R]) | Acc0],
+ format_col(T, Width, Len+Width, Acc, LL, NewDots);
+format_col([], _, _, Acc, _LL, Dots) ->
+ {lists:reverse(Acc, "\n"), Dots}.
+
+format_val({H, I}) when is_integer(I) ->
+ %% If it's a tuple {string(), integer()}, we assume it's an
+ %% arity, and meant to be printed.
+ {H, "/" ++ integer_to_list(I)};
+format_val({H, _}) ->
+ {H, ""};
+format_val(H) ->
+ {H, ""}.
+
+field_width(L, LL) -> field_width(L, 0, LL).
+
+field_width([{H,_}|T], W, LL) ->
case length(H) of
- L when L > W -> field_width(T, L);
- _ -> field_width(T, W)
+ L when L > W -> field_width(T, L, LL);
+ _ -> field_width(T, W, LL)
end;
-field_width([H|T], W) ->
+field_width([H|T], W, LL) ->
case length(H) of
- L when L > W -> field_width(T, L);
- _ -> field_width(T, W)
+ L when L > W -> field_width(T, L, LL);
+ _ -> field_width(T, W, LL)
end;
-field_width([], W) when W < 40 ->
+field_width([], W, LL) when W < LL - 3 ->
W + 4;
-field_width([], _) ->
- 40.
+field_width([], _, LL) ->
+ LL.
+
+vals([]) -> [];
+vals([{S, _}|L]) -> [S|vals(L)];
+vals([S|L]) -> [S|vals(L)].
+
+leading_dots([], _Len) -> [];
+leading_dots([{H, I}|L], Len) ->
+ [{"..." ++ nthtail(Len, H), I}|leading_dots(L, Len)];
+leading_dots([H|L], Len) ->
+ ["..." ++ nthtail(Len, H)|leading_dots(L, Len)].
longest_common_head([]) ->
no;
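The reworked format_col above caps each printed entry at the line length, appends "..." to names that do not fit, and only swaps a long common prefix for leading dots when that prefix is longer than three characters. A rough standalone sketch of the truncation step (helper name is hypothetical):

    %% Keep at most Max characters of Name, marking the cut with "...".
    truncate(Name, Max) when length(Name) > Max, Max > 3 ->
        lists:sublist(Name, Max - 3) ++ "...";
    truncate(Name, _Max) ->
        Name.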
diff --git a/lib/stdlib/src/erl_expand_records.erl b/lib/stdlib/src/erl_expand_records.erl
index 2280464bff..16220bceb4 100644
--- a/lib/stdlib/src/erl_expand_records.erl
+++ b/lib/stdlib/src/erl_expand_records.erl
@@ -30,13 +30,13 @@
-import(lists, [map/2,foldl/3,foldr/3,sort/1,reverse/1,duplicate/2]).
--record(exprec, {compile=[], % Compile flags
- vcount=0, % Variable counter
- calltype=#{}, % Call types
- records=dict:new(), % Record definitions
- strict_ra=[], % strict record accesses
- checked_ra=[] % successfully accessed records
- }).
+-record(exprec, {compile=[], % Compile flags
+ vcount=0, % Variable counter
+ calltype=#{}, % Call types
+ records=#{}, % Record definitions
+ strict_ra=[], % strict record accesses
+ checked_ra=[] % successfully accessed records
+ }).
-spec(module(AbsForms, CompileOptions) -> AbsForms2 when
AbsForms :: [erl_parse:abstract_form()],
@@ -72,7 +72,7 @@ init_calltype_imports([], Ctype) -> Ctype.
forms([{attribute,_,record,{Name,Defs}}=Attr | Fs], St0) ->
NDefs = normalise_fields(Defs),
- St = St0#exprec{records=dict:store(Name, NDefs, St0#exprec.records)},
+ St = St0#exprec{records=maps:put(Name, NDefs, St0#exprec.records)},
{Fs1, St1} = forms(Fs, St),
{[Attr | Fs1], St1};
forms([{function,L,N,A,Cs0} | Fs0], St0) ->
@@ -546,7 +546,7 @@ normalise_fields(Fs) ->
%% record_fields(RecordName, State)
%% find_field(FieldName, Fields)
-record_fields(R, St) -> dict:fetch(R, St#exprec.records).
+record_fields(R, St) -> maps:get(R, St#exprec.records).
find_field(F, [{record_field,_,{atom,_,F},Val} | _]) -> {ok,Val};
find_field(F, [_ | Fs]) -> find_field(F, Fs);
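The record-definition store is now a map, and the dict calls translate one-to-one: dict:new/0 becomes #{}, dict:store/3 becomes maps:put/3, and dict:fetch/2 becomes maps:get/2 (both raise on a missing key). In the shell, for example:

    1> Defs0 = #{}.
    #{}
    2> Defs1 = maps:put(person, [name, age], Defs0).
    #{person => [name,age]}
    3> maps:get(person, Defs1).
    [name,age]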
diff --git a/lib/stdlib/src/erl_tar.erl b/lib/stdlib/src/erl_tar.erl
index a383a0fc67..086e77cd28 100644
--- a/lib/stdlib/src/erl_tar.erl
+++ b/lib/stdlib/src/erl_tar.erl
@@ -1,8 +1,8 @@
%%
%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 1997-2016. All Rights Reserved.
-%%
+%%
+%% Copyright Ericsson AB 1997-2017. All Rights Reserved.
+%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
@@ -14,191 +14,245 @@
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-%%
+%%
%% %CopyrightEnd%
%%
+%% This module implements extraction/creation of tar archives.
+%% It supports reading most common tar formats, namely V7, STAR,
+%% USTAR, GNU, BSD/libarchive, and PAX. It produces archives in USTAR
+%% format, unless it must use PAX headers, in which case it produces PAX
+%% format.
+%%
+%% The following references were used:
+%% http://www.freebsd.org/cgi/man.cgi?query=tar&sektion=5
+%% http://www.gnu.org/software/tar/manual/html_node/Standard.html
+%% http://pubs.opengroup.org/onlinepubs/9699919799/utilities/pax.html
-module(erl_tar).
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% Purpose: Unix tar (tape archive) utility.
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
--export([init/3, create/2, create/3, extract/1, extract/2, table/1, table/2,
- open/2, close/1, add/3, add/4,
- t/1, tt/1, format_error/1]).
+-export([init/3,
+ create/2, create/3,
+ extract/1, extract/2,
+ table/1, table/2, t/1, tt/1,
+ open/2, close/1,
+ add/3, add/4,
+ format_error/1]).
-include_lib("kernel/include/file.hrl").
+-include_lib("erl_tar.hrl").
--record(add_opts,
- {read_info, % Fun to use for read file/link info.
- chunk_size = 0, % For file reading when sending to sftp. 0=do not chunk
- verbose = false :: boolean()}). % Verbose on/off.
-
-%% Opens a tar archive.
-
-init(UsrHandle, AccessMode, Fun) when is_function(Fun,2) ->
- {ok, {AccessMode,{tar_descriptor,UsrHandle,Fun}}}.
-
-%%%================================================================
-%%% The open function with friends is to keep the file and binary api of this module
-open(Name, Mode) ->
- case open_mode(Mode) of
- {ok, Access, Raw, Opts} ->
- open1(Name, Access, Raw, Opts);
- {error, Reason} ->
- {error, {Name, Reason}}
- end.
-
-open1({binary,Bin}, read, _Raw, Opts) ->
- case file:open(Bin, [ram,binary,read]) of
- {ok,File} ->
- _ = [ram_file:uncompress(File) || Opts =:= [compressed]],
- init(File,read,file_fun());
- Error ->
- Error
- end;
-open1({file, Fd}, read, _Raw, _Opts) ->
- init(Fd, read, file_fun());
-open1(Name, Access, Raw, Opts) ->
- case file:open(Name, Raw ++ [binary, Access|Opts]) of
- {ok, File} ->
- init(File, Access, file_fun());
- {error, Reason} ->
- {error, {Name, Reason}}
- end.
-
-file_fun() ->
- fun(write, {Fd,Data}) -> file:write(Fd, Data);
- (position, {Fd,Pos}) -> file:position(Fd, Pos);
- (read2, {Fd,Size}) -> file:read(Fd,Size);
- (close, Fd) -> file:close(Fd)
- end.
-
-%%% End of file and binary api (except for open_mode/1 downwards
-%%%================================================================
-
-%% Closes a tar archive.
-
-close({read, File}) ->
- ok = do_close(File);
-close({write, File}) ->
- PadResult = pad_file(File),
- ok = do_close(File),
- PadResult;
-close(_) ->
- {error, einval}.
-
-%% Adds a file to a tape archive.
-
-add(File, Name, Options) ->
- add(File, Name, Name, Options).
-add({write, File}, Name, NameInArchive, Options) ->
- Opts = #add_opts{read_info=fun(F) -> file:read_link_info(F) end},
- add1(File, Name, NameInArchive, add_opts(Options, Opts));
-add({read, _File}, _, _, _) ->
- {error, eacces};
-add(_, _, _, _) ->
- {error, einval}.
-
-add_opts([dereference|T], Opts) ->
- add_opts(T, Opts#add_opts{read_info=fun(F) -> file:read_file_info(F) end});
-add_opts([verbose|T], Opts) ->
- add_opts(T, Opts#add_opts{verbose=true});
-add_opts([{chunks,N}|T], Opts) ->
- add_opts(T, Opts#add_opts{chunk_size=N});
-add_opts([_|T], Opts) ->
- add_opts(T, Opts);
-add_opts([], Opts) ->
- Opts.
-
-%% Creates a tar file Name containing the given files.
-
-create(Name, Filenames) ->
- create(Name, Filenames, []).
-
-%% Creates a tar archive Name containing the given files.
-%% Accepted options: verbose, compressed, cooked
+%% Converts the short error reason to a descriptive string.
+-spec format_error(term()) -> string().
+format_error(invalid_tar_checksum) ->
+ "Checksum failed";
+format_error(bad_header) ->
+ "Unrecognized tar header format";
+format_error({bad_header, Reason}) ->
+ lists:flatten(io_lib:format("Unrecognized tar header format: ~p", [Reason]));
+format_error({invalid_header, negative_size}) ->
+ "Invalid header: negative size";
+format_error(invalid_sparse_header_size) ->
+ "Invalid sparse header: negative size";
+format_error(invalid_sparse_map_entry) ->
+ "Invalid sparse map entry";
+format_error({invalid_sparse_map_entry, Reason}) ->
+ lists:flatten(io_lib:format("Invalid sparse map entry: ~p", [Reason]));
+format_error(invalid_end_of_archive) ->
+ "Invalid end of archive";
+format_error(eof) ->
+ "Unexpected end of file";
+format_error(integer_overflow) ->
+ "Failed to parse numeric: integer overflow";
+format_error({misaligned_read, Pos}) ->
+ lists:flatten(io_lib:format("Read a block which was misaligned: block_size=~p pos=~p",
+ [?BLOCK_SIZE, Pos]));
+format_error(invalid_gnu_1_0_sparsemap) ->
+ "Invalid GNU sparse map (version 1.0)";
+format_error({invalid_gnu_0_1_sparsemap, Format}) ->
+ lists:flatten(io_lib:format("Invalid GNU sparse map (version ~s)", [Format]));
+format_error({Name,Reason}) ->
+ lists:flatten(io_lib:format("~ts: ~ts", [Name,format_error(Reason)]));
+format_error(Atom) when is_atom(Atom) ->
+ file:format_error(Atom);
+format_error(Term) ->
+ lists:flatten(io_lib:format("~tp", [Term])).
-create(Name, FileList, Options) ->
- Mode = lists:filter(fun(X) -> (X=:=compressed) or (X=:=cooked)
- end, Options),
- case open(Name, [write|Mode]) of
- {ok, TarFile} ->
- Add = fun({NmInA, NmOrBin}) ->
- add(TarFile, NmOrBin, NmInA, Options);
- (Nm) ->
- add(TarFile, Nm, Nm, Options)
- end,
- Result = foreach_while_ok(Add, FileList),
- case {Result, close(TarFile)} of
- {ok, Res} -> Res;
- {Res, _} -> Res
- end;
- Reason ->
- Reason
- end.
+%% Initializes a new reader given a custom file handle and I/O wrappers
+-spec init(handle(), write | read, file_op()) -> {ok, reader()} | {error, badarg}.
+init(Handle, AccessMode, Fun) when is_function(Fun, 2) ->
+ Reader = #reader{handle=Handle,access=AccessMode,func=Fun},
+ {ok, Pos, Reader2} = do_position(Reader, {cur, 0}),
+ {ok, Reader2#reader{pos=Pos}};
+init(_Handle, _AccessMode, _Fun) ->
+ {error, badarg}.
+%%%================================================================
%% Extracts all files from the tar file Name.
-
+-spec extract(open_handle()) -> ok | {error, term()}.
extract(Name) ->
extract(Name, []).
%% Extracts (all) files from the tar file Name.
-%% Options accepted: keep_old_files, {files, ListOfFilesToExtract}, verbose,
-%% {cwd, AbsoluteDirectory}
+%% Options accepted:
+%% - cooked: Opens the tar file without mode `raw`
+%% - compressed: Uncompresses the tar file when reading
+%% - memory: Returns the tar contents as a list of tuples {Name, Bin}
+%% - keep_old_files: Extracted files will not overwrite the destination
+%% - {files, ListOfFilesToExtract}: Only extract ListOfFilesToExtract
+%% - verbose: Prints verbose information about the extraction
+%% - {cwd, AbsoluteDir}: Sets the current working directory for the extraction
+%%   (a usage sketch follows extract1/4 below)
+-spec extract(open_handle(), [extract_opt()]) ->
+ ok
+ | {ok, [{string(), binary()}]}
+ | {error, term()}.
+extract({binary, Bin}, Opts) when is_list(Opts) ->
+ do_extract({binary, Bin}, Opts);
+extract({file, Fd}, Opts) when is_list(Opts) ->
+ do_extract({file, Fd}, Opts);
+extract(#reader{}=Reader, Opts) when is_list(Opts) ->
+ do_extract(Reader, Opts);
+extract(Name, Opts) when is_list(Name); is_binary(Name), is_list(Opts) ->
+ do_extract(Name, Opts).
+
+do_extract(Handle, Opts) when is_list(Opts) ->
+ Opts2 = extract_opts(Opts),
+ Acc = if Opts2#read_opts.output =:= memory -> []; true -> ok end,
+ foldl_read(Handle, fun extract1/4, Acc, Opts2).
+
+extract1(eof, Reader, _, Acc) when is_list(Acc) ->
+ {ok, {ok, lists:reverse(Acc)}, Reader};
+extract1(eof, Reader, _, Acc) ->
+ {ok, Acc, Reader};
+extract1(#tar_header{name=Name,size=Size}=Header, Reader, Opts, Acc) ->
+ case check_extract(Name, Opts) of
+ true ->
+ case do_read(Reader, Size) of
+ {ok, Bin, Reader2} ->
+ case write_extracted_element(Header, Bin, Opts) of
+ ok ->
+ {ok, Acc, Reader2};
+ {ok, NameBin} when is_list(Acc) ->
+ {ok, [NameBin | Acc], Reader2};
+ {error, _} = Err ->
+ throw(Err)
+ end;
+ {error, _} = Err ->
+ throw(Err)
+ end;
+ false ->
+ {ok, Acc, skip_file(Reader)}
+ end.
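As referenced in the option list above, typical extract/2 calls look like this (archive, directory, and member names are made up; TarBin stands for an archive already read into memory):

    %% Extract two members from a gzip-compressed archive into /tmp/out:
    ok = erl_tar:extract("release.tar.gz",
                         [compressed,
                          {cwd, "/tmp/out"},
                          {files, ["lib/app.beam", "releases/start_erl.data"]}]),

    %% Keep the extracted contents in memory as {Name, Binary} pairs:
    {ok, Files} = erl_tar:extract({binary, TarBin}, [memory]).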
-extract(Name, Opts) ->
- foldl_read(Name, fun extract1/4, ok, extract_opts(Opts)).
+%% Checks if the file Name should be extracted.
+check_extract(_, #read_opts{files=all}) ->
+ true;
+check_extract(Name, #read_opts{files=Files}) ->
+ ordsets:is_element(Name, Files).
-%% Returns a list of names of the files in the tar file Name.
-%% Options accepted: verbose
+%%%================================================================
+%% The following table functions produce a list of information about
+%% the files contained in the archive.
+-type filename() :: string().
+-type typeflag() :: regular | link | symlink |
+ char | block | directory |
+ fifo | reserved | unknown.
+-type mode() :: non_neg_integer().
+-type uid() :: non_neg_integer().
+-type gid() :: non_neg_integer().
+
+-type tar_entry() :: {filename(),
+ typeflag(),
+ non_neg_integer(),
+ calendar:datetime(),
+ mode(),
+ uid(),
+ gid()}.
+%% Returns a list of names of the files in the tar file Name.
+-spec table(open_handle()) -> {ok, [string()]} | {error, term()}.
table(Name) ->
table(Name, []).
%% Returns a list of names of the files in the tar file Name.
%% Options accepted: compressed, verbose, cooked.
-
-table(Name, Opts) ->
+-spec table(open_handle(), [compressed | verbose | cooked]) ->
+ {ok, [tar_entry()]} | {error, term()}.
+table(Name, Opts) when is_list(Opts) ->
foldl_read(Name, fun table1/4, [], table_opts(Opts)).
+table1(eof, Reader, _, Result) ->
+ {ok, {ok, lists:reverse(Result)}, Reader};
+table1(#tar_header{}=Header, Reader, #read_opts{verbose=Verbose}, Result) ->
+ Attrs = table1_attrs(Header, Verbose),
+ Reader2 = skip_file(Reader),
+ {ok, [Attrs|Result], Reader2}.
+
+%% Extracts attributes relevant to table1's output
+table1_attrs(#tar_header{typeflag=Typeflag,mode=Mode}=Header, true) ->
+ Type = typeflag(Typeflag),
+ Name = Header#tar_header.name,
+ Mtime = Header#tar_header.mtime,
+ Uid = Header#tar_header.uid,
+ Gid = Header#tar_header.gid,
+ Size = Header#tar_header.size,
+ {Name, Type, Size, Mtime, Mode, Uid, Gid};
+table1_attrs(#tar_header{name=Name}, _Verbose) ->
+ Name.
+
+typeflag(?TYPE_REGULAR) -> regular;
+typeflag(?TYPE_REGULAR_A) -> regular;
+typeflag(?TYPE_GNU_SPARSE) -> regular;
+typeflag(?TYPE_CONT) -> regular;
+typeflag(?TYPE_LINK) -> link;
+typeflag(?TYPE_SYMLINK) -> symlink;
+typeflag(?TYPE_CHAR) -> char;
+typeflag(?TYPE_BLOCK) -> block;
+typeflag(?TYPE_DIR) -> directory;
+typeflag(?TYPE_FIFO) -> fifo;
+typeflag(_) -> unknown.
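With the verbose option each element of the result is the seven-field tar_entry() tuple assembled by table1_attrs/2 above; without it, only the name is returned. For example (archive name is made up):

    {ok, _Names}  = erl_tar:table("backup.tar"),
    {ok, Entries} = erl_tar:table("backup.tar", [verbose]),
    [{Name, Type, Size, Mtime, Mode, Uid, Gid} | _] = Entries.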
+%%%================================================================
%% Commands for printing the contents of a tape archive,
%% meant to be invoked from the shell.
-t(Name) ->
+%% Prints each filename in the archive
+-spec t(file:filename()) -> ok | {error, term()}.
+t(Name) when is_list(Name); is_binary(Name) ->
case table(Name) of
- {ok, List} ->
- lists:foreach(fun(N) -> ok = io:format("~ts\n", [N]) end, List);
- Error ->
- Error
+ {ok, List} ->
+ lists:foreach(fun(N) -> ok = io:format("~ts\n", [N]) end, List);
+ Error ->
+ Error
end.
+%% Prints verbose information about each file in the archive
+-spec tt(open_handle()) -> ok | {error, term()}.
tt(Name) ->
case table(Name, [verbose]) of
- {ok, List} ->
- lists:foreach(fun print_header/1, List);
- Error ->
- Error
+ {ok, List} ->
+ lists:foreach(fun print_header/1, List);
+ Error ->
+ Error
end.
+%% Used by tt/1 to print a tar_entry tuple
+-spec print_header(tar_entry()) -> ok.
print_header({Name, Type, Size, Mtime, Mode, Uid, Gid}) ->
io:format("~s~s ~4w/~-4w ~7w ~s ~s\n",
- [type_to_string(Type), mode_to_string(Mode),
- Uid, Gid, Size, time_to_string(Mtime), Name]).
+ [type_to_string(Type), mode_to_string(Mode),
+ Uid, Gid, Size, time_to_string(Mtime), Name]).
-type_to_string(regular) -> "-";
+type_to_string(regular) -> "-";
type_to_string(directory) -> "d";
-type_to_string(link) -> "l";
-type_to_string(symlink) -> "s";
-type_to_string(char) -> "c";
-type_to_string(block) -> "b";
-type_to_string(fifo) -> "f";
-type_to_string(_) -> "?".
-
+type_to_string(link) -> "l";
+type_to_string(symlink) -> "s";
+type_to_string(char) -> "c";
+type_to_string(block) -> "b";
+type_to_string(fifo) -> "f";
+type_to_string(unknown) -> "?".
+
+%% Converts a numeric mode to its human-readable representation
mode_to_string(Mode) ->
mode_to_string(Mode, "xwrxwrxwr", []).
-
mode_to_string(Mode, [C|T], Acc) when Mode band 1 =:= 1 ->
mode_to_string(Mode bsr 1, T, [C|Acc]);
mode_to_string(Mode, [_|T], Acc) ->
@@ -206,6 +260,7 @@ mode_to_string(Mode, [_|T], Acc) ->
mode_to_string(_, [], Acc) ->
Acc.
+%% Converts a datetime tuple to a readable string
time_to_string({{Y, Mon, Day}, {H, Min, _}}) ->
io_lib:format("~s ~2w ~s:~s ~w", [month(Mon), Day, two_d(H), two_d(Min), Y]).
@@ -225,809 +280,1608 @@ month(10) -> "Oct";
month(11) -> "Nov";
month(12) -> "Dec".
-%% Converts the short error reason to a descriptive string.
+%%%================================================================
+%% The open function and friends preserve the file and binary APIs of this module
+-type open_handle() :: file:filename()
+ | {binary, binary()}
+ | {file, term()}.
+-spec open(open_handle(), [write | compressed | cooked]) ->
+ {ok, reader()} | {error, term()}.
+open({binary, Bin}, Mode) when is_binary(Bin) ->
+ do_open({binary, Bin}, Mode);
+open({file, Fd}, Mode) ->
+ do_open({file, Fd}, Mode);
+open(Name, Mode) when is_list(Name); is_binary(Name) ->
+ do_open(Name, Mode).
+
+do_open(Name, Mode) when is_list(Mode) ->
+ case open_mode(Mode) of
+ {ok, Access, Raw, Opts} ->
+ open1(Name, Access, Raw, Opts);
+ {error, Reason} ->
+ {error, {Name, Reason}}
+ end.
-format_error(bad_header) -> "Bad directory header";
-format_error(eof) -> "Unexpected end of file";
-format_error(symbolic_link_too_long) -> "Symbolic link too long";
-format_error({Name,Reason}) ->
- lists:flatten(io_lib:format("~ts: ~ts", [Name,format_error(Reason)]));
-format_error(Atom) when is_atom(Atom) ->
- file:format_error(Atom);
-format_error(Term) ->
- lists:flatten(io_lib:format("~tp", [Term])).
+open1({binary,Bin}, read, _Raw, Opts) when is_binary(Bin) ->
+ case file:open(Bin, [ram,binary,read]) of
+ {ok,File} ->
+ _ = [ram_file:uncompress(File) || Opts =:= [compressed]],
+ {ok, #reader{handle=File,access=read,func=fun file_op/2}};
+ Error ->
+ Error
+ end;
+open1({file, Fd}, read, _Raw, _Opts) ->
+ Reader = #reader{handle=Fd,access=read,func=fun file_op/2},
+ case do_position(Reader, {cur, 0}) of
+ {ok, Pos, Reader2} ->
+ {ok, Reader2#reader{pos=Pos}};
+ {error, _} = Err ->
+ Err
+ end;
+open1(Name, Access, Raw, Opts) when is_list(Name) or is_binary(Name) ->
+ case file:open(Name, Raw ++ [binary, Access|Opts]) of
+ {ok, File} ->
+ {ok, #reader{handle=File,access=Access,func=fun file_op/2}};
+ {error, Reason} ->
+ {error, {Name, Reason}}
+ end.
+open_mode(Mode) ->
+ open_mode(Mode, false, [raw], []).
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%%%
-%%% Useful definitions (also start of implementation).
-%%%
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-%% Offset for fields in the tar header.
-%% Note that these offsets are ZERO-based as in the POSIX standard
-%% document, while binaries use ONE-base offset. Caveat Programmer.
-
--define(th_name, 0).
--define(th_mode, 100).
--define(th_uid, 108).
--define(th_gid, 116).
--define(th_size, 124).
--define(th_mtime, 136).
--define(th_chksum, 148).
--define(th_typeflag, 156).
--define(th_linkname, 157).
--define(th_magic, 257).
--define(th_version, 263).
--define(th_prefix, 345).
-
-%% Length of these fields.
-
--define(th_name_len, 100).
--define(th_mode_len, 8).
--define(th_uid_len, 8).
--define(th_gid_len, 8).
--define(th_size_len, 12).
--define(th_mtime_len, 12).
--define(th_chksum_len, 8).
--define(th_linkname_len, 100).
--define(th_magic_len, 6).
--define(th_version_len, 2).
--define(th_prefix_len, 167).
-
--record(tar_header,
- {name, % Name of file.
- mode, % Mode bits.
- uid, % User id.
- gid, % Group id.
- size, % Size of file
- mtime, % Last modified (seconds since
- % Jan 1, 1970).
- chksum, % Checksum of header.
- typeflag = [], % Type of file.
- linkname = [], % Name of link.
- filler = [],
- prefix}). % Filename prefix.
-
--define(record_size, 512).
--define(block_size, (512*20)).
-
-
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%%%
-%%% Adding members to a tar archive.
-%%%
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-add1(TarFile, Bin, NameInArchive, Opts) when is_binary(Bin) ->
- Now = calendar:now_to_local_time(erlang:timestamp()),
- Info = #file_info{size = byte_size(Bin),
- type = regular,
- access = read_write,
- atime = Now,
- mtime = Now,
- ctime = Now,
- mode = 8#100644,
- links = 1,
- major_device = 0,
- minor_device = 0,
- inode = 0,
- uid = 0,
- gid = 0},
- Header = create_header(NameInArchive, Info),
- add1(TarFile, NameInArchive, Header, Bin, Opts);
-add1(TarFile, Name, NameInArchive, Opts) ->
- case read_file_and_info(Name, Opts) of
- {ok, Bin, Info} when Info#file_info.type =:= regular ->
- Header = create_header(NameInArchive, Info),
- add1(TarFile, Name, Header, Bin, Opts);
- {ok, PointsTo, Info} when Info#file_info.type =:= symlink ->
- if
- length(PointsTo) > 100 ->
- {error,{PointsTo,symbolic_link_too_long}};
- true ->
- Info2 = Info#file_info{size=0},
- Header = create_header(NameInArchive, Info2, PointsTo),
- add1(TarFile, Name, Header, list_to_binary([]), Opts)
- end;
- {ok, _, Info} when Info#file_info.type =:= directory ->
- add_directory(TarFile, Name, NameInArchive, Info, Opts);
- {ok, _, #file_info{type=Type}} ->
- {error, {bad_file_type, Name, Type}};
- {error, Reason} ->
- {error, {Name, Reason}}
+open_mode(read, _, Raw, _) ->
+ {ok, read, Raw, []};
+open_mode(write, _, Raw, _) ->
+ {ok, write, Raw, []};
+open_mode([read|Rest], false, Raw, Opts) ->
+ open_mode(Rest, read, Raw, Opts);
+open_mode([write|Rest], false, Raw, Opts) ->
+ open_mode(Rest, write, Raw, Opts);
+open_mode([compressed|Rest], Access, Raw, Opts) ->
+ open_mode(Rest, Access, Raw, [compressed|Opts]);
+open_mode([cooked|Rest], Access, _Raw, Opts) ->
+ open_mode(Rest, Access, [], Opts);
+open_mode([], Access, Raw, Opts) ->
+ {ok, Access, Raw, Opts};
+open_mode(_, _, _, _) ->
+ {error, einval}.
+
+file_op(write, {Fd, Data}) ->
+ file:write(Fd, Data);
+file_op(position, {Fd, Pos}) ->
+ file:position(Fd, Pos);
+file_op(read2, {Fd, Size}) ->
+ file:read(Fd, Size);
+file_op(close, Fd) ->
+ file:close(Fd).
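init/3 is the hook for wrapping any file-like handle: the callback must answer the same four requests that file_op/2 handles above. A sketch over an ordinary raw file (file name is made up; the fun simply mirrors file_op/2):

    {ok, Fd} = file:open("archive.tar", [read, binary, raw]),
    {ok, Reader} = erl_tar:init(Fd, read,
                                fun(write,    {F, Data}) -> file:write(F, Data);
                                   (position, {F, Pos})  -> file:position(F, Pos);
                                   (read2,    {F, Size}) -> file:read(F, Size);
                                   (close,    F)         -> file:close(F)
                                end).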
+
+%% Closes a tar archive.
+-spec close(reader()) -> ok | {error, term()}.
+close(#reader{access=read}=Reader) ->
+ ok = do_close(Reader);
+close(#reader{access=write}=Reader) ->
+ {ok, Reader2} = pad_file(Reader),
+ ok = do_close(Reader2),
+ ok;
+close(_) ->
+ {error, einval}.
+
+pad_file(#reader{pos=Pos}=Reader) ->
+ %% There must be at least two zero blocks at the end.
+ PadCurrent = skip_padding(Pos+?BLOCK_SIZE),
+ Padding = <<0:PadCurrent/unit:8>>,
+ do_write(Reader, [Padding, ?ZERO_BLOCK, ?ZERO_BLOCK]).
+
+
+%%%================================================================
+%% Creation/modification of tar archives
+
+%% Creates a tar file Name containing the given files.
+-spec create(file:filename(), filelist()) -> ok | {error, {string(), term()}}.
+create(Name, FileList) when is_list(Name); is_binary(Name) ->
+ create(Name, FileList, []).
+
+%% Creates a tar archive Name containing the given files.
+%% Accepted options: verbose, compressed, cooked
+-spec create(file:filename(), filelist(), [create_opt()]) ->
+ ok | {error, term()} | {error, {string(), term()}}.
+create(Name, FileList, Options) when is_list(Name); is_binary(Name) ->
+ Mode = lists:filter(fun(X) -> (X=:=compressed) or (X=:=cooked)
+ end, Options),
+ case open(Name, [write|Mode]) of
+ {ok, TarFile} ->
+ do_create(TarFile, FileList, Options);
+ {error, _} = Err ->
+ Err
end.
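do_create/3 below accepts either plain file names or {NameInArchive, NameOrBinary} pairs, so a single call can mix files on disk with in-memory data (names are made up):

    Blob = <<"generated at build time">>,
    ok = erl_tar:create("backup.tar.gz",
                        ["sys.config",
                         {"data/blob.bin", Blob}],
                        [compressed, verbose]).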
-add1(Tar, Name, Header, chunked, Options) ->
- add_verbose(Options, "a ~ts [chunked ", [Name]),
- try
- ok = do_write(Tar, Header),
- {ok,D} = file:open(Name, [read,binary]),
- {ok,NumBytes} = add_read_write_chunks(D, Tar, Options#add_opts.chunk_size, 0, Options),
- _ = file:close(D),
- ok = do_write(Tar, padding(NumBytes,?record_size))
- of
- ok ->
- add_verbose(Options, "~n", []),
- ok
- catch
- error:{badmatch,{error,Error}} ->
- add_verbose(Options, "~n", []),
- {error,{Name,Error}}
+do_create(TarFile, [], _Opts) ->
+ close(TarFile);
+do_create(TarFile, [{NameInArchive, NameOrBin}|Rest], Opts) ->
+ case add(TarFile, NameOrBin, NameInArchive, Opts) of
+ ok ->
+ do_create(TarFile, Rest, Opts);
+ {error, _} = Err ->
+ _ = close(TarFile),
+ Err
end;
-add1(Tar, Name, Header, Bin, Options) ->
- add_verbose(Options, "a ~ts~n", [Name]),
- do_write(Tar, [Header, Bin, padding(byte_size(Bin), ?record_size)]).
-
-add_read_write_chunks(D, Tar, ChunkSize, SumNumBytes, Options) ->
- case file:read(D, ChunkSize) of
- {ok,Bin} ->
- ok = do_write(Tar, Bin),
- add_verbose(Options, ".", []),
- add_read_write_chunks(D, Tar, ChunkSize, SumNumBytes+byte_size(Bin), Options);
- eof ->
- add_verbose(Options, "]", []),
- {ok,SumNumBytes};
- Other ->
- Other
+do_create(TarFile, [Name|Rest], Opts) ->
+ case add(TarFile, Name, Name, Opts) of
+ ok ->
+ do_create(TarFile, Rest, Opts);
+ {error, _} = Err ->
+ _ = close(TarFile),
+ Err
end.
-add_directory(TarFile, DirName, NameInArchive, Info, Options) ->
+%% Adds a file to a tape archive.
+-type add_type() :: string()
+ | {string(), string()}
+ | {string(), binary()}.
+-spec add(reader(), add_type(), [add_opt()]) -> ok | {error, term()}.
+add(Reader, {NameInArchive, Name}, Opts)
+ when is_list(NameInArchive), is_list(Name) ->
+ do_add(Reader, Name, NameInArchive, Opts);
+add(Reader, {NameInArchive, Bin}, Opts)
+ when is_list(NameInArchive), is_binary(Bin) ->
+ do_add(Reader, Bin, NameInArchive, Opts);
+add(Reader, Name, Opts) when is_list(Name) ->
+ do_add(Reader, Name, Name, Opts).
+
+
+-spec add(reader(), string() | binary(), string(), [add_opt()]) ->
+ ok | {error, term()}.
+add(Reader, NameOrBin, NameInArchive, Options)
+ when is_list(NameOrBin); is_binary(NameOrBin),
+ is_list(NameInArchive), is_list(Options) ->
+ do_add(Reader, NameOrBin, NameInArchive, Options).
+
+do_add(#reader{access=write}=Reader, Name, NameInArchive, Options)
+ when is_list(NameInArchive), is_list(Options) ->
+ Opts = #add_opts{read_info=fun(F) -> file:read_link_info(F) end},
+ add1(Reader, Name, NameInArchive, add_opts(Options, Opts));
+do_add(#reader{access=read},_,_,_) ->
+ {error, eacces};
+do_add(Reader,_,_,_) ->
+ {error, {badarg, Reader}}.
+
+add_opts([dereference|T], Opts) ->
+ add_opts(T, Opts#add_opts{read_info=fun(F) -> file:read_file_info(F) end});
+add_opts([verbose|T], Opts) ->
+ add_opts(T, Opts#add_opts{verbose=true});
+add_opts([{chunks,N}|T], Opts) ->
+ add_opts(T, Opts#add_opts{chunk_size=N});
+add_opts([_|T], Opts) ->
+ add_opts(T, Opts);
+add_opts([], Opts) ->
+ Opts.
+
+add1(#reader{}=Reader, Name, NameInArchive, #add_opts{read_info=ReadInfo}=Opts)
+ when is_list(Name) ->
+ Res = case ReadInfo(Name) of
+ {error, Reason0} ->
+ {error, {Name, Reason0}};
+ {ok, #file_info{type=symlink}=Fi} ->
+ add_verbose(Opts, "a ~ts~n", [NameInArchive]),
+ {ok, Linkname} = file:read_link(Name),
+ Header = fileinfo_to_header(NameInArchive, Fi, Linkname),
+ add_header(Reader, Header, Opts);
+ {ok, #file_info{type=regular}=Fi} ->
+ add_verbose(Opts, "a ~ts~n", [NameInArchive]),
+ Header = fileinfo_to_header(NameInArchive, Fi, false),
+ {ok, Reader2} = add_header(Reader, Header, Opts),
+ FileSize = Header#tar_header.size,
+ {ok, FileSize, Reader3} = do_copy(Reader2, Name, Opts),
+ Padding = skip_padding(FileSize),
+ Pad = <<0:Padding/unit:8>>,
+ do_write(Reader3, Pad);
+ {ok, #file_info{type=directory}=Fi} ->
+ add_directory(Reader, Name, NameInArchive, Fi, Opts);
+ {ok, #file_info{}=Fi} ->
+ add_verbose(Opts, "a ~ts~n", [NameInArchive]),
+ Header = fileinfo_to_header(NameInArchive, Fi, false),
+ add_header(Reader, Header, Opts)
+ end,
+ case Res of
+ ok -> ok;
+ {ok, _Reader} -> ok;
+ {error, _Reason} = Err -> Err
+ end;
+add1(Reader, Bin, NameInArchive, Opts) when is_binary(Bin) ->
+ add_verbose(Opts, "a ~ts~n", [NameInArchive]),
+ Now = calendar:now_to_local_time(erlang:timestamp()),
+ Header = #tar_header{
+ name = NameInArchive,
+ size = byte_size(Bin),
+ typeflag = ?TYPE_REGULAR,
+ atime = Now,
+ mtime = Now,
+ ctime = Now,
+ mode = 8#100644},
+ {ok, Reader2} = add_header(Reader, Header, Opts),
+ Padding = skip_padding(byte_size(Bin)),
+ Data = [Bin, <<0:Padding/unit:8>>],
+ case do_write(Reader2, Data) of
+ {ok, _Reader3} -> ok;
+ {error, Reason} -> {error, {NameInArchive, Reason}}
+ end.
+
+add_directory(Reader, DirName, NameInArchive, Info, Opts) ->
case file:list_dir(DirName) of
- {ok, []} ->
- add_verbose(Options, "a ~ts~n", [DirName]),
- Header = create_header(NameInArchive, Info),
- do_write(TarFile, Header);
- {ok, Files} ->
- Add = fun (File) ->
- add1(TarFile,
- filename:join(DirName, File),
- filename:join(NameInArchive, File),
- Options) end,
- foreach_while_ok(Add, Files);
- {error, Reason} ->
- {error, {DirName, Reason}}
+ {ok, []} ->
+ add_verbose(Opts, "a ~ts~n", [NameInArchive]),
+ Header = fileinfo_to_header(NameInArchive, Info, false),
+ add_header(Reader, Header, Opts);
+ {ok, Files} ->
+ add_verbose(Opts, "a ~ts~n", [NameInArchive]),
+ try add_files(Reader, Files, DirName, NameInArchive, Opts) of
+ ok -> ok;
+ {error, _} = Err -> Err
+ catch
+ throw:{error, {_Name, _Reason}} = Err -> Err;
+ throw:{error, Reason} -> {error, {DirName, Reason}}
+ end;
+ {error, Reason} ->
+ {error, {DirName, Reason}}
end.
-
-%% Creates a header for file in a tar file.
-
-create_header(Name, Info) ->
- create_header(Name, Info, []).
-create_header(Name, #file_info {mode=Mode, uid=Uid, gid=Gid,
- size=Size, mtime=Mtime0, type=Type}, Linkname) ->
- Mtime = posix_time(erlang:localtime_to_universaltime(Mtime0)),
- {Prefix,Suffix} = split_filename(Name),
- H0 = [to_string(Suffix, 100),
- to_octal(Mode, 8),
- to_octal(Uid, 8),
- to_octal(Gid, 8),
- to_octal(Size, ?th_size_len),
- to_octal(Mtime, ?th_mtime_len),
- <<" ">>,
- file_type(Type),
- to_string(Linkname, ?th_linkname_len),
- "ustar",0,
- "00",
- zeroes(?th_prefix-?th_version-?th_version_len),
- to_string(Prefix, ?th_prefix_len)],
- H = list_to_binary(H0),
- 512 = byte_size(H), %Assertion.
- ChksumString = to_octal(checksum(H), 6, [0,$\s]),
- <<Before:?th_chksum/binary,_:?th_chksum_len/binary,After/binary>> = H,
- [Before,ChksumString,After].
-
-file_type(regular) -> $0;
-file_type(symlink) -> $2;
-file_type(directory) -> $5.
-
-to_octal(Int, Count) when Count > 1 ->
- to_octal(Int, Count-1, [0]).
-
-to_octal(_, 0, Result) -> Result;
-to_octal(Int, Count, Result) ->
- to_octal(Int div 8, Count-1, [Int rem 8 + $0|Result]).
-
-to_string(Str0, Count) ->
- Str = case file:native_name_encoding() of
- utf8 ->
- unicode:characters_to_binary(Str0);
- latin1 ->
- list_to_binary(Str0)
- end,
- case byte_size(Str) of
- Size when Size < Count ->
- [Str|zeroes(Count-Size)];
- _ -> Str
+
+add_files(_Reader, [], _Dir, _DirInArchive, _Opts) ->
+ ok;
+add_files(Reader, [Name|Rest], Dir, DirInArchive, #add_opts{read_info=Info}=Opts) ->
+ FullName = filename:join(Dir, Name),
+ NameInArchive = filename:join(DirInArchive, Name),
+ Res = case Info(FullName) of
+ {error, Reason} ->
+ {error, {FullName, Reason}};
+ {ok, #file_info{type=directory}=Fi} ->
+ add_directory(Reader, FullName, NameInArchive, Fi, Opts);
+ {ok, #file_info{type=symlink}=Fi} ->
+ add_verbose(Opts, "a ~ts~n", [NameInArchive]),
+ {ok, Linkname} = file:read_link(FullName),
+ Header = fileinfo_to_header(NameInArchive, Fi, Linkname),
+ add_header(Reader, Header, Opts);
+ {ok, #file_info{type=regular}=Fi} ->
+ add_verbose(Opts, "a ~ts~n", [NameInArchive]),
+ Header = fileinfo_to_header(NameInArchive, Fi, false),
+ {ok, Reader2} = add_header(Reader, Header, Opts),
+ FileSize = Header#tar_header.size,
+ {ok, FileSize, Reader3} = do_copy(Reader2, FullName, Opts),
+ Padding = skip_padding(FileSize),
+ Pad = <<0:Padding/unit:8>>,
+ do_write(Reader3, Pad);
+ {ok, #file_info{}=Fi} ->
+ add_verbose(Opts, "a ~ts~n", [NameInArchive]),
+ Header = fileinfo_to_header(NameInArchive, Fi, false),
+ add_header(Reader, Header, Opts)
+ end,
+ case Res of
+ ok -> add_files(Reader, Rest, Dir, DirInArchive, Opts);
+ {ok, ReaderNext} -> add_files(ReaderNext, Rest, Dir, DirInArchive, Opts);
+ {error, _} = Err -> Err
end.
-%% Pads out end of file.
-
-pad_file(File) ->
- {ok,Position} = do_position(File, {cur,0}),
- %% There must be at least two zero records at the end.
- Fill = case ?block_size - (Position rem ?block_size) of
- Fill0 when Fill0 < 2*?record_size ->
- %% We need to another block here to ensure that there
- %% are at least two zero records at the end.
- Fill0 + ?block_size;
- Fill0 ->
- %% Large enough.
- Fill0
- end,
- do_write(File, zeroes(Fill)).
-
-split_filename(Name) when length(Name) =< ?th_name_len ->
- {"", Name};
-split_filename(Name0) ->
- split_filename(lists:reverse(filename:split(Name0)), [], [], 0).
-
-split_filename([Comp|Rest], Prefix, Suffix, Len)
- when Len+length(Comp) < ?th_name_len ->
- split_filename(Rest, Prefix, [Comp|Suffix], Len+length(Comp)+1);
-split_filename([Comp|Rest], Prefix, Suffix, Len) ->
- split_filename(Rest, [Comp|Prefix], Suffix, Len+length(Comp)+1);
-split_filename([], Prefix, Suffix, _) ->
- {filename:join(Prefix),filename:join(Suffix)}.
-
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%%%
-%%% Retrieving files from a tape archive.
-%%%
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-%% Options used when reading a tar archive.
-
--record(read_opts,
- {cwd :: string(), % Current working directory.
- keep_old_files = false :: boolean(), % Owerwrite or not.
- files = all, % Set of files to extract
- % (or all).
- output = file :: 'file' | 'memory',
- open_mode = [], % Open mode options.
- verbose = false :: boolean()}). % Verbose on/off.
+format_string(String, Size) when length(String) > Size ->
+ throw({error, {write_string, field_too_long}});
+format_string(String, Size) ->
+ Ascii = to_ascii(String),
+ if byte_size(Ascii) < Size ->
+ [Ascii, 0];
+ true ->
+ Ascii
+ end.
-extract_opts(List) ->
- extract_opts(List, default_options()).
+format_octal(Octal) ->
+ iolist_to_binary(io_lib:fwrite("~.8B", [Octal])).
+
+add_header(#reader{}=Reader, #tar_header{}=Header, Opts) ->
+ {ok, Iodata} = build_header(Header, Opts),
+ do_write(Reader, Iodata).
+
+write_to_block(Block, IoData, Start) when is_list(IoData) ->
+ write_to_block(Block, iolist_to_binary(IoData), Start);
+write_to_block(Block, Bin, Start) when is_binary(Bin) ->
+ Size = byte_size(Bin),
+ <<Head:Start/unit:8, _:Size/unit:8, Rest/binary>> = Block,
+ <<Head:Start/unit:8, Bin/binary, Rest/binary>>.
+
+build_header(#tar_header{}=Header, Opts) ->
+ #tar_header{
+ name=Name,
+ mode=Mode,
+ uid=Uid,
+ gid=Gid,
+ size=Size,
+ typeflag=Type,
+ linkname=Linkname,
+ uname=Uname,
+ gname=Gname,
+ devmajor=Devmaj,
+ devminor=Devmin
+ } = Header,
+ Mtime = datetime_to_posix(Header#tar_header.mtime),
+
+ Block0 = ?ZERO_BLOCK,
+ {Block1, Pax0} = write_string(Block0, ?V7_NAME, ?V7_NAME_LEN, Name, ?PAX_PATH, #{}),
+ Block2 = write_octal(Block1, ?V7_MODE, ?V7_MODE_LEN, Mode),
+ {Block3, Pax1} = write_numeric(Block2, ?V7_UID, ?V7_UID_LEN, Uid, ?PAX_UID, Pax0),
+ {Block4, Pax2} = write_numeric(Block3, ?V7_GID, ?V7_GID_LEN, Gid, ?PAX_GID, Pax1),
+ {Block5, Pax3} = write_numeric(Block4, ?V7_SIZE, ?V7_SIZE_LEN, Size, ?PAX_SIZE, Pax2),
+ {Block6, Pax4} = write_numeric(Block5, ?V7_MTIME, ?V7_MTIME_LEN, Mtime, ?PAX_NONE, Pax3),
+ {Block7, Pax5} = write_string(Block6, ?V7_TYPE, ?V7_TYPE_LEN, <<Type>>, ?PAX_NONE, Pax4),
+ {Block8, Pax6} = write_string(Block7, ?V7_LINKNAME, ?V7_LINKNAME_LEN,
+ Linkname, ?PAX_LINKPATH, Pax5),
+ {Block9, Pax7} = write_string(Block8, ?USTAR_UNAME, ?USTAR_UNAME_LEN,
+ Uname, ?PAX_UNAME, Pax6),
+ {Block10, Pax8} = write_string(Block9, ?USTAR_GNAME, ?USTAR_GNAME_LEN,
+ Gname, ?PAX_GNAME, Pax7),
+ {Block11, Pax9} = write_numeric(Block10, ?USTAR_DEVMAJ, ?USTAR_DEVMAJ_LEN,
+ Devmaj, ?PAX_NONE, Pax8),
+ {Block12, Pax10} = write_numeric(Block11, ?USTAR_DEVMIN, ?USTAR_DEVMIN_LEN,
+ Devmin, ?PAX_NONE, Pax9),
+ {Block13, Pax11} = set_path(Block12, Pax10),
+ PaxEntry = case maps:size(Pax11) of
+ 0 -> [];
+ _ -> build_pax_entry(Header, Pax11, Opts)
+ end,
+ Block14 = set_format(Block13, ?FORMAT_USTAR),
+ Block15 = set_checksum(Block14),
+ {ok, [PaxEntry, Block15]}.
+
+set_path(Block0, Pax) ->
+ %% only use ustar header when name is too long
+ case maps:get(?PAX_PATH, Pax, nil) of
+ nil ->
+ {Block0, Pax};
+ PaxPath ->
+ case split_ustar_path(PaxPath) of
+ {ok, UstarName, UstarPrefix} ->
+ {Block1, _} = write_string(Block0, ?V7_NAME, ?V7_NAME_LEN,
+ UstarName, ?PAX_NONE, #{}),
+ {Block2, _} = write_string(Block1, ?USTAR_PREFIX, ?USTAR_PREFIX_LEN,
+ UstarPrefix, ?PAX_NONE, #{}),
+ {Block2, maps:remove(?PAX_PATH, Pax)};
+ false ->
+ {Block0, Pax}
+ end
+ end.
-table_opts(List) ->
- read_opts(List, default_options()).
+set_format(Block0, Format)
+ when Format =:= ?FORMAT_USTAR; Format =:= ?FORMAT_PAX ->
+ Block1 = write_to_block(Block0, ?MAGIC_USTAR, ?USTAR_MAGIC),
+ write_to_block(Block1, ?VERSION_USTAR, ?USTAR_VERSION);
+set_format(_Block, Format) ->
+ throw({error, {invalid_format, Format}}).
+
+set_checksum(Block) ->
+ Checksum = compute_checksum(Block),
+ write_octal(Block, ?V7_CHKSUM, ?V7_CHKSUM_LEN, Checksum).
+
+build_pax_entry(Header, PaxAttrs, Opts) ->
+ Path = Header#tar_header.name,
+ Filename = filename:basename(Path),
+ Dir = filename:dirname(Path),
+ Path2 = filename:join([Dir, "PaxHeaders.0", Filename]),
+ AsciiPath = to_ascii(Path2),
+ Path3 = if byte_size(AsciiPath) > ?V7_NAME_LEN ->
+ binary_part(AsciiPath, 0, ?V7_NAME_LEN - 1);
+ true ->
+ AsciiPath
+ end,
+ Keys = maps:keys(PaxAttrs),
+ SortedKeys = lists:sort(Keys),
+ PaxFile = build_pax_file(SortedKeys, PaxAttrs),
+ Size = byte_size(PaxFile),
+ Padding = (?BLOCK_SIZE -
+ (byte_size(PaxFile) rem ?BLOCK_SIZE)) rem ?BLOCK_SIZE,
+ Pad = <<0:Padding/unit:8>>,
+ PaxHeader = #tar_header{
+ name=unicode:characters_to_list(Path3),
+ size=Size,
+ mtime=Header#tar_header.mtime,
+ atime=Header#tar_header.atime,
+ ctime=Header#tar_header.ctime,
+ typeflag=?TYPE_X_HEADER
+ },
+ {ok, PaxHeaderData} = build_header(PaxHeader, Opts),
+ [PaxHeaderData, PaxFile, Pad].
+
+build_pax_file(Keys, PaxAttrs) ->
+ build_pax_file(Keys, PaxAttrs, []).
+build_pax_file([], _, Acc) ->
+ unicode:characters_to_binary(Acc);
+build_pax_file([K|Rest], Attrs, Acc) ->
+ V = maps:get(K, Attrs),
+ Size = sizeof(K) + sizeof(V) + 3,
+ Size2 = sizeof(Size) + Size,
+ Key = to_string(K),
+ Value = to_string(V),
+ Record = unicode:characters_to_binary(io_lib:format("~B ~ts=~ts\n", [Size2, Key, Value])),
+ if byte_size(Record) =/= Size2 ->
+ Size3 = byte_size(Record),
+ Record2 = io_lib:format("~B ~ts=~ts\n", [Size3, Key, Value]),
+ build_pax_file(Rest, Attrs, [Acc, Record2]);
+ true ->
+ build_pax_file(Rest, Attrs, [Acc, Record])
+ end.
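Each PAX record has the form "<length> <key>=<value>\n", where <length> counts the entire record including its own digits, which is why the size is recomputed once the digit count is known. A worked example, assuming ?PAX_PATH is the standard "path" keyword:

    K = "path", V = "a/b"
    Size   = length("path") + length("a/b") + 3 = 10    (space, '=' and '\n')
    Size2  = length("10") + Size = 12
    Record = "12 path=a/b\n"                             (12 bytes, so the first attempt already fits)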
-default_options() ->
- {ok, Cwd} = file:get_cwd(),
- #read_opts{cwd=Cwd}.
+sizeof(Bin) when is_binary(Bin) ->
+ byte_size(Bin);
+sizeof(List) when is_list(List) ->
+ length(List);
+sizeof(N) when is_integer(N) ->
+ byte_size(integer_to_binary(N));
+sizeof(N) when is_float(N) ->
+ byte_size(float_to_binary(N)).
+
+to_string(Bin) when is_binary(Bin) ->
+ unicode:characters_to_list(Bin);
+to_string(List) when is_list(List) ->
+ List;
+to_string(N) when is_integer(N) ->
+ integer_to_list(N);
+to_string(N) when is_float(N) ->
+ float_to_list(N).
+
+split_ustar_path(Path) ->
+ Len = length(Path),
+ NotAscii = not is_ascii(Path),
+ if Len =< ?V7_NAME_LEN; NotAscii ->
+ false;
+ true ->
+ PathBin = binary:list_to_bin(Path),
+ case binary:split(PathBin, [<<$/>>], [global, trim_all]) of
+ [Part] when byte_size(Part) >= ?V7_NAME_LEN ->
+ false;
+ Parts ->
+ case lists:last(Parts) of
+ Name when byte_size(Name) >= ?V7_NAME_LEN ->
+ false;
+ Name ->
+ Parts2 = lists:sublist(Parts, length(Parts) - 1),
+ join_split_ustar_path(Parts2, {ok, Name, nil})
+ end
+ end
+ end.
-%% Parse options for extract.
+join_split_ustar_path([], Acc) ->
+ Acc;
+join_split_ustar_path([Part|_], {ok, _, nil})
+ when byte_size(Part) > ?USTAR_PREFIX_LEN ->
+ false;
+join_split_ustar_path([Part|_], {ok, _Name, Acc})
+ when (byte_size(Part)+byte_size(Acc)) > ?USTAR_PREFIX_LEN ->
+ false;
+join_split_ustar_path([Part|Rest], {ok, Name, nil}) ->
+ join_split_ustar_path(Rest, {ok, Name, Part});
+join_split_ustar_path([Part|Rest], {ok, Name, Acc}) ->
+ join_split_ustar_path(Rest, {ok, Name, <<Acc/binary,$/,Part/binary>>}).
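In other words, the USTAR split only succeeds for pure-ASCII paths longer than 100 bytes whose final component stays under 100 bytes and whose directory part joins to at most ?USTAR_PREFIX_LEN bytes; for example, a 120-byte path with a 20-byte basename splits into that basename plus a 99-byte prefix, while anything else returns false and the full path stays in the PAX "path" attribute.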
+
+datetime_to_posix(DateTime) ->
+ Epoch = calendar:datetime_to_gregorian_seconds(?EPOCH),
+ Secs = calendar:datetime_to_gregorian_seconds(DateTime),
+ case Secs - Epoch of
+ N when N < 0 -> 0;
+ N -> N
+ end.
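Assuming ?EPOCH is {{1970,1,1},{0,0,0}}, this gives datetime_to_posix({{1970,1,1},{0,0,1}}) =:= 1, and any pre-epoch timestamp is clamped to 0 so a negative value never reaches the octal/PAX writers below.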
-extract_opts([keep_old_files|Rest], Opts) ->
- extract_opts(Rest, Opts#read_opts{keep_old_files=true});
-extract_opts([{cwd, Cwd}|Rest], Opts) ->
- extract_opts(Rest, Opts#read_opts{cwd=Cwd});
-extract_opts([{files, Files}|Rest], Opts) ->
- Set = ordsets:from_list(Files),
- extract_opts(Rest, Opts#read_opts{files=Set});
-extract_opts([memory|Rest], Opts) ->
- extract_opts(Rest, Opts#read_opts{output=memory});
-extract_opts([compressed|Rest], Opts=#read_opts{open_mode=OpenMode}) ->
- extract_opts(Rest, Opts#read_opts{open_mode=[compressed|OpenMode]});
-extract_opts([cooked|Rest], Opts=#read_opts{open_mode=OpenMode}) ->
- extract_opts(Rest, Opts#read_opts{open_mode=[cooked|OpenMode]});
-extract_opts([verbose|Rest], Opts) ->
- extract_opts(Rest, Opts#read_opts{verbose=true});
-extract_opts([Other|Rest], Opts) ->
- extract_opts(Rest, read_opts([Other], Opts));
-extract_opts([], Opts) ->
- Opts.
+write_octal(Block, Pos, Size, X) ->
+ Octal = zero_pad(format_octal(X), Size-1),
+ if byte_size(Octal) < Size ->
+ write_to_block(Block, Octal, Pos);
+ true ->
+ throw({error, {write_failed, octal_field_too_long}})
+ end.
-%% Common options for all read operations.
+write_string(Block, Pos, Size, Str, PaxAttr, Pax0) ->
+ NotAscii = not is_ascii(Str),
+ if PaxAttr =/= ?PAX_NONE andalso (length(Str) > Size orelse NotAscii) ->
+ Pax1 = maps:put(PaxAttr, Str, Pax0),
+ {Block, Pax1};
+ true ->
+ Formatted = format_string(Str, Size),
+ {write_to_block(Block, Formatted, Pos), Pax0}
+ end.
+write_numeric(Block, Pos, Size, X, PaxAttr, Pax0) ->
+ %% attempt octal
+ Octal = zero_pad(format_octal(X), Size-1),
+ if byte_size(Octal) < Size ->
+ {write_to_block(Block, [Octal, 0], Pos), Pax0};
+ PaxAttr =/= ?PAX_NONE ->
+ Pax1 = maps:put(PaxAttr, X, Pax0),
+ {Block, Pax1};
+ true ->
+ throw({error, {write_failed, numeric_field_too_long}})
+ end.
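For the 12-byte size field (the old layout above defined ?th_size_len as 12) this means plain octal covers sizes up to 8#77777777777 bytes, just under 8 GiB; larger values fall through to the PAX "size" attribute, while fields written with ?PAX_NONE (mtime, for instance) would throw if they ever overflowed their octal field.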
-read_opts([compressed|Rest], Opts=#read_opts{open_mode=OpenMode}) ->
- read_opts(Rest, Opts#read_opts{open_mode=[compressed|OpenMode]});
-read_opts([cooked|Rest], Opts=#read_opts{open_mode=OpenMode}) ->
- read_opts(Rest, Opts#read_opts{open_mode=[cooked|OpenMode]});
-read_opts([verbose|Rest], Opts) ->
- read_opts(Rest, Opts#read_opts{verbose=true});
-read_opts([_|Rest], Opts) ->
- read_opts(Rest, Opts);
-read_opts([], Opts) ->
- Opts.
+zero_pad(Str, Size) when byte_size(Str) >= Size ->
+ Str;
+zero_pad(Str, Size) ->
+ Padding = Size - byte_size(Str),
+ Pad = binary:copy(<<$0>>, Padding),
+ <<Pad/binary, Str/binary>>.
-foldl_read({AccessMode,TD={tar_descriptor,_UsrHandle,_AccessFun}}, Fun, Accu, Opts) ->
- case AccessMode of
- read ->
- foldl_read0(TD, Fun, Accu, Opts);
- _ ->
- {error,{read_mode_expected,AccessMode}}
- end;
-foldl_read(TarName, Fun, Accu, Opts) ->
- case open(TarName, [read|Opts#read_opts.open_mode]) of
- {ok, {read, File}} ->
- Result = foldl_read0(File, Fun, Accu, Opts),
- ok = do_close(File),
- Result;
- Error ->
- Error
+
+%%%================================================================
+%% Functions for creating or modifying tar archives
+
+read_block(Reader) ->
+ case do_read(Reader, ?BLOCK_SIZE) of
+ eof ->
+ throw({error, eof});
+ %% Two zero blocks mark the end of the archive
+ {ok, ?ZERO_BLOCK, Reader1} ->
+ case do_read(Reader1, ?BLOCK_SIZE) of
+ eof ->
+ % This is technically a malformed end-of-archive marker,
+ % as two ZERO_BLOCKs are expected as the marker,
+ % but if we've already made it this far, we should just ignore it
+ eof;
+ {ok, ?ZERO_BLOCK, _Reader2} ->
+ eof;
+ {ok, _Block, _Reader2} ->
+ throw({error, invalid_end_of_archive});
+ {error,_} = Err ->
+ throw(Err)
+ end;
+ {ok, Block, Reader1} when is_binary(Block) ->
+ {ok, Block, Reader1};
+ {error, _} = Err ->
+ throw(Err)
end.
-foldl_read0(File, Fun, Accu, Opts) ->
- case catch foldl_read1(Fun, Accu, File, Opts) of
- {'EXIT', Reason} ->
- exit(Reason);
- {error, {Reason, Format, Args}} ->
- read_verbose(Opts, Format, Args),
- {error, Reason};
- {error, Reason} ->
- {error, Reason};
- Ok ->
- Ok
+get_header(#reader{}=Reader) ->
+ case read_block(Reader) of
+ eof ->
+ eof;
+ {ok, Block, Reader1} ->
+ convert_header(Block, Reader1)
end.
-foldl_read1(Fun, Accu0, File, Opts) ->
- case get_header(File) of
- eof ->
- Fun(eof, File, Opts, Accu0);
- Header ->
- {ok, NewAccu} = Fun(Header, File, Opts, Accu0),
- foldl_read1(Fun, NewAccu, File, Opts)
+%% Converts the tar header to a record.
+to_v7(Bin) when is_binary(Bin), byte_size(Bin) =:= ?BLOCK_SIZE ->
+ #header_v7{
+ name=binary_part(Bin, ?V7_NAME, ?V7_NAME_LEN),
+ mode=binary_part(Bin, ?V7_MODE, ?V7_MODE_LEN),
+ uid=binary_part(Bin, ?V7_UID, ?V7_UID_LEN),
+ gid=binary_part(Bin, ?V7_GID, ?V7_GID_LEN),
+ size=binary_part(Bin, ?V7_SIZE, ?V7_SIZE_LEN),
+ mtime=binary_part(Bin, ?V7_MTIME, ?V7_MTIME_LEN),
+ checksum=binary_part(Bin, ?V7_CHKSUM, ?V7_CHKSUM_LEN),
+ typeflag=binary:at(Bin, ?V7_TYPE),
+ linkname=binary_part(Bin, ?V7_LINKNAME, ?V7_LINKNAME_LEN)
+ };
+to_v7(_) ->
+ {error, header_block_too_small}.
+
+to_gnu(#header_v7{}=V7, Bin)
+ when is_binary(Bin), byte_size(Bin) =:= ?BLOCK_SIZE ->
+ #header_gnu{
+ header_v7=V7,
+ magic=binary_part(Bin, ?GNU_MAGIC, ?GNU_MAGIC_LEN),
+ version=binary_part(Bin, ?GNU_VERSION, ?GNU_VERSION_LEN),
+ uname=binary_part(Bin, 265, 32),
+ gname=binary_part(Bin, 297, 32),
+ devmajor=binary_part(Bin, 329, 8),
+ devminor=binary_part(Bin, 337, 8),
+ atime=binary_part(Bin, 345, 12),
+ ctime=binary_part(Bin, 357, 12),
+ sparse=to_sparse_array(binary_part(Bin, 386, 24*4+1)),
+ real_size=binary_part(Bin, 483, 12)
+ }.
+
+to_star(#header_v7{}=V7, Bin)
+ when is_binary(Bin), byte_size(Bin) =:= ?BLOCK_SIZE ->
+ #header_star{
+ header_v7=V7,
+ magic=binary_part(Bin, ?USTAR_MAGIC, ?USTAR_MAGIC_LEN),
+ version=binary_part(Bin, ?USTAR_VERSION, ?USTAR_VERSION_LEN),
+ uname=binary_part(Bin, ?USTAR_UNAME, ?USTAR_UNAME_LEN),
+ gname=binary_part(Bin, ?USTAR_GNAME, ?USTAR_GNAME_LEN),
+ devmajor=binary_part(Bin, ?USTAR_DEVMAJ, ?USTAR_DEVMAJ_LEN),
+ devminor=binary_part(Bin, ?USTAR_DEVMIN, ?USTAR_DEVMIN_LEN),
+ prefix=binary_part(Bin, 345, 131),
+ atime=binary_part(Bin, 476, 12),
+ ctime=binary_part(Bin, 488, 12),
+ trailer=binary_part(Bin, ?STAR_TRAILER, ?STAR_TRAILER_LEN)
+ }.
+
+to_ustar(#header_v7{}=V7, Bin)
+ when is_binary(Bin), byte_size(Bin) =:= ?BLOCK_SIZE ->
+ #header_ustar{
+ header_v7=V7,
+ magic=binary_part(Bin, ?USTAR_MAGIC, ?USTAR_MAGIC_LEN),
+ version=binary_part(Bin, ?USTAR_VERSION, ?USTAR_VERSION_LEN),
+ uname=binary_part(Bin, ?USTAR_UNAME, ?USTAR_UNAME_LEN),
+ gname=binary_part(Bin, ?USTAR_GNAME, ?USTAR_GNAME_LEN),
+ devmajor=binary_part(Bin, ?USTAR_DEVMAJ, ?USTAR_DEVMAJ_LEN),
+ devminor=binary_part(Bin, ?USTAR_DEVMIN, ?USTAR_DEVMIN_LEN),
+ prefix=binary_part(Bin, 345, 155)
+ }.
+
+to_sparse_array(Bin) when is_binary(Bin) ->
+ MaxEntries = byte_size(Bin) div 24,
+ IsExtended = 1 =:= binary:at(Bin, 24*MaxEntries),
+ Entries = parse_sparse_entries(Bin, MaxEntries-1, []),
+ #sparse_array{
+ entries=Entries,
+ max_entries=MaxEntries,
+ is_extended=IsExtended
+ }.
+
+parse_sparse_entries(<<>>, _, Acc) ->
+ Acc;
+parse_sparse_entries(_, -1, Acc) ->
+ Acc;
+parse_sparse_entries(Bin, N, Acc) ->
+ case to_sparse_entry(binary_part(Bin, N*24, 24)) of
+ nil ->
+ parse_sparse_entries(Bin, N-1, Acc);
+ Entry = #sparse_entry{} ->
+ parse_sparse_entries(Bin, N-1, [Entry|Acc])
end.
-table1(eof, _, _, Result) ->
- {ok, lists:reverse(Result)};
-table1(Header = #tar_header{}, File, #read_opts{verbose=true}, Result) ->
- #tar_header{name=Name, size=Size, mtime=Mtime, typeflag=Type,
- mode=Mode, uid=Uid, gid=Gid} = Header,
- skip(File, Size),
- {ok, [{Name, Type, Size, posix_to_erlang_time(Mtime), Mode, Uid, Gid}|Result]};
-table1(#tar_header{name=Name, size=Size}, File, _, Result) ->
- skip(File, Size),
- {ok, [Name|Result]}.
-
-extract1(eof, _, _, Acc) ->
- if
- is_list(Acc) ->
- {ok, lists:reverse(Acc)};
- true ->
- Acc
- end;
-extract1(Header, File, Opts, Acc) ->
- Name = Header#tar_header.name,
- case check_extract(Name, Opts) of
- true ->
- {ok, Bin} = get_element(File, Header),
- case write_extracted_element(Header, Bin, Opts) of
- ok ->
- {ok, Acc};
- {ok, NameBin} when is_list(Acc) ->
- {ok, [NameBin | Acc]};
- {ok, NameBin} when Acc =:= ok ->
- {ok, [NameBin]}
- end;
- false ->
- ok = skip(File, Header#tar_header.size),
- {ok, Acc}
+-define(EMPTY_ENTRY, <<0,0,0,0,0,0,0,0,0,0,0,0>>).
+to_sparse_entry(Bin) when is_binary(Bin), byte_size(Bin) =:= 24 ->
+ OffsetBin = binary_part(Bin, 0, 12),
+ NumBytesBin = binary_part(Bin, 12, 12),
+ case {OffsetBin, NumBytesBin} of
+ {?EMPTY_ENTRY, ?EMPTY_ENTRY} ->
+ nil;
+ _ ->
+ #sparse_entry{
+ offset=parse_numeric(OffsetBin),
+ num_bytes=parse_numeric(NumBytesBin)}
end.
-%% Checks if the file Name should be extracted.
+-spec get_format(binary()) -> {ok, pos_integer(), header_v7()}
+ | ?FORMAT_UNKNOWN
+ | {error, term()}.
+get_format(Bin) when is_binary(Bin), byte_size(Bin) =:= ?BLOCK_SIZE ->
+ do_get_format(to_v7(Bin), Bin).
+
+do_get_format({error, _} = Err, _Bin) ->
+ Err;
+do_get_format(#header_v7{}=V7, Bin)
+ when is_binary(Bin), byte_size(Bin) =:= ?BLOCK_SIZE ->
+ Checksum = parse_octal(V7#header_v7.checksum),
+ Chk1 = compute_checksum(Bin),
+ Chk2 = compute_signed_checksum(Bin),
+ if Checksum =/= Chk1 andalso Checksum =/= Chk2 ->
+ ?FORMAT_UNKNOWN;
+ true ->
+ %% guess magic
+ Ustar = to_ustar(V7, Bin),
+ Star = to_star(V7, Bin),
+ Magic = Ustar#header_ustar.magic,
+ Version = Ustar#header_ustar.version,
+ Trailer = Star#header_star.trailer,
+ Format = if
+ Magic =:= ?MAGIC_USTAR, Trailer =:= ?TRAILER_STAR ->
+ ?FORMAT_STAR;
+ Magic =:= ?MAGIC_USTAR ->
+ ?FORMAT_USTAR;
+ Magic =:= ?MAGIC_GNU, Version =:= ?VERSION_GNU ->
+ ?FORMAT_GNU;
+ true ->
+ ?FORMAT_V7
+ end,
+ {ok, Format, V7}
+ end.
-check_extract(_, #read_opts{files=all}) ->
+unpack_format(Format, #header_v7{}=V7, Bin, Reader)
+ when is_binary(Bin), byte_size(Bin) =:= ?BLOCK_SIZE ->
+ Mtime = posix_to_erlang_time(parse_numeric(V7#header_v7.mtime)),
+ Header0 = #tar_header{
+ name=parse_string(V7#header_v7.name),
+ mode=parse_numeric(V7#header_v7.mode),
+ uid=parse_numeric(V7#header_v7.uid),
+ gid=parse_numeric(V7#header_v7.gid),
+ size=parse_numeric(V7#header_v7.size),
+ mtime=Mtime,
+ atime=Mtime,
+ ctime=Mtime,
+ typeflag=V7#header_v7.typeflag,
+ linkname=parse_string(V7#header_v7.linkname)
+ },
+ Typeflag = Header0#tar_header.typeflag,
+ Header1 = if Format > ?FORMAT_V7 ->
+ unpack_modern(Format, V7, Bin, Header0);
+ true ->
+ Name = Header0#tar_header.name,
+ Header0#tar_header{name=safe_join_path("", Name)}
+ end,
+ HeaderOnly = is_header_only_type(Typeflag),
+ Header2 = if HeaderOnly ->
+ Header1#tar_header{size=0};
+ true ->
+ Header1
+ end,
+ if Typeflag =:= ?TYPE_GNU_SPARSE ->
+ Gnu = to_gnu(V7, Bin),
+ RealSize = parse_numeric(Gnu#header_gnu.real_size),
+ {Sparsemap, Reader2} = parse_sparse_map(Gnu, Reader),
+ Header3 = Header2#tar_header{size=RealSize},
+ {Header3, new_sparse_file_reader(Reader2, Sparsemap, RealSize)};
+ true ->
+ FileReader = #reg_file_reader{
+ handle=Reader,
+ num_bytes=Header2#tar_header.size,
+ size=Header2#tar_header.size,
+ pos = 0
+ },
+ {Header2, FileReader}
+ end.
+
+unpack_modern(Format, #header_v7{}=V7, Bin, #tar_header{}=Header0)
+ when is_binary(Bin) ->
+ Typeflag = Header0#tar_header.typeflag,
+ Ustar = to_ustar(V7, Bin),
+ H0 = Header0#tar_header{
+ uname=parse_string(Ustar#header_ustar.uname),
+ gname=parse_string(Ustar#header_ustar.gname)},
+ H1 = if Typeflag =:= ?TYPE_CHAR
+ orelse Typeflag =:= ?TYPE_BLOCK ->
+ Ma = parse_numeric(Ustar#header_ustar.devmajor),
+ Mi = parse_numeric(Ustar#header_ustar.devminor),
+ H0#tar_header{
+ devmajor=Ma,
+ devminor=Mi
+ };
+ true ->
+ H0
+ end,
+ {Prefix, H2} = case Format of
+ ?FORMAT_USTAR ->
+ {parse_string(Ustar#header_ustar.prefix), H1};
+ ?FORMAT_STAR ->
+ Star = to_star(V7, Bin),
+ Prefix0 = parse_string(Star#header_star.prefix),
+ Atime0 = Star#header_star.atime,
+ Atime = posix_to_erlang_time(parse_numeric(Atime0)),
+ Ctime0 = Star#header_star.ctime,
+ Ctime = posix_to_erlang_time(parse_numeric(Ctime0)),
+ {Prefix0, H1#tar_header{
+ atime=Atime,
+ ctime=Ctime
+ }};
+ _ ->
+ {"", H1}
+ end,
+ Name = H2#tar_header.name,
+ H2#tar_header{name=safe_join_path(Prefix, Name)}.
+
+
+safe_join_path([], Name) ->
+ strip_slashes(Name, both);
+safe_join_path(Prefix, []) ->
+ strip_slashes(Prefix, right);
+safe_join_path(Prefix, Name) ->
+ filename:join(strip_slashes(Prefix, right), strip_slashes(Name, both)).
+
+strip_slashes(Str, Direction) ->
+ string:strip(Str, Direction, $/).
+
+new_sparse_file_reader(Reader, Sparsemap, RealSize) ->
+ true = validate_sparse_entries(Sparsemap, RealSize),
+ #sparse_file_reader{
+ handle = Reader,
+ num_bytes = RealSize,
+ pos = 0,
+ size = RealSize,
+ sparse_map = Sparsemap}.
+
+validate_sparse_entries(Entries, RealSize) ->
+ validate_sparse_entries(Entries, RealSize, 0, 0).
+validate_sparse_entries([], _RealSize, _I, _LastOffset) ->
true;
-check_extract(Name, #read_opts{files=Files}) ->
- ordsets:is_element(Name, Files).
+validate_sparse_entries([#sparse_entry{}=Entry|Rest], RealSize, I, LastOffset) ->
+ Offset = Entry#sparse_entry.offset,
+ NumBytes = Entry#sparse_entry.num_bytes,
+ if
+ Offset > ?MAX_INT64-NumBytes ->
+ throw({error, {invalid_sparse_map_entry, offset_too_large}});
+ Offset+NumBytes > RealSize ->
+ throw({error, {invalid_sparse_map_entry, offset_too_large}});
+ I > 0 andalso LastOffset > Offset ->
+ throw({error, {invalid_sparse_map_entry, overlapping_offsets}});
+ true ->
+ ok
+ end,
+ validate_sparse_entries(Rest, RealSize, I+1, Offset+NumBytes).
+
+
+-spec parse_sparse_map(header_gnu(), reader_type()) ->
+ {[sparse_entry()], reader_type()}.
+parse_sparse_map(#header_gnu{sparse=Sparse}, Reader)
+ when Sparse#sparse_array.is_extended ->
+ parse_sparse_map(Sparse, Reader, []);
+parse_sparse_map(#header_gnu{sparse=Sparse}, Reader) ->
+ {Sparse#sparse_array.entries, Reader}.
+parse_sparse_map(#sparse_array{is_extended=true,entries=Entries}, Reader, Acc) ->
+ case read_block(Reader) of
+ eof ->
+ throw({error, eof});
+ {ok, Block, Reader2} ->
+ Sparse2 = to_sparse_array(Block),
+ parse_sparse_map(Sparse2, Reader2, Entries++Acc)
+ end;
+parse_sparse_map(#sparse_array{entries=Entries}, Reader, Acc) ->
+ Sorted = lists:sort(fun (#sparse_entry{offset=A},#sparse_entry{offset=B}) ->
+ A =< B
+ end, Entries++Acc),
+ {Sorted, Reader}.
+
+%% Defined by taking the sum of the unsigned byte values of the
+%% entire header record, treating the checksum bytes as ASCII spaces
+compute_checksum(<<H1:?V7_CHKSUM/binary,
+ H2:?V7_CHKSUM_LEN/binary,
+ Rest:(?BLOCK_SIZE - ?V7_CHKSUM - ?V7_CHKSUM_LEN)/binary,
+ _/binary>>) ->
+ C0 = checksum(H1) + (byte_size(H2) * $\s),
+ C1 = checksum(Rest),
+ C0 + C1.
+
+compute_signed_checksum(<<H1:?V7_CHKSUM/binary,
+ H2:?V7_CHKSUM_LEN/binary,
+ Rest:(?BLOCK_SIZE - ?V7_CHKSUM - ?V7_CHKSUM_LEN)/binary,
+ _/binary>>) ->
+ C0 = signed_checksum(H1) + (byte_size(H2) * $\s),
+ C1 = signed_checksum(Rest),
+ C0 + C1.
-get_header(File) ->
- case do_read(File, ?record_size) of
- eof ->
- throw({error,eof});
- {ok, Bin} when is_binary(Bin) ->
- convert_header(Bin);
- {ok, List} ->
- convert_header(list_to_binary(List));
- {error, Reason} ->
- throw({error, Reason})
- end.
+%% Returns the checksum of a binary.
+checksum(Bin) -> checksum(Bin, 0).
+checksum(<<A/unsigned,Rest/binary>>, Sum) ->
+ checksum(Rest, Sum+A);
+checksum(<<>>, Sum) -> Sum.
-%% Converts the tar header to a record.
+signed_checksum(Bin) -> signed_checksum(Bin, 0).
+signed_checksum(<<A/signed,Rest/binary>>, Sum) ->
+ signed_checksum(Rest, Sum+A);
+signed_checksum(<<>>, Sum) -> Sum.
+
+-spec parse_numeric(binary()) -> non_neg_integer().
+parse_numeric(<<>>) ->
+ 0;
+parse_numeric(<<First, _/binary>> = Bin) ->
+ %% check for base-256 format first
+    %% if the high bit of the first byte is set, then the remaining bits
+    %% constitute a two's complement encoded number in big-endian byte order
+ if
+ First band 16#80 =/= 0 ->
+ %% Handling negative numbers relies on the following identity:
+ %% -a-1 == ^a
+ %% If the number is negative, we use an inversion mask to invert
+ %% the data bytes and treat the value as an unsigned number
+            Inv = if First band 16#40 =/= 0 -> 16#FF; true -> 16#00 end,
+ Bytes = binary:bin_to_list(Bin),
+ Reducer = fun (C, {I, X}) ->
+ C1 = C bxor Inv,
+ C2 = if I =:= 0 -> C1 band 16#7F; true -> C1 end,
+ if (X bsr 56) > 0 ->
+ throw({error,integer_overflow});
+ true ->
+ {I+1, (X bsl 8) bor C2}
+ end
+ end,
+ {_, N} = lists:foldl(Reducer, {0,0}, Bytes),
+ if (N bsr 63) > 0 ->
+ throw({error, integer_overflow});
+ true ->
+ if Inv =:= 16#FF ->
+ -1 bxor N;
+ true ->
+ N
+ end
+ end;
+ true ->
+ %% normal case is an octal number
+ parse_octal(Bin)
+ end.
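+%% For example (illustrative values), parse_numeric(<<"0000644",0>>) evaluates
+%% to 420 for an octal field, while a base-256 field such as <<16#80,0,0,1>>
+%% encodes the value 1.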
-convert_header(Bin) when byte_size(Bin) =:= ?record_size ->
- case verify_checksum(Bin) of
- ok ->
- Hd = #tar_header{name=get_name(Bin),
- mode=from_octal(Bin, ?th_mode, ?th_mode_len),
- uid=from_octal(Bin, ?th_uid, ?th_uid_len),
- gid=from_octal(Bin, ?th_gid, ?th_gid_len),
- size=from_octal(Bin, ?th_size, ?th_size_len),
- mtime=from_octal(Bin, ?th_mtime, ?th_mtime_len),
- linkname=from_string(Bin,
- ?th_linkname, ?th_linkname_len),
- typeflag=typeflag(Bin)},
- convert_header1(Hd);
- eof ->
- eof
+parse_octal(Bin) when is_binary(Bin) ->
+ %% skip leading/trailing zero bytes and spaces
+ do_parse_octal(Bin, <<>>).
+do_parse_octal(<<>>, <<>>) ->
+ 0;
+do_parse_octal(<<>>, Acc) ->
+ case io_lib:fread("~8u", binary:bin_to_list(Acc)) of
+ {error, _} -> throw({error, invalid_tar_checksum});
+ {ok, [Octal], []} -> Octal;
+ {ok, _, _} -> throw({error, invalid_tar_checksum})
end;
-convert_header(Bin) when byte_size(Bin) =:= 0 ->
+do_parse_octal(<<$\s,Rest/binary>>, Acc) ->
+ do_parse_octal(Rest, Acc);
+do_parse_octal(<<0, Rest/binary>>, Acc) ->
+ do_parse_octal(Rest, Acc);
+do_parse_octal(<<C, Rest/binary>>, Acc) ->
+ do_parse_octal(Rest, <<Acc/binary, C>>).
+
+parse_string(Bin) when is_binary(Bin) ->
+ do_parse_string(Bin, <<>>).
+do_parse_string(<<>>, Acc) ->
+ case unicode:characters_to_list(Acc) of
+ Str when is_list(Str) ->
+ Str;
+ {incomplete, _Str, _Rest} ->
+ binary:bin_to_list(Acc);
+ {error, _Str, _Rest} ->
+ throw({error, {bad_header, invalid_string}})
+ end;
+do_parse_string(<<0, _/binary>>, Acc) ->
+ do_parse_string(<<>>, Acc);
+do_parse_string(<<C, Rest/binary>>, Acc) ->
+ do_parse_string(Rest, <<Acc/binary, C>>).
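+%% For example, parse_string(<<"foo",0,0>>) evaluates to "foo": accumulation
+%% stops at the first NUL byte and the bytes are decoded as UTF-8 when possible.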
+
+convert_header(Bin, #reader{pos=Pos}=Reader)
+ when byte_size(Bin) =:= ?BLOCK_SIZE, (Pos rem ?BLOCK_SIZE) =:= 0 ->
+ case get_format(Bin) of
+ ?FORMAT_UNKNOWN ->
+ throw({error, bad_header});
+ {ok, Format, V7} ->
+ unpack_format(Format, V7, Bin, Reader);
+ {error, Reason} ->
+ throw({error, {bad_header, Reason}})
+ end;
+convert_header(Bin, #reader{pos=Pos}) when byte_size(Bin) =:= ?BLOCK_SIZE ->
+ throw({error, misaligned_read, Pos});
+convert_header(Bin, _Reader) when byte_size(Bin) =:= 0 ->
eof;
-convert_header(_Bin) ->
+convert_header(_Bin, _Reader) ->
throw({error, eof}).
-%% Basic sanity. Better set the element size to zero here if the type
-%% always is of zero length.
-
-convert_header1(H) when H#tar_header.typeflag =:= symlink, H#tar_header.size =/= 0 ->
- convert_header1(H#tar_header{size=0});
-convert_header1(H) when H#tar_header.typeflag =:= directory, H#tar_header.size =/= 0 ->
- convert_header1(H#tar_header{size=0});
-convert_header1(Header) ->
- Header.
-
-typeflag(Bin) ->
- [T] = binary_to_list(Bin, ?th_typeflag+1, ?th_typeflag+1),
- case T of
- 0 -> regular;
- $0 -> regular;
- $1 -> link;
- $2 -> symlink;
- $3 -> char;
- $4 -> block;
- $5 -> directory;
- $6 -> fifo;
- $7 -> regular;
- _ -> unknown
+%% Creates a partially-populated header record based
+%% on the provided file_info record. If the file is
+%% a symlink, then `Link` is used as the link target.
+%% If the file is a directory, a slash is appended to the name.
+fileinfo_to_header(Name, #file_info{}=Fi, Link) when is_list(Name) ->
+ BaseHeader = #tar_header{name=Name,
+ mtime=Fi#file_info.mtime,
+ atime=Fi#file_info.atime,
+ ctime=Fi#file_info.ctime,
+ mode=Fi#file_info.mode,
+ uid=Fi#file_info.uid,
+ gid=Fi#file_info.gid,
+ typeflag=?TYPE_REGULAR},
+ do_fileinfo_to_header(BaseHeader, Fi, Link).
+
+do_fileinfo_to_header(Header, #file_info{size=Size,type=regular}, _Link) ->
+ Header#tar_header{size=Size,typeflag=?TYPE_REGULAR};
+do_fileinfo_to_header(#tar_header{name=Name}=Header,
+ #file_info{type=directory}, _Link) ->
+ Header#tar_header{name=Name++"/",typeflag=?TYPE_DIR};
+do_fileinfo_to_header(Header, #file_info{type=symlink}, Link) ->
+ Header#tar_header{typeflag=?TYPE_SYMLINK,linkname=Link};
+do_fileinfo_to_header(Header, #file_info{type=device,mode=Mode}=Fi, _Link)
+ when (Mode band ?S_IFMT) =:= ?S_IFCHR ->
+ Header#tar_header{typeflag=?TYPE_CHAR,
+ devmajor=Fi#file_info.major_device,
+ devminor=Fi#file_info.minor_device};
+do_fileinfo_to_header(Header, #file_info{type=device,mode=Mode}=Fi, _Link)
+ when (Mode band ?S_IFMT) =:= ?S_IFBLK ->
+ Header#tar_header{typeflag=?TYPE_BLOCK,
+ devmajor=Fi#file_info.major_device,
+ devminor=Fi#file_info.minor_device};
+do_fileinfo_to_header(Header, #file_info{type=other,mode=Mode}, _Link)
+ when (Mode band ?S_IFMT) =:= ?S_FIFO ->
+ Header#tar_header{typeflag=?TYPE_FIFO};
+do_fileinfo_to_header(Header, Fi, _Link) ->
+ {error, {invalid_file_type, Header#tar_header.name, Fi}}.
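+%% For example (illustrative values), a directory entry named "priv" produces a
+%% header with name "priv/" and typeflag ?TYPE_DIR, while a regular file keeps
+%% its name and records its #file_info.size in the header.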
+
+is_ascii(Str) when is_list(Str) ->
+ not lists:any(fun (Char) -> Char >= 16#80 end, Str);
+is_ascii(Bin) when is_binary(Bin) ->
+ is_ascii1(Bin).
+
+is_ascii1(<<>>) ->
+ true;
+is_ascii1(<<C,_Rest/binary>>) when C >= 16#80 ->
+ false;
+is_ascii1(<<_, Rest/binary>>) ->
+ is_ascii1(Rest).
+
+to_ascii(Str) when is_list(Str) ->
+ case is_ascii(Str) of
+ true ->
+ unicode:characters_to_binary(Str);
+ false ->
+ Chars = lists:filter(fun (Char) -> Char < 16#80 end, Str),
+ unicode:characters_to_binary(Chars)
+ end;
+to_ascii(Bin) when is_binary(Bin) ->
+ to_ascii(Bin, <<>>).
+to_ascii(<<>>, Acc) ->
+ Acc;
+to_ascii(<<C, Rest/binary>>, Acc) when C < 16#80 ->
+ to_ascii(Rest, <<Acc/binary,C>>);
+to_ascii(<<_, Rest/binary>>, Acc) ->
+ to_ascii(Rest, Acc).
+
+is_header_only_type(?TYPE_SYMLINK) -> true;
+is_header_only_type(?TYPE_LINK) -> true;
+is_header_only_type(?TYPE_DIR) -> true;
+is_header_only_type(_) -> false.
+
+posix_to_erlang_time(Sec) ->
+ OneMillion = 1000000,
+ Time = calendar:now_to_datetime({Sec div OneMillion, Sec rem OneMillion, 0}),
+ erlang:universaltime_to_localtime(Time).
+
+foldl_read(#reader{access=read}=Reader, Fun, Accu, #read_opts{}=Opts)
+ when is_function(Fun,4) ->
+ case foldl_read0(Reader, Fun, Accu, Opts) of
+ {ok, Result, _Reader2} ->
+ Result;
+ {error, _} = Err ->
+ Err
+ end;
+foldl_read(#reader{access=Access}, _Fun, _Accu, _Opts) ->
+ {error, {read_mode_expected, Access}};
+foldl_read(TarName, Fun, Accu, #read_opts{}=Opts)
+ when is_function(Fun,4) ->
+ try open(TarName, [read|Opts#read_opts.open_mode]) of
+ {ok, #reader{access=read}=Reader} ->
+ foldl_read(Reader, Fun, Accu, Opts);
+ {error, _} = Err ->
+ Err
+ catch
+ throw:Err ->
+ Err
end.
-%% Get the name of the file from the prefix and name fields of the
-%% tar header.
-
-get_name(Bin0) ->
- List0 = get_name_raw(Bin0),
- case file:native_name_encoding() of
- utf8 ->
- Bin = list_to_binary(List0),
- case unicode:characters_to_list(Bin) of
- {error,_,_} ->
- List0;
- List when is_list(List) ->
- List
- end;
- latin1 ->
- List0
+foldl_read0(Reader, Fun, Accu, Opts) ->
+ try foldl_read1(Fun, Accu, Reader, Opts, #{}) of
+ {ok,_,_} = Ok ->
+ Ok
+ catch
+ throw:{error, {Reason, Format, Args}} ->
+ read_verbose(Opts, Format, Args),
+ {error, Reason};
+ throw:Err ->
+ Err
end.
-get_name_raw(Bin) ->
- Name = from_string(Bin, ?th_name, ?th_name_len),
- case binary_to_list(Bin, ?th_prefix+1, ?th_prefix+1) of
- [0] ->
- Name;
- [_] ->
- Prefix = binary_to_list(Bin, ?th_prefix+1, byte_size(Bin)),
- lists:reverse(remove_nulls(Prefix), [$/|Name])
+foldl_read1(Fun, Accu0, Reader0, Opts, ExtraHeaders) ->
+ {ok, Reader1} = skip_unread(Reader0),
+ case get_header(Reader1) of
+ eof ->
+ Fun(eof, Reader1, Opts, Accu0);
+ {Header, Reader2} ->
+ case Header#tar_header.typeflag of
+ ?TYPE_X_HEADER ->
+ {ExtraHeaders2, Reader3} = parse_pax(Reader2),
+ ExtraHeaders3 = maps:merge(ExtraHeaders, ExtraHeaders2),
+ foldl_read1(Fun, Accu0, Reader3, Opts, ExtraHeaders3);
+ ?TYPE_GNU_LONGNAME ->
+ {RealName, Reader3} = get_real_name(Reader2),
+ ExtraHeaders2 = maps:put(?PAX_PATH,
+ parse_string(RealName), ExtraHeaders),
+ foldl_read1(Fun, Accu0, Reader3, Opts, ExtraHeaders2);
+ ?TYPE_GNU_LONGLINK ->
+ {RealName, Reader3} = get_real_name(Reader2),
+ ExtraHeaders2 = maps:put(?PAX_LINKPATH,
+ parse_string(RealName), ExtraHeaders),
+ foldl_read1(Fun, Accu0, Reader3, Opts, ExtraHeaders2);
+ _ ->
+ Header1 = merge_pax(Header, ExtraHeaders),
+ {ok, NewAccu, Reader3} = Fun(Header1, Reader2, Opts, Accu0),
+ foldl_read1(Fun, NewAccu, Reader3, Opts, #{})
+ end
end.
-from_string(Bin, Pos, Len) ->
- lists:reverse(remove_nulls(binary_to_list(Bin, Pos+1, Pos+Len))).
-
-%% Returns all characters up to (but not including) the first null
-%% character, in REVERSE order.
-
-remove_nulls(List) ->
- remove_nulls(List, []).
-
-remove_nulls([0|_], Result) ->
- remove_nulls([], Result);
-remove_nulls([C|Rest], Result) ->
- remove_nulls(Rest, [C|Result]);
-remove_nulls([], Result) ->
- Result.
-
-from_octal(Bin, Pos, Len) ->
- from_octal(binary_to_list(Bin, Pos+1, Pos+Len)).
-
-from_octal([$\s|Rest]) ->
- from_octal(Rest);
-from_octal([Digit|Rest]) when $0 =< Digit, Digit =< $7 ->
- from_octal(Rest, Digit-$0);
-from_octal(Bin) when is_binary(Bin) ->
- from_octal(binary_to_list(Bin));
-from_octal(Other) ->
- throw({error, {bad_header, "Bad octal number: ~p", [Other]}}).
-
-from_octal([Digit|Rest], Result) when $0 =< Digit, Digit =< $7 ->
- from_octal(Rest, Result*8+Digit-$0);
-from_octal([$\s|_], Result) ->
- Result;
-from_octal([0|_], Result) ->
- Result;
-from_octal(Other, _) ->
- throw({error, {bad_header, "Bad contents in octal field: ~p", [Other]}}).
-
-%% Retrieves the next element from the archive.
-%% Returns {ok, Bin} | eof | {error, Reason}
-
-get_element(File, #tar_header{size = 0}) ->
- skip_to_next(File),
- {ok,<<>>};
-get_element(File, #tar_header{size = Size}) ->
- case do_read(File, Size) of
- {ok,Bin}=Res when byte_size(Bin) =:= Size ->
- skip_to_next(File),
- Res;
- {ok,List} when length(List) =:= Size ->
- skip_to_next(File),
- {ok,list_to_binary(List)};
- {ok,_} -> throw({error,eof});
- {error, Reason} -> throw({error, Reason});
- eof -> throw({error,eof})
+%% Applies all known PAX attributes to the current tar header
+-spec merge_pax(tar_header(), #{binary() => binary()}) -> tar_header().
+merge_pax(Header, ExtraHeaders) when is_map(ExtraHeaders) ->
+ do_merge_pax(Header, maps:to_list(ExtraHeaders)).
+
+do_merge_pax(Header, []) ->
+ Header;
+do_merge_pax(Header, [{?PAX_PATH, Path}|Rest]) ->
+ do_merge_pax(Header#tar_header{name=unicode:characters_to_list(Path)}, Rest);
+do_merge_pax(Header, [{?PAX_LINKPATH, LinkPath}|Rest]) ->
+ do_merge_pax(Header#tar_header{linkname=unicode:characters_to_list(LinkPath)}, Rest);
+do_merge_pax(Header, [{?PAX_GNAME, Gname}|Rest]) ->
+ do_merge_pax(Header#tar_header{gname=unicode:characters_to_list(Gname)}, Rest);
+do_merge_pax(Header, [{?PAX_UNAME, Uname}|Rest]) ->
+ do_merge_pax(Header#tar_header{uname=unicode:characters_to_list(Uname)}, Rest);
+do_merge_pax(Header, [{?PAX_UID, Uid}|Rest]) ->
+ Uid2 = binary_to_integer(Uid),
+ do_merge_pax(Header#tar_header{uid=Uid2}, Rest);
+do_merge_pax(Header, [{?PAX_GID, Gid}|Rest]) ->
+ Gid2 = binary_to_integer(Gid),
+ do_merge_pax(Header#tar_header{gid=Gid2}, Rest);
+do_merge_pax(Header, [{?PAX_ATIME, Atime}|Rest]) ->
+ Atime2 = parse_pax_time(Atime),
+ do_merge_pax(Header#tar_header{atime=Atime2}, Rest);
+do_merge_pax(Header, [{?PAX_MTIME, Mtime}|Rest]) ->
+ Mtime2 = parse_pax_time(Mtime),
+ do_merge_pax(Header#tar_header{mtime=Mtime2}, Rest);
+do_merge_pax(Header, [{?PAX_CTIME, Ctime}|Rest]) ->
+ Ctime2 = parse_pax_time(Ctime),
+ do_merge_pax(Header#tar_header{ctime=Ctime2}, Rest);
+do_merge_pax(Header, [{?PAX_SIZE, Size}|Rest]) ->
+ Size2 = binary_to_integer(Size),
+ do_merge_pax(Header#tar_header{size=Size2}, Rest);
+do_merge_pax(Header, [{<<?PAX_XATTR_STR, _Key/binary>>, _Value}|Rest]) ->
+ do_merge_pax(Header, Rest);
+do_merge_pax(Header, [_Ignore|Rest]) ->
+ do_merge_pax(Header, Rest).
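+%% For example (illustrative values), merging #{?PAX_UID => <<"1000">>,
+%% ?PAX_PATH => <<"a/very/long/path">>} into a header sets uid to 1000 and
+%% name to "a/very/long/path"; unrecognized keys are simply ignored.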
+
+%% Parses a PAX time value (seconds since the UNIX epoch, with an optional fractional part) into a datetime
+-spec parse_pax_time(binary()) -> calendar:datetime().
+parse_pax_time(Bin) when is_binary(Bin) ->
+ TotalNano = case binary:split(Bin, [<<$.>>]) of
+ [SecondsStr, NanoStr0] ->
+ Seconds = binary_to_integer(SecondsStr),
+ if byte_size(NanoStr0) < ?MAX_NANO_INT_SIZE ->
+ %% right pad
+ PaddingN = ?MAX_NANO_INT_SIZE-byte_size(NanoStr0),
+ Padding = binary:copy(<<$0>>, PaddingN),
+ NanoStr1 = <<NanoStr0/binary,Padding/binary>>,
+ Nano = binary_to_integer(NanoStr1),
+ (Seconds*?BILLION)+Nano;
+ byte_size(NanoStr0) > ?MAX_NANO_INT_SIZE ->
+ %% right truncate
+ NanoStr1 = binary_part(NanoStr0, 0, ?MAX_NANO_INT_SIZE),
+ Nano = binary_to_integer(NanoStr1),
+ (Seconds*?BILLION)+Nano;
+ true ->
+ (Seconds*?BILLION)+binary_to_integer(NanoStr0)
+ end;
+ [SecondsStr] ->
+ binary_to_integer(SecondsStr)*?BILLION
+ end,
+ %% truncate to microseconds
+ Micro = TotalNano div 1000,
+ Mega = Micro div 1000000000000,
+ Secs = Micro div 1000000 - (Mega*1000000),
+ Micro2 = Micro rem 1000000,
+ calendar:now_to_datetime({Mega, Secs, Micro2}).
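+%% For example, parse_pax_time(<<"0">>) evaluates to {{1970,1,1},{0,0,0}}, and
+%% a fractional value such as <<"1484142000.123456789">> is truncated to
+%% microsecond precision before the conversion to a datetime.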
+
+%% Given a regular file reader, reads the whole file and
+%% parses all extended attributes it contains.
+parse_pax(#reg_file_reader{handle=Handle,num_bytes=0}) ->
+ {#{}, Handle};
+parse_pax(#reg_file_reader{handle=Handle0,num_bytes=NumBytes}) ->
+ case do_read(Handle0, NumBytes) of
+ {ok, Bytes, Handle1} ->
+ do_parse_pax(Handle1, Bytes, #{});
+ {error, _} = Err ->
+ throw(Err)
end.
-%% Verify the checksum in the header. First try an unsigned addition
-%% of all bytes in the header (as it should be according to Posix).
-
-verify_checksum(Bin) ->
- <<H1:?th_chksum/binary,CheckStr:?th_chksum_len/binary,H2/binary>> = Bin,
- case checksum(H1) + checksum(H2) of
- 0 -> eof;
- Checksum0 ->
- Csum = from_octal(CheckStr),
- CsumInit = ?th_chksum_len * $\s,
- case Checksum0 + CsumInit of
- Csum -> ok;
- Unsigned ->
- verify_checksum(H1, H2, CsumInit, Csum, Unsigned)
- end
+do_parse_pax(Reader, <<>>, Headers) ->
+ {Headers, Reader};
+do_parse_pax(Reader, Bin, Headers) ->
+ {Key, Value, Residual} = parse_pax_record(Bin),
+ NewHeaders = maps:put(Key, Value, Headers),
+ do_parse_pax(Reader, Residual, NewHeaders).
+
+%% Parse an extended attribute
+parse_pax_record(Bin) when is_binary(Bin) ->
+ case binary:split(Bin, [<<$\n>>]) of
+ [Record, Residual] ->
+ case binary:split(Record, [<<$\s>>], [trim_all]) of
+ [_Len, Record1] ->
+ case binary:split(Record1, [<<$=>>], [trim_all]) of
+ [AttrName, AttrValue] ->
+ {AttrName, AttrValue, Residual};
+ _Other ->
+ throw({error, malformed_pax_record})
+ end;
+ _Other ->
+ throw({error, malformed_pax_record})
+ end;
+ _Other ->
+ throw({error, malformed_pax_record})
end.
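+%% Each PAX record has the form "<length> <key>=<value>\n", where <length> is
+%% the total record length including itself. For example:
+%%   parse_pax_record(<<"25 ctime=1084839148.1212\n">>)
+%% evaluates to {<<"ctime">>, <<"1084839148.1212">>, <<>>}.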
-%% The checksums didn't match. Now try a signed addition.
+get_real_name(#reg_file_reader{handle=Handle,num_bytes=0}) ->
+ {"", Handle};
+get_real_name(#reg_file_reader{handle=Handle0,num_bytes=NumBytes}) ->
+ case do_read(Handle0, NumBytes) of
+ {ok, RealName, Handle1} ->
+ {RealName, Handle1};
+ {error, _} = Err ->
+ throw(Err)
+ end;
+get_real_name(#sparse_file_reader{num_bytes=NumBytes}=Reader0) ->
+ case do_read(Reader0, NumBytes) of
+ {ok, RealName, Reader1} ->
+ {RealName, Reader1};
+ {error, _} = Err ->
+ throw(Err)
+ end.
-verify_checksum(H1, H2, Csum, ShouldBe, Unsigned) ->
- case signed_sum(binary_to_list(H1), signed_sum(binary_to_list(H2), Csum)) of
- ShouldBe -> ok;
- Signed ->
- throw({error,
- {bad_header,
- "Incorrect directory checksum ~w (~w), should be ~w",
- [Signed, Unsigned, ShouldBe]}})
+%% Skip the remaining bytes for the current file entry
+skip_file(#reg_file_reader{handle=Handle0,pos=Pos,size=Size}=Reader) ->
+ Padding = skip_padding(Size),
+ AbsPos = Handle0#reader.pos + (Size-Pos) + Padding,
+ case do_position(Handle0, AbsPos) of
+ {ok, _, Handle1} ->
+ Reader#reg_file_reader{handle=Handle1,num_bytes=0,pos=Size};
+ Err ->
+ throw(Err)
+ end;
+skip_file(#sparse_file_reader{pos=Pos,size=Size}=Reader) ->
+ case do_read(Reader, Size-Pos) of
+ {ok, _, Reader2} ->
+ Reader2;
+ Err ->
+ throw(Err)
end.
-signed_sum([C|Rest], Sum) when C < 128 ->
- signed_sum(Rest, Sum+C);
-signed_sum([C|Rest], Sum) ->
- signed_sum(Rest, Sum+C-256);
-signed_sum([], Sum) -> Sum.
-
-write_extracted_element(Header, Bin, Opts)
- when Opts#read_opts.output =:= memory ->
- case Header#tar_header.typeflag of
- regular ->
- {ok, {Header#tar_header.name, Bin}};
- _ ->
- ok
+skip_padding(0) ->
+ 0;
+skip_padding(Size) when (Size rem ?BLOCK_SIZE) =:= 0 ->
+ 0;
+skip_padding(Size) when Size =< ?BLOCK_SIZE ->
+ ?BLOCK_SIZE - Size;
+skip_padding(Size) ->
+ ?BLOCK_SIZE - (Size rem ?BLOCK_SIZE).
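+%% For example, skip_padding(512) evaluates to 0, skip_padding(100) to 412,
+%% and skip_padding(1000) to 24, i.e. the distance from the end of the entry
+%% to the next ?BLOCK_SIZE boundary.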
+
+skip_unread(#reader{pos=Pos}=Reader0) when (Pos rem ?BLOCK_SIZE) > 0 ->
+ Padding = skip_padding(Pos + ?BLOCK_SIZE),
+ AbsPos = Pos + Padding,
+ case do_position(Reader0, AbsPos) of
+ {ok, _, Reader1} ->
+ {ok, Reader1};
+ Err ->
+ throw(Err)
+ end;
+skip_unread(#reader{}=Reader) ->
+ {ok, Reader};
+skip_unread(#reg_file_reader{handle=Handle,num_bytes=0}) ->
+ skip_unread(Handle);
+skip_unread(#reg_file_reader{}=Reader) ->
+ #reg_file_reader{handle=Handle} = skip_file(Reader),
+ {ok, Handle};
+skip_unread(#sparse_file_reader{handle=Handle,num_bytes=0}) ->
+ skip_unread(Handle);
+skip_unread(#sparse_file_reader{}=Reader) ->
+ #sparse_file_reader{handle=Handle} = skip_file(Reader),
+ {ok, Handle}.
+
+write_extracted_element(#tar_header{name=Name,typeflag=Type},
+ Bin,
+ #read_opts{output=memory}=Opts) ->
+ case typeflag(Type) of
+ regular ->
+ read_verbose(Opts, "x ~ts~n", [Name]),
+ {ok, {Name, Bin}};
+ _ ->
+ ok
end;
-write_extracted_element(Header, Bin, Opts) ->
- Name = filename:absname(Header#tar_header.name, Opts#read_opts.cwd),
- Created =
- case Header#tar_header.typeflag of
- regular ->
- write_extracted_file(Name, Bin, Opts);
- directory ->
- create_extracted_dir(Name, Opts);
- symlink ->
- create_symlink(Name, Header, Opts);
- Other -> % Ignore.
- read_verbose(Opts, "x ~ts - unsupported type ~p~n",
- [Name, Other]),
- not_written
- end,
+write_extracted_element(#tar_header{name=Name0}=Header, Bin, Opts) ->
+ Name1 = filename:absname(Name0, Opts#read_opts.cwd),
+ Created =
+ case typeflag(Header#tar_header.typeflag) of
+ regular ->
+ create_regular(Name1, Name0, Bin, Opts);
+ directory ->
+ read_verbose(Opts, "x ~ts~n", [Name0]),
+ create_extracted_dir(Name1, Opts);
+ symlink ->
+ read_verbose(Opts, "x ~ts~n", [Name0]),
+ create_symlink(Name1, Header#tar_header.linkname, Opts);
+ Device when Device =:= char orelse Device =:= block ->
+ %% char/block devices will be created as empty files
+ %% and then have their major/minor device set later
+ create_regular(Name1, Name0, <<>>, Opts);
+ fifo ->
+ %% fifo devices will be created as empty files
+ create_regular(Name1, Name0, <<>>, Opts);
+ Other -> % Ignore.
+ read_verbose(Opts, "x ~ts - unsupported type ~p~n",
+ [Name0, Other]),
+ not_written
+ end,
case Created of
- ok -> set_extracted_file_info(Name, Header);
- not_written -> ok
+ ok -> set_extracted_file_info(Name1, Header);
+ not_written -> ok
+ end.
+
+create_regular(Name, NameInArchive, Bin, Opts) ->
+ case write_extracted_file(Name, Bin, Opts) of
+ not_written ->
+ read_verbose(Opts, "x ~ts - exists, not created~n", [NameInArchive]),
+ not_written;
+ Ok ->
+ read_verbose(Opts, "x ~ts~n", [NameInArchive]),
+ Ok
end.
create_extracted_dir(Name, _Opts) ->
case file:make_dir(Name) of
- ok -> ok;
- {error,enotsup} -> not_written;
- {error,eexist} -> not_written;
- {error,enoent} -> make_dirs(Name, dir);
- {error,Reason} -> throw({error, Reason})
+ ok -> ok;
+ {error,enotsup} -> not_written;
+ {error,eexist} -> not_written;
+ {error,enoent} -> make_dirs(Name, dir);
+ {error,Reason} -> throw({error, Reason})
end.
-create_symlink(Name, #tar_header{linkname=Linkname}=Header, Opts) ->
+create_symlink(Name, Linkname, Opts) ->
case file:make_symlink(Linkname, Name) of
- ok -> ok;
- {error,enoent} ->
- ok = make_dirs(Name, file),
- create_symlink(Name, Header, Opts);
- {error,eexist} -> not_written;
- {error,enotsup} ->
- read_verbose(Opts, "x ~ts - symbolic links not supported~n", [Name]),
- not_written;
- {error,Reason} -> throw({error, Reason})
+ ok -> ok;
+ {error,enoent} ->
+ ok = make_dirs(Name, file),
+ create_symlink(Name, Linkname, Opts);
+ {error,eexist} -> not_written;
+ {error,enotsup} ->
+ read_verbose(Opts, "x ~ts - symbolic links not supported~n", [Name]),
+ not_written;
+ {error,Reason} -> throw({error, Reason})
end.
write_extracted_file(Name, Bin, Opts) ->
Write =
- case Opts#read_opts.keep_old_files of
- true ->
- case file:read_file_info(Name) of
- {ok, _} -> false;
- _ -> true
- end;
- false -> true
- end,
+ case Opts#read_opts.keep_old_files of
+ true ->
+ case file:read_file_info(Name) of
+ {ok, _} -> false;
+ _ -> true
+ end;
+ false -> true
+ end,
case Write of
- true ->
- read_verbose(Opts, "x ~ts~n", [Name]),
- write_file(Name, Bin);
- false ->
- read_verbose(Opts, "x ~ts - exists, not created~n", [Name]),
- not_written
+ true -> write_file(Name, Bin);
+ false -> not_written
end.
write_file(Name, Bin) ->
case file:write_file(Name, Bin) of
- ok -> ok;
- {error,enoent} ->
- ok = make_dirs(Name, file),
- write_file(Name, Bin);
- {error,Reason} ->
- throw({error, Reason})
+ ok -> ok;
+ {error,enoent} ->
+ ok = make_dirs(Name, file),
+ write_file(Name, Bin);
+ {error,Reason} ->
+ throw({error, Reason})
end.
-set_extracted_file_info(_, #tar_header{typeflag = symlink}) -> ok;
-set_extracted_file_info(Name, #tar_header{mode=Mode, mtime=Mtime}) ->
- Info = #file_info{mode=Mode, mtime=posix_to_erlang_time(Mtime)},
+set_extracted_file_info(_, #tar_header{typeflag = ?TYPE_SYMLINK}) -> ok;
+set_extracted_file_info(_, #tar_header{typeflag = ?TYPE_LINK}) -> ok;
+set_extracted_file_info(Name, #tar_header{typeflag = ?TYPE_CHAR}=Header) ->
+ set_device_info(Name, Header);
+set_extracted_file_info(Name, #tar_header{typeflag = ?TYPE_BLOCK}=Header) ->
+ set_device_info(Name, Header);
+set_extracted_file_info(Name, #tar_header{mtime=Mtime,mode=Mode}) ->
+ Info = #file_info{mode=Mode, mtime=Mtime},
+ file:write_file_info(Name, Info).
+
+set_device_info(Name, #tar_header{}=Header) ->
+ Mtime = Header#tar_header.mtime,
+ Mode = Header#tar_header.mode,
+ Devmajor = Header#tar_header.devmajor,
+ Devminor = Header#tar_header.devminor,
+ Info = #file_info{
+ mode=Mode,
+ mtime=Mtime,
+ major_device=Devmajor,
+ minor_device=Devminor
+ },
file:write_file_info(Name, Info).
%% Makes all directories leading up to the file.
make_dirs(Name, file) ->
- filelib:ensure_dir(Name);
+ filelib:ensure_dir(Name);
make_dirs(Name, dir) ->
- filelib:ensure_dir(filename:join(Name,"*")).
+ filelib:ensure_dir(filename:join(Name,"*")).
%% Prints the message on if the verbose option is given (for reading).
-
read_verbose(#read_opts{verbose=true}, Format, Args) ->
- io:format(Format, Args),
- io:nl();
+ io:format(Format, Args);
read_verbose(_, _, _) ->
ok.
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%%%
-%%% Utility functions.
-%%%
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-%% Returns the checksum of a binary.
-
-checksum(Bin) -> checksum(Bin, 0).
-
-checksum(<<A,B,C,D,E,F,G,H,T/binary>>, Sum) ->
- checksum(T, Sum+A+B+C+D+E+F+G+H);
-checksum(<<A,T/binary>>, Sum) ->
- checksum(T, Sum+A);
-checksum(<<>>, Sum) -> Sum.
-
-%% Returns a list of zeroes to pad out to the given block size.
-
-padding(Size, BlockSize) ->
- zeroes(pad_size(Size, BlockSize)).
-
-pad_size(Size, BlockSize) ->
- case Size rem BlockSize of
- 0 -> 0;
- Rem -> BlockSize-Rem
- end.
-
-zeroes(0) -> [];
-zeroes(1) -> [0];
-zeroes(2) -> [0,0];
-zeroes(Number) ->
- Half = zeroes(Number div 2),
- case Number rem 2 of
- 0 -> [Half|Half];
- 1 -> [Half|[0|Half]]
- end.
-
-%% Skips the given number of bytes rounded up to an even record.
-
-skip(File, Size) ->
- %% Note: There is no point in handling failure to get the current position
- %% in the file. If it doesn't work, something serious is wrong.
- Amount = ((Size + ?record_size - 1) div ?record_size) * ?record_size,
- {ok,_} = do_position(File, {cur, Amount}),
- ok.
-
-%% Skips to the next record in the file.
-
-skip_to_next(File) ->
- %% Note: There is no point in handling failure to get the current position
- %% in the file. If it doesn't work, something serious is wrong.
- {ok, Position} = do_position(File, {cur, 0}),
- NewPosition = ((Position + ?record_size - 1) div ?record_size) * ?record_size,
- {ok,NewPosition} = do_position(File, NewPosition),
- ok.
-
%% Prints the message on if the verbose option is given.
-
add_verbose(#add_opts{verbose=true}, Format, Args) ->
io:format(Format, Args);
add_verbose(_, _, _) ->
ok.
-%% Converts a tuple containing the time to a Posix time (seconds
-%% since Jan 1, 1970).
+%%%%%%%%%%%%%%%%%%
+%% I/O primitives
+%%%%%%%%%%%%%%%%%%
+
+do_write(#reader{handle=Handle,func=Fun}=Reader0, Data)
+ when is_function(Fun,2) ->
+ case Fun(write,{Handle,Data}) of
+ ok ->
+ {ok, Pos, Reader1} = do_position(Reader0, {cur,0}),
+ {ok, Reader1#reader{pos=Pos}};
+ {error, _} = Err ->
+ Err
+ end.
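+%% The I/O callback is a 2-arity fun as described by the file_op() type. As an
+%% illustrative sketch (not the only possible wrapper), a plain file handle
+%% could be driven by:
+%%   Fun = fun (write, {Fd, Data})   -> file:write(Fd, Data);
+%%             (position, {Fd, Pos}) -> file:position(Fd, Pos);
+%%             (read2, {Fd, Len})    -> file:read(Fd, Len);
+%%             (close, Fd)           -> file:close(Fd)
+%%         end.
+%% Each primitive above returns the shapes that do_write/2, do_position/2,
+%% do_read/2 and do_close/1 expect.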
-posix_time(Time) ->
- EpochStart = {{1970,1,1},{0,0,0}},
- {Days,{Hour,Min,Sec}} = calendar:time_difference(EpochStart, Time),
- 86400*Days + 3600*Hour + 60*Min + Sec.
+do_copy(#reader{func=Fun}=Reader, Source, #add_opts{chunk_size=0}=Opts)
+ when is_function(Fun, 2) ->
+ do_copy(Reader, Source, Opts#add_opts{chunk_size=65536});
+do_copy(#reader{func=Fun}=Reader, Source, #add_opts{chunk_size=ChunkSize})
+ when is_function(Fun, 2) ->
+ case file:open(Source, [read, binary]) of
+ {ok, SourceFd} ->
+ case copy_chunked(Reader, SourceFd, ChunkSize, 0) of
+                {ok, _Copied, _Reader2} = Ok ->
+ _ = file:close(SourceFd),
+ Ok;
+ Err ->
+ _ = file:close(SourceFd),
+ throw(Err)
+ end;
+ Err ->
+ throw(Err)
+ end.
-posix_to_erlang_time(Sec) ->
- OneMillion = 1000000,
- Time = calendar:now_to_datetime({Sec div OneMillion, Sec rem OneMillion, 0}),
- erlang:universaltime_to_localtime(Time).
+copy_chunked(#reader{}=Reader, Source, ChunkSize, Copied) ->
+ case file:read(Source, ChunkSize) of
+ {ok, Bin} ->
+ {ok, Reader2} = do_write(Reader, Bin),
+ copy_chunked(Reader2, Source, ChunkSize, Copied+byte_size(Bin));
+ eof ->
+ {ok, Copied, Reader};
+ Other ->
+ Other
+ end.
-read_file_and_info(Name, Opts) ->
- ReadInfo = Opts#add_opts.read_info,
- case ReadInfo(Name) of
- {ok,Info} when Info#file_info.type =:= regular,
- Opts#add_opts.chunk_size>0 ->
- {ok,chunked,Info};
- {ok,Info} when Info#file_info.type =:= regular ->
- case file:read_file(Name) of
- {ok,Bin} ->
- {ok,Bin,Info};
- Error ->
- Error
- end;
- {ok,Info} when Info#file_info.type =:= symlink ->
- case file:read_link(Name) of
- {ok,PointsTo} ->
- {ok,PointsTo,Info};
- Error ->
- Error
- end;
- {ok, Info} ->
- {ok,[],Info};
- Error ->
- Error
+
+do_position(#reader{handle=Handle,func=Fun}=Reader, Pos)
+  when is_function(Fun,2) ->
+ case Fun(position, {Handle,Pos}) of
+ {ok, NewPos} ->
+ %% since Pos may not always be an absolute seek,
+ %% make sure we update the reader with the new absolute position
+ {ok, AbsPos} = Fun(position, {Handle, {cur, 0}}),
+ {ok, NewPos, Reader#reader{pos=AbsPos}};
+ Other ->
+ Other
end.
-foreach_while_ok(Fun, [First|Rest]) ->
- case Fun(First) of
- ok -> foreach_while_ok(Fun, Rest);
- Other -> Other
+do_read(#reg_file_reader{handle=Handle,pos=Pos,size=Size}=Reader, Len) ->
+ NumBytes = Size - Pos,
+ ActualLen = if NumBytes - Len < 0 -> NumBytes; true -> Len end,
+ case do_read(Handle, ActualLen) of
+ {ok, Bin, Handle2} ->
+ NewPos = Pos + ActualLen,
+ NumBytes2 = Size - NewPos,
+ Reader1 = Reader#reg_file_reader{
+ handle=Handle2,
+ pos=NewPos,
+ num_bytes=NumBytes2},
+ {ok, Bin, Reader1};
+ Other ->
+ Other
end;
-foreach_while_ok(_, []) -> ok.
-
-open_mode(Mode) ->
- open_mode(Mode, false, [raw], []).
+do_read(#sparse_file_reader{}=Reader, Len) ->
+ do_sparse_read(Reader, Len);
+do_read(#reader{pos=Pos,handle=Handle,func=Fun}=Reader, Len)
+  when is_function(Fun,2) ->
+ %% Always convert to binary internally
+ case Fun(read2,{Handle,Len}) of
+ {ok, List} when is_list(List) ->
+ Bin = list_to_binary(List),
+ NewPos = Pos+byte_size(Bin),
+ {ok, Bin, Reader#reader{pos=NewPos}};
+ {ok, Bin} when is_binary(Bin) ->
+ NewPos = Pos+byte_size(Bin),
+ {ok, Bin, Reader#reader{pos=NewPos}};
+ Other ->
+ Other
+ end.
-open_mode(read, _, Raw, _) ->
- {ok, read, Raw, []};
-open_mode(write, _, Raw, _) ->
- {ok, write, Raw, []};
-open_mode([read|Rest], false, Raw, Opts) ->
- open_mode(Rest, read, Raw, Opts);
-open_mode([write|Rest], false, Raw, Opts) ->
- open_mode(Rest, write, Raw, Opts);
-open_mode([compressed|Rest], Access, Raw, Opts) ->
- open_mode(Rest, Access, Raw, [compressed|Opts]);
-open_mode([cooked|Rest], Access, _Raw, Opts) ->
- open_mode(Rest, Access, [], Opts);
-open_mode([], Access, Raw, Opts) ->
- {ok, Access, Raw, Opts};
-open_mode(_, _, _, _) ->
- {error, einval}.
-%%%================================================================
-do_write({tar_descriptor,UsrHandle,Fun}, Data) -> Fun(write,{UsrHandle,Data}).
+do_sparse_read(Reader, Len) ->
+ do_sparse_read(Reader, Len, <<>>).
+
+do_sparse_read(#sparse_file_reader{sparse_map=[#sparse_entry{num_bytes=0}|Entries]
+ }=Reader0, Len, Acc) ->
+ %% skip all empty fragments
+ Reader1 = Reader0#sparse_file_reader{sparse_map=Entries},
+ do_sparse_read(Reader1, Len, Acc);
+do_sparse_read(#sparse_file_reader{sparse_map=[],
+ pos=Pos,size=Size}=Reader0, Len, Acc)
+ when Pos < Size ->
+ %% if there are no more fragments, it is possible that there is one last sparse hole
+ %% this behaviour matches the BSD tar utility
+ %% however, GNU tar stops returning data even if we haven't reached the end
+ {ok, Bin, Reader1} = read_sparse_hole(Reader0, Size, Len),
+ do_sparse_read(Reader1, Len-byte_size(Bin), <<Acc/binary,Bin/binary>>);
+do_sparse_read(#sparse_file_reader{sparse_map=[]}=Reader, _Len, Acc) ->
+ {ok, Acc, Reader};
+do_sparse_read(#sparse_file_reader{}=Reader, 0, Acc) ->
+ {ok, Acc, Reader};
+do_sparse_read(#sparse_file_reader{sparse_map=[#sparse_entry{offset=Offset}|_],
+ pos=Pos}=Reader0, Len, Acc)
+ when Pos < Offset ->
+ {ok, Bin, Reader1} = read_sparse_hole(Reader0, Offset, Offset-Pos),
+ do_sparse_read(Reader1, Len-byte_size(Bin), <<Acc/binary,Bin/binary>>);
+do_sparse_read(#sparse_file_reader{sparse_map=[Entry|Entries],
+ pos=Pos}=Reader0, Len, Acc) ->
+ %% we're in a data fragment, so read from it
+ %% end offset of fragment
+ EndPos = Entry#sparse_entry.offset + Entry#sparse_entry.num_bytes,
+ %% bytes left in fragment
+ NumBytes = EndPos - Pos,
+ ActualLen = if Len > NumBytes -> NumBytes; true -> Len end,
+ case do_read(Reader0#sparse_file_reader.handle, ActualLen) of
+ {ok, Bin, Handle} ->
+ BytesRead = byte_size(Bin),
+ ActualEndPos = Pos+BytesRead,
+ Reader1 = if ActualEndPos =:= EndPos ->
+ Reader0#sparse_file_reader{sparse_map=Entries};
+ true ->
+ Reader0
+ end,
+ Size = Reader1#sparse_file_reader.size,
+ NumBytes2 = Size - ActualEndPos,
+ Reader2 = Reader1#sparse_file_reader{
+ handle=Handle,
+ pos=ActualEndPos,
+ num_bytes=NumBytes2},
+ do_sparse_read(Reader2, Len-byte_size(Bin), <<Acc/binary,Bin/binary>>);
+ Other ->
+ Other
+ end.
+
+%% Reads a sparse hole ending at Offset
+read_sparse_hole(#sparse_file_reader{pos=Pos}=Reader, Offset, Len) ->
+ N = Offset - Pos,
+ N2 = if N > Len ->
+ Len;
+ true ->
+ N
+ end,
+ Bin = <<0:N2/unit:8>>,
+ NumBytes = Reader#sparse_file_reader.size - (Pos+N2),
+ {ok, Bin, Reader#sparse_file_reader{
+ num_bytes=NumBytes,
+ pos=Pos+N2}}.
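+%% For example (illustrative values), with sparse_map = [#sparse_entry{offset=512,
+%% num_bytes=512}] and size = 1536, reading the whole file yields 512 zero bytes
+%% for the leading hole, 512 data bytes from the archive, and 512 trailing zero
+%% bytes for the final hole.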
+
+-spec do_close(reader()) -> ok | {error, term()}.
+do_close(#reader{handle=Handle,func=Fun}) when is_function(Fun,2) ->
+ Fun(close,Handle).
+
+%%%%%%%%%%%%%%%%%%
+%% Option parsing
+%%%%%%%%%%%%%%%%%%
-do_position({tar_descriptor,UsrHandle,Fun}, Pos) -> Fun(position,{UsrHandle,Pos}).
+extract_opts(List) ->
+ extract_opts(List, default_options()).
-do_read({tar_descriptor,UsrHandle,Fun}, Len) -> Fun(read2,{UsrHandle,Len}).
+table_opts(List) ->
+ read_opts(List, default_options()).
+
+default_options() ->
+ {ok, Cwd} = file:get_cwd(),
+ #read_opts{cwd=Cwd}.
-do_close({tar_descriptor,UsrHandle,Fun}) -> Fun(close,UsrHandle).
+extract_opts([keep_old_files|Rest], Opts) ->
+ extract_opts(Rest, Opts#read_opts{keep_old_files=true});
+extract_opts([{cwd, Cwd}|Rest], Opts) ->
+ extract_opts(Rest, Opts#read_opts{cwd=Cwd});
+extract_opts([{files, Files}|Rest], Opts) ->
+ Set = ordsets:from_list(Files),
+ extract_opts(Rest, Opts#read_opts{files=Set});
+extract_opts([memory|Rest], Opts) ->
+ extract_opts(Rest, Opts#read_opts{output=memory});
+extract_opts([compressed|Rest], Opts=#read_opts{open_mode=OpenMode}) ->
+ extract_opts(Rest, Opts#read_opts{open_mode=[compressed|OpenMode]});
+extract_opts([cooked|Rest], Opts=#read_opts{open_mode=OpenMode}) ->
+ extract_opts(Rest, Opts#read_opts{open_mode=[cooked|OpenMode]});
+extract_opts([verbose|Rest], Opts) ->
+ extract_opts(Rest, Opts#read_opts{verbose=true});
+extract_opts([Other|Rest], Opts) ->
+ extract_opts(Rest, read_opts([Other], Opts));
+extract_opts([], Opts) ->
+ Opts.
+
+read_opts([compressed|Rest], Opts=#read_opts{open_mode=OpenMode}) ->
+ read_opts(Rest, Opts#read_opts{open_mode=[compressed|OpenMode]});
+read_opts([cooked|Rest], Opts=#read_opts{open_mode=OpenMode}) ->
+ read_opts(Rest, Opts#read_opts{open_mode=[cooked|OpenMode]});
+read_opts([verbose|Rest], Opts) ->
+ read_opts(Rest, Opts#read_opts{verbose=true});
+read_opts([_|Rest], Opts) ->
+ read_opts(Rest, Opts);
+read_opts([], Opts) ->
+ Opts.
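+%% For example (illustrative values), extract_opts([{cwd,"/tmp"},compressed,
+%% keep_old_files,verbose]) returns the default #read_opts{} with cwd="/tmp",
+%% keep_old_files=true, verbose=true and compressed prepended to open_mode.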
diff --git a/lib/stdlib/src/erl_tar.hrl b/lib/stdlib/src/erl_tar.hrl
new file mode 100644
index 0000000000..d646d02989
--- /dev/null
+++ b/lib/stdlib/src/erl_tar.hrl
@@ -0,0 +1,394 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2017. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+
+%% Options used when adding files to a tar archive.
+-record(add_opts, {
+          read_info,          %% Fun to use for reading file/link info.
+ chunk_size = 0, %% For file reading when sending to sftp. 0=do not chunk
+ verbose = false}). %% Verbose on/off.
+-type add_opts() :: #add_opts{}.
+
+%% Options used when reading a tar archive.
+-record(read_opts, {
+ cwd :: string(), %% Current working directory.
+          keep_old_files = false :: boolean(), %% Overwrite or not.
+ files = all, %% Set of files to extract (or all)
+ output = file :: 'file' | 'memory',
+ open_mode = [], %% Open mode options.
+ verbose = false :: boolean()}). %% Verbose on/off.
+-type read_opts() :: #read_opts{}.
+
+-type add_opt() :: dereference |
+ verbose |
+ {chunks, pos_integer()}.
+
+-type extract_opt() :: {cwd, string()} |
+ {files, [string()]} |
+ compressed |
+ cooked |
+ memory |
+ keep_old_files |
+ verbose.
+
+-type create_opt() :: compressed |
+ cooked |
+ dereference |
+ verbose.
+
+-type filelist() :: [file:filename() |
+ {string(), binary()} |
+ {string(), file:filename()}].
+
+%% The tar header, once fully parsed.
+-record(tar_header, {
+ name = "" :: string(), %% name of header file entry
+ mode = 8#100644 :: non_neg_integer(), %% permission and mode bits
+ uid = 0 :: non_neg_integer(), %% user id of owner
+ gid = 0 :: non_neg_integer(), %% group id of owner
+ size = 0 :: non_neg_integer(), %% length in bytes
+ mtime :: calendar:datetime(), %% modified time
+ typeflag :: char(), %% type of header entry
+ linkname = "" :: string(), %% target name of link
+ uname = "" :: string(), %% user name of owner
+ gname = "" :: string(), %% group name of owner
+ devmajor = 0 :: non_neg_integer(), %% major number of character or block device
+ devminor = 0 :: non_neg_integer(), %% minor number of character or block device
+ atime :: calendar:datetime(), %% access time
+ ctime :: calendar:datetime() %% status change time
+ }).
+-type tar_header() :: #tar_header{}.
+
+%% Metadata for a sparse file fragment
+-record(sparse_entry, {
+ offset = 0 :: non_neg_integer(),
+ num_bytes = 0 :: non_neg_integer()}).
+-type sparse_entry() :: #sparse_entry{}.
+%% Contains metadata about fragments of a sparse file
+-record(sparse_array, {
+ entries = [] :: [sparse_entry()],
+ is_extended = false :: boolean(),
+ max_entries = 0 :: non_neg_integer()}).
+-type sparse_array() :: #sparse_array{}.
+%% A subset of tar header fields common to all tar implementations
+-record(header_v7, {
+ name :: binary(),
+ mode :: binary(), %% octal
+ uid :: binary(), %% integer
+ gid :: binary(), %% integer
+ size :: binary(), %% integer
+ mtime :: binary(), %% integer
+ checksum :: binary(), %% integer
+ typeflag :: byte(), %% char
+ linkname :: binary()}).
+-type header_v7() :: #header_v7{}.
+%% The set of fields specific to GNU tar formatted archives
+-record(header_gnu, {
+ header_v7 :: header_v7(),
+ magic :: binary(),
+ version :: binary(),
+ uname :: binary(),
+ gname :: binary(),
+ devmajor :: binary(), %% integer
+ devminor :: binary(), %% integer
+ atime :: binary(), %% integer
+ ctime :: binary(), %% integer
+ sparse :: sparse_array(),
+ real_size :: binary()}). %% integer
+-type header_gnu() :: #header_gnu{}.
+%% The set of fields specific to STAR-formatted archives
+-record(header_star, {
+ header_v7 :: header_v7(),
+ magic :: binary(),
+ version :: binary(),
+ uname :: binary(),
+ gname :: binary(),
+ devmajor :: binary(), %% integer
+ devminor :: binary(), %% integer
+ prefix :: binary(),
+ atime :: binary(), %% integer
+ ctime :: binary(), %% integer
+ trailer :: binary()}).
+-type header_star() :: #header_star{}.
+%% The set of fields specific to USTAR-formatted archives
+-record(header_ustar, {
+ header_v7 :: header_v7(),
+ magic :: binary(),
+ version :: binary(),
+ uname :: binary(),
+ gname :: binary(),
+ devmajor :: binary(), %% integer
+ devminor :: binary(), %% integer
+ prefix :: binary()}).
+-type header_ustar() :: #header_ustar{}.
+
+-type header_fields() :: header_v7() |
+ header_gnu() |
+ header_star() |
+ header_ustar().
+
+%% The overall tar reader; it holds the low-level file handle,
+%% its access mode, current position, and the I/O primitives wrapper.
+-record(reader, {
+ handle :: file:io_device() | term(),
+ access :: read | write | ram,
+ pos = 0 :: non_neg_integer(),
+ func :: file_op()
+ }).
+-type reader() :: #reader{}.
+%% A reader for a regular file within the tar archive;
+%% it tracks its current state relative to that file.
+-record(reg_file_reader, {
+ handle :: reader(),
+ num_bytes = 0,
+ pos = 0,
+ size = 0
+ }).
+-type reg_file_reader() :: #reg_file_reader{}.
+%% A reader for a sparse file within the tar archive;
+%% it tracks its current state relative to that file.
+-record(sparse_file_reader, {
+ handle :: reader(),
+ num_bytes = 0, %% bytes remaining
+          pos = 0,          %% current position within the file
+ size = 0, %% total size of file
+ sparse_map = #sparse_array{}
+ }).
+-type sparse_file_reader() :: #sparse_file_reader{}.
+
+%% Types for the readers
+-type reader_type() :: reader() | reg_file_reader() | sparse_file_reader().
+-type handle() :: file:io_device() | term().
+
+%% Type for the I/O primitive wrapper function
+-type file_op() :: fun((write | close | read2 | position,
+ {handle(), iodata()} | handle() | {handle(), non_neg_integer()}
+ | {handle(), non_neg_integer()}) ->
+ ok | eof | {ok, string() | binary()} | {ok, non_neg_integer()}
+ | {error, term()}).
+
+%% These constants (except S_IFMT) are
+%% used to determine what type of device
+%% a file is. Namely, `S_IFMT band file_info.mode`
+%% will equal one of these constants, and tells us
+%% which type it is. The stdlib file_info record
+%% does not differentiate between device types, and
+%% will not allow us to differentiate between sockets
+%% and named pipes. These constants are pulled from libc.
+-define(S_IFMT, 61440).
+-define(S_IFSOCK, 49152). %% socket
+-define(S_FIFO, 4096). %% fifo/named pipe
+-define(S_IFBLK, 24576). %% block device
+-define(S_IFCHR, 8192). %% character device
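+%% For example, (FileInfo#file_info.mode band ?S_IFMT) =:= ?S_IFCHR holds for a
+%% character device, as used by do_fileinfo_to_header/3 in erl_tar.erl.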
+
+%% Typeflag constants for the tar header
+-define(TYPE_REGULAR, $0). %% regular file
+-define(TYPE_REGULAR_A, 0). %% regular file
+-define(TYPE_LINK, $1). %% hard link
+-define(TYPE_SYMLINK, $2). %% symbolic link
+-define(TYPE_CHAR, $3). %% character device node
+-define(TYPE_BLOCK, $4). %% block device node
+-define(TYPE_DIR, $5). %% directory
+-define(TYPE_FIFO, $6). %% fifo node
+-define(TYPE_CONT, $7). %% reserved
+-define(TYPE_X_HEADER, $x). %% extended header
+-define(TYPE_X_GLOBAL_HEADER, $g). %% global extended header
+-define(TYPE_GNU_LONGNAME, $L). %% next file has a long name
+-define(TYPE_GNU_LONGLINK, $K). %% next file symlinks to a file with a long name
+-define(TYPE_GNU_SPARSE, $S). %% sparse file
+
+%% Mode constants from tar spec
+-define(MODE_ISUID, 4000). %% set uid
+-define(MODE_ISGID, 2000). %% set gid
+-define(MODE_ISVTX, 1000). %% save text (sticky bit)
+-define(MODE_ISDIR, 40000). %% directory
+-define(MODE_ISFIFO, 10000). %% fifo
+-define(MODE_ISREG, 100000). %% regular file
+-define(MODE_ISLNK, 120000). %% symbolic link
+-define(MODE_ISBLK, 60000). %% block special file
+-define(MODE_ISCHR, 20000). %% character special file
+-define(MODE_ISSOCK, 140000). %% socket
+
+%% Keywords for PAX extended header
+-define(PAX_ATIME, <<"atime">>).
+-define(PAX_CHARSET, <<"charset">>).
+-define(PAX_COMMENT, <<"comment">>).
+-define(PAX_CTIME, <<"ctime">>). %% ctime is not a valid pax header
+-define(PAX_GID, <<"gid">>).
+-define(PAX_GNAME, <<"gname">>).
+-define(PAX_LINKPATH, <<"linkpath">>).
+-define(PAX_MTIME, <<"mtime">>).
+-define(PAX_PATH, <<"path">>).
+-define(PAX_SIZE, <<"size">>).
+-define(PAX_UID, <<"uid">>).
+-define(PAX_UNAME, <<"uname">>).
+-define(PAX_XATTR, <<"SCHILY.xattr.">>).
+-define(PAX_XATTR_STR, "SCHILY.xattr.").
+-define(PAX_NONE, <<"">>).
+
+%% Tar format constants
+%% Unknown format
+-define(FORMAT_UNKNOWN, 0).
+%% The format of the original Unix V7 tar tool prior to standardization
+-define(FORMAT_V7, 1).
+%% The old and new GNU formats, incompatible with USTAR.
+%% This covers the old GNU sparse extension, but it does
+%% not cover the GNU sparse extensions using PAX headers,
+%% versions 0.0, 0.1, and 1.0; these fall under the PAX format.
+-define(FORMAT_GNU, 2).
+%% Schily's tar format, which is incompatible with USTAR.
+%% This does not cover STAR extensions to the PAX format; these
+%% fall under the PAX format.
+-define(FORMAT_STAR, 3).
+%% USTAR is the former standardization of tar defined in POSIX.1-1988;
+%% it is incompatible with the GNU and STAR formats.
+-define(FORMAT_USTAR, 4).
+%% PAX is the latest standardization of tar defined in POSIX.1-2001.
+%% This is an extension of USTAR and is "backwards compatible" with it.
+%%
+%% Some newer formats add their own extensions to PAX, such as GNU sparse
+%% files and SCHILY extended attributes. Since they are backwards compatible
+%% with PAX, they will be labelled as "PAX".
+-define(FORMAT_PAX, 5).
+
+%% Magic constants
+-define(MAGIC_GNU, <<"ustar ">>).
+-define(VERSION_GNU, <<" \x00">>).
+-define(MAGIC_USTAR, <<"ustar\x00">>).
+-define(VERSION_USTAR, <<"00">>).
+-define(TRAILER_STAR, <<"tar\x00">>).
+
+%% Size constants
+-define(BLOCK_SIZE, 512). %% size of each block in a tar stream
+-define(NAME_SIZE, 100). %% max length of the name field in USTAR format
+-define(PREFIX_SIZE, 155). %% max length of the prefix field in USTAR format
+
+%% Maximum size of a nanosecond value as an integer
+-define(MAX_NANO_INT_SIZE, 9).
+%% Maximum size of a 64-bit signed integer
+-define(MAX_INT64, (1 bsl 63 - 1)).
+
+-define(PAX_GNU_SPARSE_NUMBLOCKS, <<"GNU.sparse.numblocks">>).
+-define(PAX_GNU_SPARSE_OFFSET, <<"GNU.sparse.offset">>).
+-define(PAX_GNU_SPARSE_NUMBYTES, <<"GNU.sparse.numbytes">>).
+-define(PAX_GNU_SPARSE_MAP, <<"GNU.sparse.map">>).
+-define(PAX_GNU_SPARSE_NAME, <<"GNU.sparse.name">>).
+-define(PAX_GNU_SPARSE_MAJOR, <<"GNU.sparse.major">>).
+-define(PAX_GNU_SPARSE_MINOR, <<"GNU.sparse.minor">>).
+-define(PAX_GNU_SPARSE_SIZE, <<"GNU.sparse.size">>).
+-define(PAX_GNU_SPARSE_REALSIZE, <<"GNU.sparse.realsize">>).
+
+-define(V7_NAME, 0).
+-define(V7_NAME_LEN, 100).
+-define(V7_MODE, 100).
+-define(V7_MODE_LEN, 8).
+-define(V7_UID, 108).
+-define(V7_UID_LEN, 8).
+-define(V7_GID, 116).
+-define(V7_GID_LEN, 8).
+-define(V7_SIZE, 124).
+-define(V7_SIZE_LEN, 12).
+-define(V7_MTIME, 136).
+-define(V7_MTIME_LEN, 12).
+-define(V7_CHKSUM, 148).
+-define(V7_CHKSUM_LEN, 8).
+-define(V7_TYPE, 156).
+-define(V7_TYPE_LEN, 1).
+-define(V7_LINKNAME, 157).
+-define(V7_LINKNAME_LEN, 100).
+
+-define(STAR_TRAILER, 508).
+-define(STAR_TRAILER_LEN, 4).
+
+-define(USTAR_MAGIC, 257).
+-define(USTAR_MAGIC_LEN, 6).
+-define(USTAR_VERSION, 263).
+-define(USTAR_VERSION_LEN, 2).
+-define(USTAR_UNAME, 265).
+-define(USTAR_UNAME_LEN, 32).
+-define(USTAR_GNAME, 297).
+-define(USTAR_GNAME_LEN, 32).
+-define(USTAR_DEVMAJ, 329).
+-define(USTAR_DEVMAJ_LEN, 8).
+-define(USTAR_DEVMIN, 337).
+-define(USTAR_DEVMIN_LEN, 8).
+-define(USTAR_PREFIX, 345).
+-define(USTAR_PREFIX_LEN, 155).
+
+-define(GNU_MAGIC, 257).
+-define(GNU_MAGIC_LEN, 6).
+-define(GNU_VERSION, 263).
+-define(GNU_VERSION_LEN, 2).
+
+%% ?BLOCK_SIZE of zero-bytes.
+%% Two of these in a row mark the end of an archive.
+-define(ZERO_BLOCK, <<0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,
+ 0,0,0,0,0,0,0,0,0,0,0,0>>).
+
+-define(BILLION, 1000000000).
+
+-define(EPOCH, {{1970,1,1}, {0,0,0}}).
diff --git a/lib/stdlib/src/ets.erl b/lib/stdlib/src/ets.erl
index 20de06fd0b..d6fd1e3ea1 100644
--- a/lib/stdlib/src/ets.erl
+++ b/lib/stdlib/src/ets.erl
@@ -51,8 +51,8 @@
-type tab() :: atom() | tid().
-type type() :: set | ordered_set | bag | duplicate_bag.
-type continuation() :: '$end_of_table'
- | {tab(),integer(),integer(),binary(),list(),integer()}
- | {tab(),_,_,integer(),binary(),list(),integer(),integer()}.
+ | {tab(),integer(),integer(),comp_match_spec(),list(),integer()}
+ | {tab(),_,_,integer(),comp_match_spec(),list(),integer(),integer()}.
-opaque tid() :: integer().
@@ -488,7 +488,7 @@ update_element(_, _, _) ->
%%% End of BIFs
--opaque comp_match_spec() :: binary(). %% this one is REALLY opaque
+-opaque comp_match_spec() :: reference().
-spec match_spec_run(List, CompiledMatchSpec) -> list() when
List :: [tuple()],
@@ -505,28 +505,28 @@ match_spec_run(List, CompiledMS) ->
repair_continuation('$end_of_table', _) ->
'$end_of_table';
%% ordered_set
-repair_continuation(Untouched = {Table,Lastkey,EndCondition,N2,Bin,L2,N3,N4}, MS)
+repair_continuation(Untouched = {Table,Lastkey,EndCondition,N2,MSRef,L2,N3,N4}, MS)
when %% (is_atom(Table) or is_integer(Table)),
is_integer(N2),
- byte_size(Bin) =:= 0,
+ %% is_reference(MSRef),
is_list(L2),
is_integer(N3),
is_integer(N4) ->
- case ets:is_compiled_ms(Bin) of
+ case ets:is_compiled_ms(MSRef) of
true ->
Untouched;
false ->
{Table,Lastkey,EndCondition,N2,ets:match_spec_compile(MS),L2,N3,N4}
end;
%% set/bag/duplicate_bag
-repair_continuation(Untouched = {Table,N1,N2,Bin,L,N3}, MS)
+repair_continuation(Untouched = {Table,N1,N2,MSRef,L,N3}, MS)
when %% (is_atom(Table) or is_integer(Table)),
is_integer(N1),
is_integer(N2),
- byte_size(Bin) =:= 0,
+ %% is_reference(MSRef),
is_list(L),
is_integer(N3) ->
- case ets:is_compiled_ms(Bin) of
+ case ets:is_compiled_ms(MSRef) of
true ->
Untouched;
false ->
diff --git a/lib/stdlib/src/filelib.erl b/lib/stdlib/src/filelib.erl
index 7029389e2f..daa18da9aa 100644
--- a/lib/stdlib/src/filelib.erl
+++ b/lib/stdlib/src/filelib.erl
@@ -24,6 +24,7 @@
-export([fold_files/5, last_modified/1, file_size/1, ensure_dir/1]).
-export([wildcard/3, is_dir/2, is_file/2, is_regular/2]).
-export([fold_files/6, last_modified/2, file_size/2]).
+-export([find_file/2, find_file/3, find_source/1, find_source/2, find_source/3]).
%% For debugging/testing.
-export([compile_wildcard/1]).
@@ -517,3 +518,124 @@ eval_list_dir(Dir, erl_prim_loader) ->
end;
eval_list_dir(Dir, Mod) ->
Mod:list_dir(Dir).
+
+%% Getting the rules to use for file search
+
+keep_dir_search_rules(Rules) ->
+ [T || {_,_}=T <- Rules].
+
+keep_suffix_search_rules(Rules) ->
+ [T || {_,_,_}=T <- Rules].
+
+get_search_rules() ->
+ case application:get_env(kernel, source_search_rules) of
+ undefined -> default_search_rules();
+ {ok, []} -> default_search_rules();
+ {ok, R} when is_list(R) -> R
+ end.
+
+default_search_rules() ->
+    [%% suffix-specific rules for source search
+ {".beam", ".erl", erl_source_search_rules()},
+ {".erl", ".yrl", []},
+ {"", ".src", erl_source_search_rules()},
+ {".so", ".c", c_source_search_rules()},
+ {".o", ".c", c_source_search_rules()},
+ {"", ".c", c_source_search_rules()},
+ {"", ".in", basic_source_search_rules()},
+ %% plain old directory rules, backwards compatible
+ {"", ""},
+ {"ebin","src"},
+ {"ebin","esrc"}
+ ].
+
+basic_source_search_rules() ->
+ (erl_source_search_rules()
+ ++ c_source_search_rules()).
+
+erl_source_search_rules() ->
+ [{"ebin","src"}, {"ebin","esrc"}].
+
+c_source_search_rules() ->
+ [{"priv","c_src"}, {"priv","src"}, {"bin","c_src"}, {"bin","src"}, {"", "src"}].
+
+%% Looks for a file relative to a given directory
+
+-type find_file_rule() :: {ObjDirSuffix::string(), SrcDirSuffix::string()}.
+
+-spec find_file(filename(), filename()) ->
+ {ok, filename()} | {error, not_found}.
+find_file(Filename, Dir) ->
+ find_file(Filename, Dir, []).
+
+-spec find_file(filename(), filename(), [find_file_rule()]) ->
+ {ok, filename()} | {error, not_found}.
+find_file(Filename, Dir, []) ->
+ find_file(Filename, Dir, get_search_rules());
+find_file(Filename, Dir, Rules) ->
+ try_dir_rules(keep_dir_search_rules(Rules), Filename, Dir).
+
+%% Looks for a source file relative to the object file name and directory
+
+-type find_source_rule() :: {ObjExtension::string(), SrcExtension::string(),
+ [find_file_rule()]}.
+
+-spec find_source(filename()) ->
+ {ok, filename()} | {error, not_found}.
+find_source(FilePath) ->
+ find_source(filename:basename(FilePath), filename:dirname(FilePath)).
+
+-spec find_source(filename(), filename()) ->
+ {ok, filename()} | {error, not_found}.
+find_source(Filename, Dir) ->
+ find_source(Filename, Dir, []).
+
+-spec find_source(filename(), filename(), [find_source_rule()]) ->
+ {ok, filename()} | {error, not_found}.
+find_source(Filename, Dir, []) ->
+ find_source(Filename, Dir, get_search_rules());
+find_source(Filename, Dir, Rules) ->
+ try_suffix_rules(keep_suffix_search_rules(Rules), Filename, Dir).
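+%% For example (illustrative path, assuming no source_search_rules are set),
+%% find_source("/otp/lib/stdlib/ebin/filelib.beam") looks for "filelib.erl"
+%% first in the object's own directory and then, per the default rules, in the
+%% sibling "src" and "esrc" directories.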
+
+try_suffix_rules(Rules, Filename, Dir) ->
+ Ext = filename:extension(Filename),
+ try_suffix_rules(Rules, filename:rootname(Filename, Ext), Dir, Ext).
+
+try_suffix_rules([{Ext,Src,Rules}|Rest], Root, Dir, Ext)
+ when is_list(Src), is_list(Rules) ->
+ case try_dir_rules(add_local_search(Rules), Root ++ Src, Dir) of
+ {ok, File} -> {ok, File};
+ _Other ->
+ try_suffix_rules(Rest, Root, Dir, Ext)
+ end;
+try_suffix_rules([_|Rest], Root, Dir, Ext) ->
+ try_suffix_rules(Rest, Root, Dir, Ext);
+try_suffix_rules([], _Root, _Dir, _Ext) ->
+ {error, not_found}.
+
+%% ensuring we check the directory of the object file before any other directory
+add_local_search(Rules) ->
+ Local = {"",""},
+ [Local] ++ lists:filter(fun (X) -> X =/= Local end, Rules).
+
+try_dir_rules([{From, To}|Rest], Filename, Dir)
+ when is_list(From), is_list(To) ->
+ case try_dir_rule(Dir, Filename, From, To) of
+ {ok, File} -> {ok, File};
+ error -> try_dir_rules(Rest, Filename, Dir)
+ end;
+try_dir_rules([], _Filename, _Dir) ->
+ {error, not_found}.
+
+try_dir_rule(Dir, Filename, From, To) ->
+ case lists:suffix(From, Dir) of
+ true ->
+ NewDir = lists:sublist(Dir, 1, length(Dir)-length(From))++To,
+ Src = filename:join(NewDir, Filename),
+ case is_regular(Src) of
+ true -> {ok, Src};
+ false -> error
+ end;
+ false ->
+ error
+ end.
diff --git a/lib/stdlib/src/filename.erl b/lib/stdlib/src/filename.erl
index c4586171ca..2a2f25dcd2 100644
--- a/lib/stdlib/src/filename.erl
+++ b/lib/stdlib/src/filename.erl
@@ -19,6 +19,9 @@
%%
-module(filename).
+-deprecated({find_src,1,next_major_release}).
+-deprecated({find_src,2,next_major_release}).
+
%% Purpose: Provides generic manipulation of filenames.
%%
%% Generally, these functions accept filenames in the native format
@@ -34,8 +37,8 @@
-export([absname/1, absname/2, absname_join/2,
basename/1, basename/2, dirname/1,
extension/1, join/1, join/2, pathtype/1,
- rootname/1, rootname/2, split/1, nativename/1]).
--export([find_src/1, find_src/2, flatten/1]).
+ rootname/1, rootname/2, split/1, flatten/1, nativename/1]).
+-export([find_src/1, find_src/2]). % deprecated
-export([basedir/2, basedir/3]).
%% Undocumented and unsupported exports.
@@ -750,8 +753,12 @@ separators() ->
_ -> {false, false}
end.
-
-
+%% NOTE: The find_src/1/2 functions are deprecated; they try to do too much
+%% at once and are not a good fit for this module. Parts of the code have
+%% been moved to filelib:find_file/2 instead. Only this part of this
+%% module is allowed to call the filelib module; such mutual dependency
+%% should otherwise be avoided! This code should eventually be removed.
+%%
%% find_src(Module) --
%% find_src(Module, Rules) --
%%
@@ -793,14 +800,7 @@ separators() ->
| {'d', atom()},
ErrorReason :: 'non_existing' | 'preloaded' | 'interpreted'.
find_src(Mod) ->
- Default = [{"", ""}, {"ebin", "src"}, {"ebin", "esrc"}],
- Rules =
- case application:get_env(kernel, source_search_rules) of
- undefined -> Default;
- {ok, []} -> Default;
- {ok, R} when is_list(R) -> R
- end,
- find_src(Mod, Rules).
+ find_src(Mod, []).
-spec find_src(Beam, Rules) -> {SourceFile, Options}
| {error, {ErrorReason, Module}} when
@@ -816,44 +816,47 @@ find_src(Mod) ->
ErrorReason :: 'non_existing' | 'preloaded' | 'interpreted'.
find_src(Mod, Rules) when is_atom(Mod) ->
find_src(atom_to_list(Mod), Rules);
-find_src(File0, Rules) when is_list(File0) ->
- Mod = list_to_atom(basename(File0, ".erl")),
- File = rootname(File0, ".erl"),
- case readable_file(File++".erl") of
- true ->
- try_file(File, Mod, Rules);
- false ->
- try_file(undefined, Mod, Rules)
- end.
-
-try_file(File, Mod, Rules) ->
+find_src(ModOrFile, Rules) when is_list(ModOrFile) ->
+ Extension = ".erl",
+ Mod = list_to_atom(basename(ModOrFile, Extension)),
case code:which(Mod) of
Possibly_Rel_Path when is_list(Possibly_Rel_Path) ->
- {ok, Cwd} = file:get_cwd(),
- Path = join(Cwd, Possibly_Rel_Path),
- try_file(File, Path, Mod, Rules);
+ {ok, Cwd} = file:get_cwd(),
+ ObjPath = make_abs_path(Cwd, Possibly_Rel_Path),
+ find_src_1(ModOrFile, ObjPath, Mod, Extension, Rules);
Ecode when is_atom(Ecode) -> % Ecode :: ecode()
{error, {Ecode, Mod}}
end.
%% At this point, the Mod is known to be valid.
%% If the source name is not known, find it.
-%% Then get the compilation options.
-%% Returns: {SrcFile, Options}
+find_src_1(ModOrFile, ObjPath, Mod, Extension, Rules) ->
+ %% The documentation says this function must return the found path
+ %% without extension in all cases. Also, ModOrFile could be given with
+ %% or without extension. Hence the calls to rootname below.
+ ModOrFileRoot = rootname(ModOrFile, Extension),
+ case filelib:is_regular(ModOrFileRoot++Extension) of
+ true ->
+ find_src_2(ModOrFileRoot, Mod);
+ false ->
+ SrcName = basename(ObjPath, code:objfile_extension()) ++ Extension,
+ case filelib:find_file(SrcName, dirname(ObjPath), Rules) of
+ {ok, SrcFile} ->
+ find_src_2(rootname(SrcFile, Extension), Mod);
+ Error ->
+ Error
+ end
+ end.
-try_file(undefined, ObjFilename, Mod, Rules) ->
- case get_source_file(ObjFilename, Mod, Rules) of
- {ok, File} -> try_file(File, ObjFilename, Mod, Rules);
- Error -> Error
- end;
-try_file(Src, _ObjFilename, Mod, _Rules) ->
+%% Get the compilation options and return {SrcFileRoot, Options}
+find_src_2(SrcRoot, Mod) ->
List = case Mod:module_info(compile) of
none -> [];
List0 -> List0
end,
Options = proplists:get_value(options, List, []),
{ok, Cwd} = file:get_cwd(),
- AbsPath = make_abs_path(Cwd, Src),
+ AbsPath = make_abs_path(Cwd, SrcRoot),
{AbsPath, filter_options(dirname(AbsPath), Options, [])}.
%% Filters the options.
@@ -884,42 +887,6 @@ filter_options(Base, [_|Rest], Result) ->
filter_options(_Base, [], Result) ->
Result.
-%% Gets the source file given path of object code and module name.
-
-get_source_file(Obj, Mod, Rules) ->
- source_by_rules(dirname(Obj), atom_to_list(Mod), Rules).
-
-source_by_rules(Dir, Base, [{From, To}|Rest]) ->
- case try_rule(Dir, Base, From, To) of
- {ok, File} -> {ok, File};
- error -> source_by_rules(Dir, Base, Rest)
- end;
-source_by_rules(_Dir, _Base, []) ->
- {error, source_file_not_found}.
-
-try_rule(Dir, Base, From, To) ->
- case lists:suffix(From, Dir) of
- true ->
- NewDir = lists:sublist(Dir, 1, length(Dir)-length(From))++To,
- Src = join(NewDir, Base),
- case readable_file(Src++".erl") of
- true -> {ok, Src};
- false -> error
- end;
- false ->
- error
- end.
-
-readable_file(File) ->
- case file:read_file_info(File) of
- {ok, #file_info{type=regular, access=read}} ->
- true;
- {ok, #file_info{type=regular, access=read_write}} ->
- true;
- _Other ->
- false
- end.
-
make_abs_path(BasePath, Path) ->
join(BasePath, Path).
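
For context (an illustrative sketch, not part of the patch): with find_src/1,2 deprecated, callers are expected to move to filelib:find_source/1,3, which returns the full source file name as {ok, SrcFile} rather than the root name plus compile options. The wrapper below is hypothetical; the filelib and code calls are the APIs this commit introduces and deprecates:

-module(find_src_migration).
-export([source_of/1]).

%% Old style (deprecated here):  {SrcRoot, _Opts} = filename:find_src(Mod)
%% New style:
source_of(Mod) when is_atom(Mod) ->
    case code:which(Mod) of
        BeamFile when is_list(BeamFile) ->
            %% {ok, "/path/to/mod.erl"} | {error, not_found}
            filelib:find_source(BeamFile);
        Reason when is_atom(Reason) ->
            {error, Reason}    %% non_existing | preloaded | cover_compiled
    end.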
diff --git a/lib/stdlib/src/gen_fsm.erl b/lib/stdlib/src/gen_fsm.erl
index 6e7528fd98..e925a75fe8 100644
--- a/lib/stdlib/src/gen_fsm.erl
+++ b/lib/stdlib/src/gen_fsm.erl
@@ -273,7 +273,7 @@ start_timer(Time, Msg) ->
send_event_after(Time, Event) ->
erlang:start_timer(Time, self(), {'$gen_event', Event}).
-%% Returns the remaing time for the timer if Ref referred to
+%% Returns the remaining time for the timer if Ref referred to
%% an active timer/send_event_after, false otherwise.
cancel_timer(Ref) ->
case erlang:cancel_timer(Ref) of
diff --git a/lib/stdlib/src/io_lib.erl b/lib/stdlib/src/io_lib.erl
index ad98bc0420..a91143a764 100644
--- a/lib/stdlib/src/io_lib.erl
+++ b/lib/stdlib/src/io_lib.erl
@@ -28,7 +28,7 @@
%% Most of the code here is derived from the original prolog versions and
%% from similar code written by Joe Armstrong and myself.
%%
-%% This module has been split into seperate modules:
+%% This module has been split into separate modules:
%% io_lib - basic write and utilities
%% io_lib_format - formatted output
%% io_lib_fread - formatted input
diff --git a/lib/stdlib/src/io_lib_format.erl b/lib/stdlib/src/io_lib_format.erl
index c7b75961cb..3113767614 100644
--- a/lib/stdlib/src/io_lib_format.erl
+++ b/lib/stdlib/src/io_lib_format.erl
@@ -265,7 +265,10 @@ control($W, [A,Depth], F, Adj, P, Pad, _Enc, _Str, _I) when is_integer(Depth) ->
term(io_lib:write(A, Depth), F, Adj, P, Pad);
control($P, [A,Depth], F, Adj, P, Pad, Enc, Str, I) when is_integer(Depth) ->
print(A, Depth, F, Adj, P, Pad, Enc, Str, I);
-control($s, [A], F, Adj, P, Pad, _Enc, _Str, _I) when is_atom(A) ->
+control($s, [A], F, Adj, P, Pad, latin1, _Str, _I) when is_atom(A) ->
+ L = iolist_to_chars(atom_to_list(A)),
+ string(L, F, Adj, P, Pad);
+control($s, [A], F, Adj, P, Pad, unicode, _Str, _I) when is_atom(A) ->
string(atom_to_list(A), F, Adj, P, Pad);
control($s, [L0], F, Adj, P, Pad, latin1, _Str, _I) ->
L = iolist_to_chars(L0),
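
The clause split above sends an atom argument of ~s through iolist_to_chars/1 in latin1 mode, so atoms containing code points above 255 are rejected, while ~ts still accepts them; the otp_14178_unicode_atoms case added to io_SUITE.erl further down exercises exactly this. A minimal sketch (expected results taken from that test case; the module name is made up):

-module(format_atom_example).
-export([demo/0]).

demo() ->
    "atom" = lists:flatten(io_lib:format("~s",  [atom])),
    "atom" = lists:flatten(io_lib:format("~ts", [atom])),
    %% ~ts handles atoms with code points above 255 ...
    _ = io_lib:format("~ts", ['кирилли́ческий атом']),
    %% ... while ~s now fails with badarg for such atoms.
    {'EXIT', {badarg, _}} = (catch io_lib:format("~s", ['\x{100}'])),
    ok.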
diff --git a/lib/stdlib/src/otp_internal.erl b/lib/stdlib/src/otp_internal.erl
index f4257fb571..2a0e3118d0 100644
--- a/lib/stdlib/src/otp_internal.erl
+++ b/lib/stdlib/src/otp_internal.erl
@@ -47,9 +47,6 @@ obsolete(Module, Name, Arity) ->
obsolete_1(net, _, _) ->
{deprecated, "module 'net' obsolete; use 'net_adm'"};
-obsolete_1(erlang, hash, 2) ->
- {deprecated, {erlang, phash2, 2}};
-
obsolete_1(erlang, now, 0) ->
{deprecated,
"Deprecated BIF. See the \"Time and Time Correction in Erlang\" "
@@ -553,6 +550,20 @@ obsolete_1(overload, _, _) ->
obsolete_1(rpc, safe_multi_server_call, A) when A =:= 2; A =:= 3 ->
{removed, {rpc, multi_server_call, A}};
+%% Added in OTP 20.
+
+obsolete_1(filename, find_src, 1) ->
+ {deprecated, "deprecated; use filelib:find_source/1 instead"};
+obsolete_1(filename, find_src, 2) ->
+ {deprecated, "deprecated; use filelib:find_source/3 instead"};
+
+%% Removed in OTP 20.
+
+obsolete_1(erlang, hash, 2) ->
+ {removed, {erlang, phash2, 2}, "20.0"};
+
+%% not obsolete
+
obsolete_1(_, _, _) ->
no.
diff --git a/lib/stdlib/src/proplists.erl b/lib/stdlib/src/proplists.erl
index 21de8c45c1..340dfdcac9 100644
--- a/lib/stdlib/src/proplists.erl
+++ b/lib/stdlib/src/proplists.erl
@@ -83,7 +83,7 @@ property(Key, Value) ->
%% ---------------------------------------------------------------------
-%% @doc Unfolds all occurences of atoms in <code>ListIn</code> to tuples
+%% @doc Unfolds all occurrences of atoms in <code>ListIn</code> to tuples
%% <code>{Atom, true}</code>.
%%
%% @see compact/1
diff --git a/lib/stdlib/src/shell_default.erl b/lib/stdlib/src/shell_default.erl
index cd63ab28b5..a0c1d98513 100644
--- a/lib/stdlib/src/shell_default.erl
+++ b/lib/stdlib/src/shell_default.erl
@@ -23,7 +23,7 @@
-module(shell_default).
--export([help/0,lc/1,c/1,c/2,nc/1,nl/1,l/1,i/0,pid/3,i/3,m/0,m/1,lm/0,mm/0,
+-export([help/0,lc/1,c/1,c/2,c/3,nc/1,nl/1,l/1,i/0,pid/3,i/3,m/0,m/1,lm/0,mm/0,
memory/0,memory/1,uptime/0,
erlangrc/1,bi/1, regs/0, flush/0,pwd/0,ls/0,ls/1,cd/1,
y/1, y/2,
@@ -72,6 +72,7 @@ bi(I) -> c:bi(I).
bt(Pid) -> c:bt(Pid).
c(File) -> c:c(File).
c(File, Opt) -> c:c(File, Opt).
+c(File, Opt, Filter) -> c:c(File, Opt, Filter).
cd(D) -> c:cd(D).
erlangrc(X) -> c:erlangrc(X).
flush() -> c:flush().
diff --git a/lib/stdlib/test/base64_SUITE.erl b/lib/stdlib/test/base64_SUITE.erl
index d0abe5c961..6ddc67464c 100644
--- a/lib/stdlib/test/base64_SUITE.erl
+++ b/lib/stdlib/test/base64_SUITE.erl
@@ -82,7 +82,7 @@ base64_decode(Config) when is_list(Config) ->
Alphabet = list_to_binary(lists:seq(0, 255)),
Alphabet = base64:decode(base64:encode(Alphabet)),
- %% Encoded base 64 strings may be devided by non base 64 chars.
+ %% Encoded base 64 strings may be divided by non base 64 chars.
%% In this cases whitespaces.
"0123456789!@#0^&*();:<>,. []{}" =
base64:decode_to_string(
diff --git a/lib/stdlib/test/beam_lib_SUITE.erl b/lib/stdlib/test/beam_lib_SUITE.erl
index 4521ecc0ef..279e15f703 100644
--- a/lib/stdlib/test/beam_lib_SUITE.erl
+++ b/lib/stdlib/test/beam_lib_SUITE.erl
@@ -81,12 +81,8 @@ normal(Conf) when is_list(Conf) ->
NoOfTables = length(ets:all()),
P0 = pps(),
- CompileFlags = [{outdir,PrivDir}, debug_info],
- {ok,_} = compile:file(Source, CompileFlags),
- {ok, Binary} = file:read_file(BeamFile),
-
- do_normal(BeamFile),
- do_normal(Binary),
+ do_normal(Source, PrivDir, BeamFile, []),
+ do_normal(Source, PrivDir, BeamFile, [no_utf8_atoms]),
{ok,_} = compile:file(Source, [{outdir,PrivDir}, no_debug_info]),
{ok, {simple, [{abstract_code, no_abstract_code}]}} =
@@ -101,7 +97,15 @@ normal(Conf) when is_list(Conf) ->
true = (P0 == pps()),
ok.
-do_normal(BeamFile) ->
+do_normal(Source, PrivDir, BeamFile, Opts) ->
+ CompileFlags = [{outdir,PrivDir}, debug_info | Opts],
+ {ok,_} = compile:file(Source, CompileFlags),
+ {ok, Binary} = file:read_file(BeamFile),
+
+ do_normal(BeamFile, Opts),
+ do_normal(Binary, Opts).
+
+do_normal(BeamFile, Opts) ->
Imports = {imports, [{erlang, get_module_info, 1},
{erlang, get_module_info, 2},
{lists, member, 2}]},
@@ -130,20 +134,31 @@ do_normal(BeamFile) ->
beam_lib:chunks(BeamFile, [abstract_code]),
%% Test reading optional chunks.
- All = ["Atom", "Code", "StrT", "ImpT", "ExpT", "FunT", "LitT"],
+ All = ["Atom", "Code", "StrT", "ImpT", "ExpT", "FunT", "LitT", "AtU8"],
{ok,{simple,Chunks}} = beam_lib:chunks(BeamFile, All, [allow_missing_chunks]),
- verify_simple(Chunks).
+ case {verify_simple(Chunks),Opts} of
+ {{missing_chunk, AtomBin}, []} when is_binary(AtomBin) -> ok;
+ {{AtomBin, missing_chunk}, [no_utf8_atoms]} when is_binary(AtomBin) -> ok
+ end,
-verify_simple([{"Atom", AtomBin},
+ %% Make sure that reading the atom chunk works when the 'allow_missing_chunks'
+ %% option is used.
+ Some = ["Code",atoms,"ExpT","LitT"],
+ {ok,{simple,SomeChunks}} = beam_lib:chunks(BeamFile, Some, [allow_missing_chunks]),
+ [{"Code",<<_/binary>>},{atoms,[_|_]},{"ExpT",<<_/binary>>},{"LitT",missing_chunk}] =
+ SomeChunks.
+
+verify_simple([{"Atom", PlainAtomChunk},
{"Code", CodeBin},
{"StrT", StrBin},
{"ImpT", ImpBin},
{"ExpT", ExpBin},
{"FunT", missing_chunk},
- {"LitT", missing_chunk}])
- when is_binary(AtomBin), is_binary(CodeBin), is_binary(StrBin),
+ {"LitT", missing_chunk},
+ {"AtU8", AtU8Chunk}])
+ when is_binary(CodeBin), is_binary(StrBin),
is_binary(ImpBin), is_binary(ExpBin) ->
- ok.
+ {PlainAtomChunk, AtU8Chunk}.
%% Read invalid beam files.
error(Conf) when is_list(Conf) ->
@@ -211,7 +226,7 @@ last_chunk(Bin) ->
do_error(BeamFile, ACopy) ->
%% evil tests
Chunks = chunk_info(BeamFile),
- {value, {_, AtomStart, _}} = lists:keysearch("Atom", 1, Chunks),
+ {value, {_, AtomStart, _}} = lists:keysearch("AtU8", 1, Chunks),
{value, {_, ImportStart, _}} = lists:keysearch("ImpT", 1, Chunks),
{value, {_, AbstractStart, _}} = lists:keysearch("Abst", 1, Chunks),
{value, {_, AttributesStart, _}} =
@@ -234,7 +249,7 @@ do_error(BeamFile, ACopy) ->
verify(not_a_beam_file, beam_lib:info(BF7)),
BF8 = set_byte(ACopy, BeamFile, 13, 17),
- verify(missing_chunk, beam_lib:chunks(BF8, ["Atom"])),
+ verify(missing_chunk, beam_lib:chunks(BF8, ["AtU8"])),
BF9 = set_byte(ACopy, BeamFile, CompileInfoStart+10, 17),
verify(invalid_chunk, beam_lib:chunks(BF9, [compile_info])).
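
Background for the chunk-name changes above (a sketch, not part of the patch): a module compiled with UTF-8 atom encoding carries an "AtU8" chunk instead of the old "Atom" chunk, so exactly one of the two exists in any given .beam file; with allow_missing_chunks the absent one is reported as missing_chunk, which is what the updated verify_simple/1 checks. A small hedged example:

-module(atom_chunk_example).
-export([atom_chunk_of/1]).

%% Report which atom chunk a .beam file carries: utf8 or latin1.
atom_chunk_of(BeamFile) ->
    {ok, {_Mod, Chunks}} =
        beam_lib:chunks(BeamFile, ["Atom", "AtU8"], [allow_missing_chunks]),
    case Chunks of
        [{"Atom", missing_chunk}, {"AtU8", Bin}] when is_binary(Bin) ->
            utf8;     %% the default for code compiled with OTP 20
        [{"Atom", Bin}, {"AtU8", missing_chunk}] when is_binary(Bin) ->
            latin1    %% e.g. compiled with the no_utf8_atoms option used above
    end.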
diff --git a/lib/stdlib/test/dets_SUITE.erl b/lib/stdlib/test/dets_SUITE.erl
index aa31fdde5a..95c9b47465 100644
--- a/lib/stdlib/test/dets_SUITE.erl
+++ b/lib/stdlib/test/dets_SUITE.erl
@@ -3012,8 +3012,13 @@ repair_continuation(Config) ->
MS = [{'_',[],[true]}],
- {[true], C1} = dets:select(Tab, MS, 1),
- C2 = binary_to_term(term_to_binary(C1)),
+ SRes = term_to_binary(dets:select(Tab, MS, 1)),
+ %% Get rid of compiled match spec
+ lists:foreach(fun (P) ->
+ garbage_collect(P)
+ end, processes()),
+ {[true], C2} = binary_to_term(SRes),
+
{'EXIT', {badarg, _}} = (catch dets:select(C2)),
C3 = dets:repair_continuation(C2, MS),
{[true], C4} = dets:select(C3),
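
The hunk above serializes the continuation and garbage-collects every process so that the compiled match specification inside it is really gone, which is what makes the dets:repair_continuation/2 call necessary. A hedged sketch of the intended usage pattern (not part of the patch; Tab is assumed to be an already opened dets table):

-module(dets_select_example).
-export([select_first/1]).

select_first(Tab) ->
    MS = [{'_', [], [true]}],
    case dets:select(Tab, MS, 1) of
        {Matches, Cont0} ->
            %% A continuation that has been sent to another node or passed
            %% through term_to_binary/binary_to_term loses its compiled
            %% match spec; repair it before continuing the traversal.
            Cont = dets:repair_continuation(Cont0, MS),
            {Matches, dets:select(Cont)};
        '$end_of_table' ->
            done
    end.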
diff --git a/lib/stdlib/test/edlin_expand_SUITE.erl b/lib/stdlib/test/edlin_expand_SUITE.erl
index 718d91c6a3..1f694ea549 100644
--- a/lib/stdlib/test/edlin_expand_SUITE.erl
+++ b/lib/stdlib/test/edlin_expand_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -21,7 +21,8 @@
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_testcase/2, end_per_testcase/2,
init_per_group/2,end_per_group/2]).
--export([normal/1, quoted_fun/1, quoted_module/1, quoted_both/1, erl_1152/1]).
+-export([normal/1, quoted_fun/1, quoted_module/1, quoted_both/1, erl_1152/1,
+ erl_352/1]).
-include_lib("common_test/include/ct.hrl").
@@ -36,7 +37,7 @@ suite() ->
{timetrap,{minutes,1}}].
all() ->
- [normal, quoted_fun, quoted_module, quoted_both, erl_1152].
+ [normal, quoted_fun, quoted_module, quoted_both, erl_1152, erl_352].
groups() ->
[].
@@ -153,6 +154,78 @@ erl_1152(Config) when is_list(Config) ->
"\n"++"foo"++" "++[1089]++_ = do_format(["foo",[1089]]),
ok.
+erl_352(Config) when is_list(Config) ->
+ erl_352_test(3, 3),
+
+ erl_352_test(3, 75),
+ erl_352_test(3, 76, [trailing]),
+ erl_352_test(4, 74),
+ erl_352_test(4, 75, [leading]),
+ erl_352_test(4, 76, [leading, trailing]),
+
+ erl_352_test(75, 3),
+ erl_352_test(76, 3, [leading]),
+ erl_352_test(74, 4),
+ erl_352_test(75, 4, [leading]),
+ erl_352_test(76, 4, [leading]),
+
+ erl_352_test(74, 74, [leading]),
+ erl_352_test(74, 75, [leading]),
+ erl_352_test(74, 76, [leading, trailing]).
+
+erl_352_test(PrefixLen, SuffixLen) ->
+ erl_352_test(PrefixLen, SuffixLen, []).
+
+erl_352_test(PrefixLen, SuffixLen, Dots) ->
+ io:format("\nPrefixLen = ~w, SuffixLen = ~w\n", [PrefixLen, SuffixLen]),
+
+ PrefixM = lists:duplicate(PrefixLen, $p),
+ SuffixM = lists:duplicate(SuffixLen, $s),
+ LM = [PrefixM ++ S ++ SuffixM || S <- ["1", "2"]],
+ StrM = do_format(LM),
+ check_leading(StrM, "", PrefixM, SuffixM, Dots),
+
+ PrefixF = lists:duplicate(PrefixLen, $p),
+ SuffixF = lists:duplicate(SuffixLen-2, $s),
+ LF = [{PrefixF ++ S ++ SuffixF, 1} || S <- ["1", "2"]],
+ StrF = do_format(LF),
+ true = check_leading(StrF, "/1", PrefixF, SuffixF, Dots),
+
+ ok.
+
+check_leading(FormStr, ArityStr, Prefix, Suffix, Dots) ->
+ List = string:tokens(FormStr, "\n "),
+ io:format("~p\n", [List]),
+ true = lists:all(fun(L) -> length(L) < 80 end, List),
+ case lists:member(leading, Dots) of
+ true ->
+ true = lists:all(fun(L) ->
+ {"...", Rest} = lists:split(3, L),
+ check_trailing(Rest, ArityStr,
+ Suffix, Dots)
+ end, List);
+ false ->
+ true = lists:all(fun(L) ->
+ {Prefix, Rest} =
+ lists:split(length(Prefix), L),
+ check_trailing(Rest, ArityStr,
+ Suffix, Dots)
+ end, List)
+ end.
+
+check_trailing([I|Str], ArityStr, Suffix, Dots) ->
+ true = lists:member(I, [$1, $2]),
+ case lists:member(trailing, Dots) of
+ true ->
+ {Rest, "..." ++ ArityStr} =
+ lists:split(length(Str) - (3 + length(ArityStr)), Str),
+ true = lists:prefix(Rest, Suffix);
+ false ->
+ {Rest, ArityStr} =
+ lists:split(length(Str) - length(ArityStr), Str),
+ Rest =:= Suffix
+ end.
+
do_expand(String) ->
edlin_expand:expand(lists:reverse(String)).
diff --git a/lib/stdlib/test/erl_lint_SUITE.erl b/lib/stdlib/test/erl_lint_SUITE.erl
index c90f855b3b..c7dcd9ae16 100644
--- a/lib/stdlib/test/erl_lint_SUITE.erl
+++ b/lib/stdlib/test/erl_lint_SUITE.erl
@@ -2002,22 +2002,22 @@ otp_5362(Config) when is_list(Config) ->
<<"-compile(nowarn_deprecated_function).
-compile(nowarn_bif_clash).
spawn(A) ->
- erlang:hash(A, 3000),
+ erlang:now(),
spawn(A).
">>,
- {[nowarn_unused_function,
+ {[nowarn_unused_function,
warn_deprecated_function,
warn_bif_clash]},
{error,
[{5,erl_lint,{call_to_redefined_old_bif,{spawn,1}}}],
- [{4,erl_lint,{deprecated,{erlang,hash,2},{erlang,phash2,2},
- "a future release"}}]}},
-
+ [{4,erl_lint,{deprecated,{erlang,now,0},
+ "Deprecated BIF. See the \"Time and Time Correction in Erlang\" "
+ "chapter of the ERTS User's Guide for more information."}}]}},
{otp_5362_5,
<<"-compile(nowarn_deprecated_function).
-compile(nowarn_bif_clash).
spawn(A) ->
- erlang:hash(A, 3000),
+ erlang:now(),
spawn(A).
">>,
{[nowarn_unused_function]},
@@ -2026,37 +2026,37 @@ otp_5362(Config) when is_list(Config) ->
%% The special nowarn_X are not affected by general warn_X.
{otp_5362_6,
- <<"-compile({nowarn_deprecated_function,{erlang,hash,2}}).
+ <<"-compile({nowarn_deprecated_function,{erlang,now,0}}).
-compile({nowarn_bif_clash,{spawn,1}}).
spawn(A) ->
- erlang:hash(A, 3000),
+ erlang:now(),
spawn(A).
">>,
- {[nowarn_unused_function,
- warn_deprecated_function,
+ {[nowarn_unused_function,
+ warn_deprecated_function,
warn_bif_clash]},
{errors,
[{2,erl_lint,disallowed_nowarn_bif_clash}],[]}},
{otp_5362_7,
<<"-export([spawn/1]).
- -compile({nowarn_deprecated_function,{erlang,hash,2}}).
+ -compile({nowarn_deprecated_function,{erlang,now,0}}).
-compile({nowarn_bif_clash,{spawn,1}}).
-compile({nowarn_bif_clash,{spawn,2}}). % bad
-compile([{nowarn_deprecated_function,
- [{erlang,hash,-1},{3,hash,-1}]}, % 2 bad
- {nowarn_deprecated_function, {{a,b,c},hash,-1}}]). % bad
+ [{erlang,now,-1},{3,now,-1}]}, % 2 bad
+ {nowarn_deprecated_function, {{a,b,c},now,-1}}]). % bad
spawn(A) ->
- erlang:hash(A, 3000),
+ erlang:now(),
spawn(A).
">>,
{[nowarn_unused_function]},
{error,[{3,erl_lint,disallowed_nowarn_bif_clash},
{4,erl_lint,disallowed_nowarn_bif_clash},
{4,erl_lint,{bad_nowarn_bif_clash,{spawn,2}}}],
- [{5,erl_lint,{bad_nowarn_deprecated_function,{3,hash,-1}}},
- {5,erl_lint,{bad_nowarn_deprecated_function,{erlang,hash,-1}}},
- {5,erl_lint,{bad_nowarn_deprecated_function,{{a,b,c},hash,-1}}}]}
+ [{5,erl_lint,{bad_nowarn_deprecated_function,{3,now,-1}}},
+ {5,erl_lint,{bad_nowarn_deprecated_function,{erlang,now,-1}}},
+ {5,erl_lint,{bad_nowarn_deprecated_function,{{a,b,c},now,-1}}}]}
},
{otp_5362_8,
@@ -2064,14 +2064,15 @@ otp_5362(Config) when is_list(Config) ->
-compile(warn_deprecated_function).
-compile(warn_bif_clash).
spawn(A) ->
- erlang:hash(A, 3000),
+ erlang:now(),
spawn(A).
">>,
{[nowarn_unused_function,
{nowarn_bif_clash,{spawn,1}}]}, % has no effect
{warnings,
- [{5,erl_lint,{deprecated,{erlang,hash,2},{erlang,phash2,2},
- "a future release"}}]}},
+ [{5,erl_lint,{deprecated,{erlang,now,0},
+ "Deprecated BIF. See the \"Time and Time Correction in Erlang\" "
+ "chapter of the ERTS User's Guide for more information."}}]}},
{otp_5362_9,
<<"-include_lib(\"stdlib/include/qlc.hrl\").
@@ -2083,11 +2084,11 @@ otp_5362(Config) when is_list(Config) ->
[]},
{otp_5362_10,
- <<"-compile({nowarn_deprecated_function,{erlang,hash,2}}).
+ <<"-compile({nowarn_deprecated_function,{erlang,now,0}}).
-compile({nowarn_bif_clash,{spawn,1}}).
-import(x,[spawn/1]).
spin(A) ->
- erlang:hash(A, 3000),
+ erlang:now(),
spawn(A).
">>,
{[nowarn_unused_function,
@@ -2097,11 +2098,11 @@ otp_5362(Config) when is_list(Config) ->
[{2,erl_lint,disallowed_nowarn_bif_clash}],[]}},
{call_deprecated_function,
- <<"t(X) -> erlang:hash(X, 2000).">>,
+ <<"t(X) -> crypto:md5(X).">>,
[],
{warnings,
- [{1,erl_lint,{deprecated,{erlang,hash,2},
- {erlang,phash2,2},"a future release"}}]}},
+ [{1,erl_lint,{deprecated,{crypto,md5,1},
+ {crypto,hash,2}, "a future release"}}]}},
{call_removed_function,
<<"t(X) -> regexp:match(X).">>,
diff --git a/lib/stdlib/test/erl_scan_SUITE.erl b/lib/stdlib/test/erl_scan_SUITE.erl
index 4ae734eb65..7d0ba967f9 100644
--- a/lib/stdlib/test/erl_scan_SUITE.erl
+++ b/lib/stdlib/test/erl_scan_SUITE.erl
@@ -772,10 +772,9 @@ unicode() ->
erl_scan:string([1089]),
{error,{{1,1},erl_scan,{illegal,character}},{1,2}} =
erl_scan:string([1089], {1,1}),
- {error,{1,erl_scan,{illegal,atom}},1} =
- erl_scan:string("'a"++[1089]++"b'", 1),
- {error,{{1,1},erl_scan,{illegal,atom}},{1,6}} =
- erl_scan:string("'a"++[1089]++"b'", {1,1}),
+ {error,{{1,3},erl_scan,{illegal,character}},{1,4}} =
+ erl_scan:string("'a" ++ [999999999] ++ "c'", {1,1}),
+
test("\"a"++[1089]++"b\""),
{ok,[{char,1,1}],1} =
erl_scan_string([$$,$\\,$^,1089], 1),
@@ -786,8 +785,8 @@ unicode() ->
erl_scan:format_error(Error),
{error,{{1,1},erl_scan,_},{1,11}} =
erl_scan:string("\"qa\\x{aaa}",{1,1}),
- {error,{{1,1},erl_scan,{illegal,atom}},{1,12}} =
- erl_scan:string("'qa\\x{aaa}'",{1,1}),
+ {error,{{1,1},erl_scan,_},{1,11}} =
+ erl_scan:string("'qa\\x{aaa}",{1,1}),
{ok,[{char,1,1089}],1} =
erl_scan_string([$$,1089], 1),
@@ -904,9 +903,9 @@ more_chars() ->
%% OTP-10302. Unicode characters scanner/parser.
otp_10302(Config) when is_list(Config) ->
%% From unicode():
- {error,{1,erl_scan,{illegal,atom}},1} =
+ {ok,[{atom,1,'aсb'}],1} =
erl_scan:string("'a"++[1089]++"b'", 1),
- {error,{{1,1},erl_scan,{illegal,atom}},{1,12}} =
+ {ok,[{atom,{1,1},'qaપ'}],{1,12}} =
erl_scan:string("'qa\\x{aaa}'",{1,1}),
{ok,[{char,1,1089}],1} = erl_scan_string([$$,1089], 1),
diff --git a/lib/stdlib/test/ets_SUITE.erl b/lib/stdlib/test/ets_SUITE.erl
index f68d5eca3f..8581440d58 100644
--- a/lib/stdlib/test/ets_SUITE.erl
+++ b/lib/stdlib/test/ets_SUITE.erl
@@ -22,7 +22,7 @@
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_group/2,end_per_group/2]).
-export([default/1,setbag/1,badnew/1,verybadnew/1,named/1,keypos2/1,
- privacy/1,privacy_owner/2]).
+ privacy/1]).
-export([empty/1,badinsert/1]).
-export([time_lookup/1,badlookup/1,lookup_order/1]).
-export([delete_elem/1,delete_tab/1,delete_large_tab/1,
@@ -82,27 +82,6 @@
%% Convenience for manual testing
-export([random_test/0]).
-%% internal exports
--export([dont_make_worse_sub/0, make_better_sub1/0, make_better_sub2/0]).
--export([t_repair_continuation_do/1, t_bucket_disappears_do/1,
- select_fail_do/1, whitebox_1/1, whitebox_2/1, t_delete_all_objects_do/1,
- t_delete_object_do/1, t_init_table_do/1, t_insert_list_do/1,
- update_element_opts/1, update_element_opts/4, update_element/4, update_element_do/4,
- update_element_neg/1, update_element_neg_do/1, update_counter_do/1, update_counter_neg/1,
- evil_update_counter_do/1, fixtable_next_do/1, heir_do/1, give_away_do/1, setopts_do/1,
- rename_do/1, rename_unnamed_do/1, interface_equality_do/1, ordered_match_do/1,
- ordered_do/1, privacy_do/1, empty_do/1, badinsert_do/1, time_lookup_do/1,
- lookup_order_do/1, lookup_element_mult_do/1, delete_tab_do/1, delete_elem_do/1,
- match_delete_do/1, match_delete3_do/1, firstnext_do/1,
- slot_do/1, match1_do/1, match2_do/1, match_object_do/1, match_object2_do/1,
- misc1_do/1, safe_fixtable_do/1, info_do/1, dups_do/1, heavy_lookup_do/1,
- heavy_lookup_element_do/1, member_do/1, otp_5340_do/1, otp_7665_do/1, meta_wb_do/1,
- do_heavy_concurrent/1, tab2file2_do/2, exit_large_table_owner_do/2,
- types_do/1, sleeper/0, memory_do/1, update_counter_with_default_do/1,
- update_counter_table_growth_do/1,
- ms_tracee_dummy/1, ms_tracee_dummy/2, ms_tracee_dummy/3, ms_tracee_dummy/4
- ]).
-
-export([t_select_reverse/1]).
-include_lib("common_test/include/ct.hrl").
@@ -228,7 +207,7 @@ memory_check_summary(_Config) ->
%% Test that a disappearing bucket during select of a non-fixed table works.
t_bucket_disappears(Config) when is_list(Config) ->
- repeat_for_opts(t_bucket_disappears_do).
+ repeat_for_opts(fun t_bucket_disappears_do/1).
t_bucket_disappears_do(Opts) ->
EtsMem = etsmem(),
@@ -396,11 +375,16 @@ ms_tracer_collect(Tracee, Ref, Acc) ->
ms_tracee(Parent, CallArgList) ->
Parent ! {self(), ready},
receive start -> ok end,
- lists:foreach(fun(Args) ->
- erlang:apply(?MODULE, ms_tracee_dummy, tuple_to_list(Args))
- end, CallArgList).
-
-
+ F = fun({A1}) ->
+ ms_tracee_dummy(A1);
+ ({A1,A2}) ->
+ ms_tracee_dummy(A1, A2);
+ ({A1,A2,A3}) ->
+ ms_tracee_dummy(A1, A2, A3);
+ ({A1,A2,A3,A4}) ->
+ ms_tracee_dummy(A1, A2, A3, A4)
+ end,
+ lists:foreach(F, CallArgList).
ms_tracee_dummy(_) -> ok.
ms_tracee_dummy(_,_) -> ok.
@@ -418,7 +402,7 @@ assert_eq(A,B) ->
%% Test ets:repair_continuation/2.
t_repair_continuation(Config) when is_list(Config) ->
- repeat_for_opts(t_repair_continuation_do).
+ repeat_for_opts(fun t_repair_continuation_do/1).
t_repair_continuation_do(Opts) ->
@@ -564,7 +548,8 @@ default(Config) when is_list(Config) ->
%% Test that select fails even if nothing can match.
select_fail(Config) when is_list(Config) ->
EtsMem = etsmem(),
- repeat_for_opts(select_fail_do, [all_types,write_concurrency]),
+ repeat_for_opts(fun select_fail_do/1,
+ [all_types,write_concurrency]),
verify_etsmem(EtsMem).
select_fail_do(Opts) ->
@@ -594,7 +579,7 @@ select_fail_do(Opts) ->
%% Whitebox test of ets:info(X, memory).
memory(Config) when is_list(Config) ->
ok = chk_normal_tab_struct_size(),
- repeat_for_opts(memory_do,[compressed]),
+ repeat_for_opts(fun memory_do/1, [compressed]),
catch erts_debug:set_internal_state(available_internal_state, false).
memory_do(Opts) ->
@@ -704,12 +689,12 @@ adjust_xmem([_T1,_T2,_T3,_T4], {A0,B0,C0,D0} = _Mem0, EstCnt) ->
%% Misc. whitebox tests
t_whitebox(Config) when is_list(Config) ->
EtsMem = etsmem(),
- repeat_for_opts(whitebox_1),
- repeat_for_opts(whitebox_1),
- repeat_for_opts(whitebox_1),
- repeat_for_opts(whitebox_2),
- repeat_for_opts(whitebox_2),
- repeat_for_opts(whitebox_2),
+ repeat_for_opts(fun whitebox_1/1),
+ repeat_for_opts(fun whitebox_1/1),
+ repeat_for_opts(fun whitebox_1/1),
+ repeat_for_opts(fun whitebox_2/1),
+ repeat_for_opts(fun whitebox_2/1),
+ repeat_for_opts(fun whitebox_2/1),
verify_etsmem(EtsMem).
whitebox_1(Opts) ->
@@ -774,7 +759,7 @@ check_badarg({'EXIT', {badarg, [{M,F,A,_} | _]}}, M, F, Args) ->
%% Test ets:delete_all_objects/1.
t_delete_all_objects(Config) when is_list(Config) ->
EtsMem = etsmem(),
- repeat_for_opts(t_delete_all_objects_do),
+ repeat_for_opts(fun t_delete_all_objects_do/1),
verify_etsmem(EtsMem).
get_kept_objects(T) ->
@@ -808,7 +793,7 @@ t_delete_all_objects_do(Opts) ->
%% Test ets:delete_object/2.
t_delete_object(Config) when is_list(Config) ->
EtsMem = etsmem(),
- repeat_for_opts(t_delete_object_do),
+ repeat_for_opts(fun t_delete_object_do/1),
verify_etsmem(EtsMem).
t_delete_object_do(Opts) ->
@@ -881,7 +866,7 @@ make_init_fun(N) ->
%% Test ets:init_table/2.
t_init_table(Config) when is_list(Config)->
EtsMem = etsmem(),
- repeat_for_opts(t_init_table_do),
+ repeat_for_opts(fun t_init_table_do/1),
verify_etsmem(EtsMem).
t_init_table_do(Opts) ->
@@ -957,7 +942,7 @@ t_insert_new(Config) when is_list(Config) ->
%% Test ets:insert/2 with list of objects.
t_insert_list(Config) when is_list(Config) ->
EtsMem = etsmem(),
- repeat_for_opts(t_insert_list_do),
+ repeat_for_opts(fun t_insert_list_do/1),
verify_etsmem(EtsMem).
t_insert_list_do(Opts) ->
@@ -1187,7 +1172,7 @@ partly_bound(Config) when is_list(Config) ->
end.
dont_make_worse() ->
- seventyfive_percent_success({?MODULE,dont_make_worse_sub,[]},0,0,10).
+ seventyfive_percent_success(fun dont_make_worse_sub/0, 0, 0, 10).
dont_make_worse_sub() ->
T = build_table([a,b],[a,b],15000),
@@ -1199,8 +1184,9 @@ dont_make_worse_sub() ->
ok.
make_better() ->
- fifty_percent_success({?MODULE,make_better_sub2,[]},0,0,10),
- fifty_percent_success({?MODULE,make_better_sub1,[]},0,0,10).
+ fifty_percent_success(fun make_better_sub2/0, 0, 0, 10),
+ fifty_percent_success(fun make_better_sub1/0, 0, 0, 10).
+
make_better_sub1() ->
T = build_table2([a,b],[a,b],15000),
T1 = time_match_object(T,{'_',1500,a,a}, [{{1500,a,a},1500,a,a}]),
@@ -1485,7 +1471,7 @@ do_random_test() ->
%% Ttest various variants of update_element.
update_element(Config) when is_list(Config) ->
EtsMem = etsmem(),
- repeat_for_opts(update_element_opts),
+ repeat_for_opts(fun update_element_opts/1),
verify_etsmem(EtsMem).
update_element_opts(Opts) ->
@@ -1647,7 +1633,7 @@ update_element_neg_do(T) ->
%% test various variants of update_counter.
update_counter(Config) when is_list(Config) ->
EtsMem = etsmem(),
- repeat_for_opts(update_counter_do),
+ repeat_for_opts(fun update_counter_do/1),
verify_etsmem(EtsMem).
update_counter_do(Opts) ->
@@ -1868,7 +1854,7 @@ evil_update_counter(Config) when is_list(Config) ->
ordsets:module_info(),
rand:module_info(),
- repeat_for_opts(evil_update_counter_do).
+ repeat_for_opts(fun evil_update_counter_do/1).
evil_update_counter_do(Opts) ->
EtsMem = etsmem(),
@@ -1915,7 +1901,7 @@ evil_counter_1(Iter, T) ->
evil_counter_1(Iter-1, T).
update_counter_with_default(Config) when is_list(Config) ->
- repeat_for_opts(update_counter_with_default_do).
+ repeat_for_opts(fun update_counter_with_default_do/1).
update_counter_with_default_do(Opts) ->
T1 = ets_new(a, [set | Opts]),
@@ -1953,7 +1939,7 @@ update_counter_with_default_do(Opts) ->
ok.
update_counter_table_growth(_Config) ->
- repeat_for_opts(update_counter_table_growth_do).
+ repeat_for_opts(fun update_counter_table_growth_do/1).
update_counter_table_growth_do(Opts) ->
Set = ets_new(b, [set | Opts]),
@@ -1964,7 +1950,8 @@ update_counter_table_growth_do(Opts) ->
%% Check that a first-next sequence always works on a fixed table.
fixtable_next(Config) when is_list(Config) ->
- repeat_for_opts(fixtable_next_do, [write_concurrency,all_types]).
+ repeat_for_opts(fun fixtable_next_do/1,
+ [write_concurrency,all_types]).
fixtable_next_do(Opts) ->
EtsMem = etsmem(),
@@ -2104,7 +2091,7 @@ write_concurrency(Config) when is_list(Config) ->
%% The 'heir' option.
heir(Config) when is_list(Config) ->
- repeat_for_opts(heir_do).
+ repeat_for_opts(fun heir_do/1).
heir_do(Opts) ->
EtsMem = etsmem(),
@@ -2244,7 +2231,7 @@ heir_1(HeirData,Mode,Opts) ->
%% Test ets:give_way/3.
give_away(Config) when is_list(Config) ->
- repeat_for_opts(give_away_do).
+ repeat_for_opts(fun give_away_do/1).
give_away_do(Opts) ->
T = ets_new(foo,[named_table, private | Opts]),
@@ -2325,7 +2312,7 @@ give_away_receiver(T, Giver) ->
%% Test ets:setopts/2.
setopts(Config) when is_list(Config) ->
- repeat_for_opts(setopts_do,[write_concurrency,all_types]).
+ repeat_for_opts(fun setopts_do/1, [write_concurrency,all_types]).
setopts_do(Opts) ->
Self = self(),
@@ -2475,7 +2462,7 @@ bad_table_call(T,{F,Args,_,{return,Return}}) ->
%% Check rename of ets tables.
rename(Config) when is_list(Config) ->
- repeat_for_opts(rename_do, [write_concurrency, all_types]).
+ repeat_for_opts(fun rename_do/1, [write_concurrency, all_types]).
rename_do(Opts) ->
EtsMem = etsmem(),
@@ -2490,7 +2477,8 @@ rename_do(Opts) ->
%% Check rename of unnamed ets table.
rename_unnamed(Config) when is_list(Config) ->
- repeat_for_opts(rename_unnamed_do,[write_concurrency,all_types]).
+ repeat_for_opts(fun rename_unnamed_do/1,
+ [write_concurrency,all_types]).
rename_unnamed_do(Opts) ->
EtsMem = etsmem(),
@@ -2565,7 +2553,7 @@ evil_create_fixed_tab() ->
%% Tests that the return values and errors are equal for set's and
%% ordered_set's where applicable.
interface_equality(Config) when is_list(Config) ->
- repeat_for_opts(interface_equality_do).
+ repeat_for_opts(fun interface_equality_do/1).
interface_equality_do(Opts) ->
EtsMem = etsmem(),
@@ -2629,7 +2617,7 @@ maybe_sort(Any) ->
%% Test match, match_object and match_delete in ordered set's.
ordered_match(Config) when is_list(Config)->
- repeat_for_opts(ordered_match_do).
+ repeat_for_opts(fun ordered_match_do/1).
ordered_match_do(Opts) ->
EtsMem = etsmem(),
@@ -2675,7 +2663,7 @@ ordered_match_do(Opts) ->
%% Test basic functionality in ordered_set's.
ordered(Config) when is_list(Config) ->
- repeat_for_opts(ordered_do).
+ repeat_for_opts(fun ordered_do/1).
ordered_do(Opts) ->
EtsMem = etsmem(),
@@ -2801,12 +2789,13 @@ keypos2(Config) when is_list(Config) ->
%% Privacy check. Check that a named(public/private/protected) table
%% cannot be read by the wrong process(es).
privacy(Config) when is_list(Config) ->
- repeat_for_opts(privacy_do).
+ repeat_for_opts(fun privacy_do/1).
privacy_do(Opts) ->
EtsMem = etsmem(),
process_flag(trap_exit,true),
- Owner = my_spawn_link(?MODULE,privacy_owner,[self(),Opts]),
+ Parent = self(),
+ Owner = my_spawn_link(fun() -> privacy_owner(Parent, Opts) end),
receive
{'EXIT',Owner,Reason} ->
exit({privacy_test,Reason});
@@ -2886,7 +2875,7 @@ rotate_tuple(Tuple, N) ->
%% Check lookup in an empty table and lookup of a non-existing key.
empty(Config) when is_list(Config) ->
- repeat_for_opts(empty_do).
+ repeat_for_opts(fun empty_do/1).
empty_do(Opts) ->
EtsMem = etsmem(),
@@ -2899,7 +2888,7 @@ empty_do(Opts) ->
%% Check proper return values for illegal insert operations.
badinsert(Config) when is_list(Config) ->
- repeat_for_opts(badinsert_do).
+ repeat_for_opts(fun badinsert_do/1).
badinsert_do(Opts) ->
EtsMem = etsmem(),
@@ -2923,7 +2912,7 @@ badinsert_do(Opts) ->
time_lookup(Config) when is_list(Config) ->
%% just for timing, really
EtsMem = etsmem(),
- Values = repeat_for_opts(time_lookup_do),
+ Values = repeat_for_opts(fun time_lookup_do/1),
verify_etsmem(EtsMem),
{comment,lists:flatten(io_lib:format(
"~p ets lookups/s",[Values]))}.
@@ -2957,7 +2946,8 @@ badlookup(Config) when is_list(Config) ->
%% Test that lookup returns objects in order of insertion for bag and dbag.
lookup_order(Config) when is_list(Config) ->
EtsMem = etsmem(),
- repeat_for_opts(lookup_order_do, [write_concurrency,[bag,duplicate_bag]]),
+ repeat_for_opts(fun lookup_order_do/1,
+ [write_concurrency,[bag,duplicate_bag]]),
verify_etsmem(EtsMem),
ok.
@@ -3048,7 +3038,7 @@ fill_tab(Tab,Val) ->
%% OTP-2386. Multiple return elements.
lookup_element_mult(Config) when is_list(Config) ->
- repeat_for_opts(lookup_element_mult_do).
+ repeat_for_opts(fun lookup_element_mult_do/1).
lookup_element_mult_do(Opts) ->
EtsMem = etsmem(),
@@ -3086,7 +3076,8 @@ lem_crash_3(T) ->
%% Check delete of an element inserted in a `filled' table.
delete_elem(Config) when is_list(Config) ->
- repeat_for_opts(delete_elem_do, [write_concurrency, all_types]).
+ repeat_for_opts(fun delete_elem_do/1,
+ [write_concurrency, all_types]).
delete_elem_do(Opts) ->
EtsMem = etsmem(),
@@ -3103,7 +3094,8 @@ delete_elem_do(Opts) ->
%% Check that ets:delete() works and releases the name of the
%% deleted table.
delete_tab(Config) when is_list(Config) ->
- repeat_for_opts(delete_tab_do,[write_concurrency,all_types]).
+ repeat_for_opts(fun delete_tab_do/1,
+ [write_concurrency,all_types]).
delete_tab_do(Opts) ->
Name = foo,
@@ -3301,10 +3293,14 @@ exit_large_table_owner(Config) when is_list(Config) ->
end, 1)
end,
EtsMem = etsmem(),
- repeat_for_opts({exit_large_table_owner_do,{FEData,Config}}),
+ repeat_for_opts(fun(Opts) ->
+ exit_large_table_owner_do(Opts,
+ FEData,
+ Config)
+ end),
verify_etsmem(EtsMem).
-exit_large_table_owner_do(Opts,{FEData,Config}) ->
+exit_large_table_owner_do(Opts, FEData, Config) ->
verify_rescheduling_exit(Config, FEData, [named_table | Opts], true, 1, 1),
verify_rescheduling_exit(Config, FEData, Opts, false, 1, 1).
@@ -3472,7 +3468,8 @@ baddelete(Config) when is_list(Config) ->
%% Check that match_delete works. Also tests tab2list function.
match_delete(Config) when is_list(Config) ->
EtsMem = etsmem(),
- repeat_for_opts(match_delete_do,[write_concurrency,all_types]),
+ repeat_for_opts(fun match_delete_do/1,
+ [write_concurrency,all_types]),
verify_etsmem(EtsMem).
match_delete_do(Opts) ->
@@ -3489,7 +3486,7 @@ match_delete_do(Opts) ->
%% OTP-3005: check match_delete with constant argument.
match_delete3(Config) when is_list(Config) ->
- repeat_for_opts(match_delete3_do).
+ repeat_for_opts(fun match_delete3_do/1).
match_delete3_do(Opts) ->
EtsMem = etsmem(),
@@ -3514,7 +3511,7 @@ match_delete3_do(Opts) ->
%% Test ets:first/1 & ets:next/2.
firstnext(Config) when is_list(Config) ->
- repeat_for_opts(firstnext_do).
+ repeat_for_opts(fun firstnext_do/1).
firstnext_do(Opts) ->
EtsMem = etsmem(),
@@ -3572,7 +3569,7 @@ dyn_lookup(T, K) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
slot(Config) when is_list(Config) ->
- repeat_for_opts(slot_do).
+ repeat_for_opts(fun slot_do/1).
slot_do(Opts) ->
EtsMem = etsmem(),
@@ -3597,7 +3594,7 @@ slot_loop(Tab,SlotNo,EltsSoFar) ->
match1(Config) when is_list(Config) ->
- repeat_for_opts(match1_do).
+ repeat_for_opts(fun match1_do/1).
match1_do(Opts) ->
EtsMem = etsmem(),
@@ -3633,7 +3630,7 @@ match1_do(Opts) ->
%% Test match with specified keypos bag table.
match2(Config) when is_list(Config) ->
- repeat_for_opts(match2_do).
+ repeat_for_opts(fun match2_do/1).
match2_do(Opts) ->
EtsMem = etsmem(),
@@ -3660,7 +3657,7 @@ match2_do(Opts) ->
%% Some ets:match_object tests.
match_object(Config) when is_list(Config) ->
- repeat_for_opts(match_object_do).
+ repeat_for_opts(fun match_object_do/1).
match_object_do(Opts) ->
EtsMem = etsmem(),
@@ -3760,7 +3757,7 @@ match_object_do(Opts) ->
%% Tests that db_match_object does not generate a `badarg' when
%% resuming a search with no previous matches.
match_object2(Config) when is_list(Config) ->
- repeat_for_opts(match_object2_do).
+ repeat_for_opts(fun match_object2_do/1).
match_object2_do(Opts) ->
EtsMem = etsmem(),
@@ -3796,7 +3793,7 @@ tab2list(Config) when is_list(Config) ->
%% Simple general small test. If this fails, ets is in really bad
%% shape.
misc1(Config) when is_list(Config) ->
- repeat_for_opts(misc1_do).
+ repeat_for_opts(fun misc1_do/1).
misc1_do(Opts) ->
EtsMem = etsmem(),
@@ -3814,7 +3811,7 @@ misc1_do(Opts) ->
%% Check the safe_fixtable function.
safe_fixtable(Config) when is_list(Config) ->
- repeat_for_opts(safe_fixtable_do).
+ repeat_for_opts(fun safe_fixtable_do/1).
safe_fixtable_do(Opts) ->
EtsMem = etsmem(),
@@ -3872,7 +3869,7 @@ safe_fixtable_do(Opts) ->
%% Tests ets:info result for required tuples.
info(Config) when is_list(Config) ->
- repeat_for_opts(info_do).
+ repeat_for_opts(fun info_do/1).
info_do(Opts) ->
EtsMem = etsmem(),
@@ -3904,7 +3901,7 @@ info_do(Opts) ->
%% Test various duplicate_bags stuff.
dups(Config) when is_list(Config) ->
- repeat_for_opts(dups_do).
+ repeat_for_opts(fun dups_do/1).
dups_do(Opts) ->
EtsMem = etsmem(),
@@ -3970,7 +3967,9 @@ tab2file_do(FName, Opts) ->
%% Check the ets:tab2file function on a filled set/bag type ets table.
tab2file2(Config) when is_list(Config) ->
- repeat_for_opts({tab2file2_do,Config}, [[set,bag],compressed]).
+ repeat_for_opts(fun(Opts) ->
+ tab2file2_do(Opts, Config)
+ end, [[set,bag],compressed]).
tab2file2_do(Opts, Config) ->
EtsMem = etsmem(),
@@ -4234,7 +4233,7 @@ make_sub_binary(List, Num) when is_list(List) ->
%% Perform multiple lookups for every key in a large table.
heavy_lookup(Config) when is_list(Config) ->
- repeat_for_opts(heavy_lookup_do).
+ repeat_for_opts(fun heavy_lookup_do/1).
heavy_lookup_do(Opts) ->
EtsMem = etsmem(),
@@ -4257,7 +4256,7 @@ do_lookup(Tab, N) ->
%% Perform multiple lookups for every element in a large table.
heavy_lookup_element(Config) when is_list(Config) ->
- repeat_for_opts(heavy_lookup_element_do).
+ repeat_for_opts(fun heavy_lookup_element_do/1).
heavy_lookup_element_do(Opts) ->
EtsMem = etsmem(),
@@ -4285,7 +4284,7 @@ do_lookup_element(Tab, N, M) ->
heavy_concurrent(Config) when is_list(Config) ->
ct:timetrap({minutes,30}), %% valgrind needs a lot of time
- repeat_for_opts(do_heavy_concurrent).
+ repeat_for_opts(fun do_heavy_concurrent/1).
do_heavy_concurrent(Opts) ->
Size = 10000,
@@ -4370,7 +4369,7 @@ foldr_ordered(Config) when is_list(Config) ->
%% Test ets:member BIF.
member(Config) when is_list(Config) ->
- repeat_for_opts(member_do, [write_concurrency, all_types]).
+ repeat_for_opts(fun member_do/1, [write_concurrency, all_types]).
member_do(Opts) ->
EtsMem = etsmem(),
@@ -4453,26 +4452,26 @@ time_match(Tab,Match) ->
seventyfive_percent_success(_,S,Fa,0) ->
true = (S > ((S + Fa) * 0.75));
-seventyfive_percent_success({M,F,A},S,Fa,N) ->
- case (catch apply(M,F,A)) of
- {'EXIT', _} ->
- seventyfive_percent_success({M,F,A},S,Fa+1,N-1);
- _ ->
- seventyfive_percent_success({M,F,A},S+1,Fa,N-1)
+seventyfive_percent_success(F, S, Fa, N) when is_function(F, 0) ->
+ try F() of
+ _ ->
+ seventyfive_percent_success(F, S+1, Fa, N-1)
+ catch error:_ ->
+ seventyfive_percent_success(F, S, Fa+1, N-1)
end.
fifty_percent_success(_,S,Fa,0) ->
true = (S > ((S + Fa) * 0.5));
-fifty_percent_success({M,F,A},S,Fa,N) ->
- case (catch apply(M,F,A)) of
- {'EXIT', _} ->
- fifty_percent_success({M,F,A},S,Fa+1,N-1);
- _ ->
- fifty_percent_success({M,F,A},S+1,Fa,N-1)
+fifty_percent_success(F, S, Fa, N) when is_function(F, 0) ->
+ try F() of
+ _ ->
+ fifty_percent_success(F, S+1, Fa, N-1)
+ catch
+ error:_ ->
+ fifty_percent_success(F, S, Fa+1, N-1)
end.
-
create_random_string(0) ->
[];
@@ -4811,7 +4810,7 @@ otp_6338(Config) when is_list(Config) ->
%% Elements could come in the wrong order in a bag if a rehash occurred.
otp_5340(Config) when is_list(Config) ->
- repeat_for_opts(otp_5340_do).
+ repeat_for_opts(fun otp_5340_do/1).
otp_5340_do(Opts) ->
N = 3000,
@@ -4847,7 +4846,7 @@ verify2(_Err, _) ->
%% delete_object followed by delete on fixed bag failed to delete objects.
otp_7665(Config) when is_list(Config) ->
- repeat_for_opts(otp_7665_do).
+ repeat_for_opts(fun otp_7665_do/1).
otp_7665_do(Opts) ->
Tab = ets_new(otp_7665,[bag | Opts]),
@@ -4877,7 +4876,7 @@ otp_7665_act(Tab,Min,Max,DelNr) ->
%% Whitebox testing of meta name table hashing.
meta_wb(Config) when is_list(Config) ->
EtsMem = etsmem(),
- repeat_for_opts(meta_wb_do),
+ repeat_for_opts(fun meta_wb_do/1),
verify_etsmem(EtsMem).
@@ -5446,7 +5445,7 @@ smp_select_delete(Config) when is_list(Config) ->
%% Test different types.
types(Config) when is_list(Config) ->
init_externals(),
- repeat_for_opts(types_do,[[set,ordered_set],compressed]).
+ repeat_for_opts(fun types_do/1, [[set,ordered_set],compressed]).
types_do(Opts) ->
EtsMem = etsmem(),
@@ -5848,12 +5847,8 @@ log_test_proc(Proc) when is_pid(Proc) ->
Proc.
my_spawn(Fun) -> log_test_proc(spawn(Fun)).
-%%my_spawn(M,F,A) -> log_test_proc(spawn(M,F,A)).
-%%my_spawn(N,M,F,A) -> log_test_proc(spawn(N,M,F,A)).
my_spawn_link(Fun) -> log_test_proc(spawn_link(Fun)).
-my_spawn_link(M,F,A) -> log_test_proc(spawn_link(M,F,A)).
-%%my_spawn_link(N,M,F,A) -> log_test_proc(spawn_link(N,M,F,A)).
my_spawn_opt(Fun,Opts) ->
case spawn_opt(Fun,Opts) of
@@ -6096,7 +6091,7 @@ make_port() ->
open_port({spawn, "efile"}, [eof]).
make_pid() ->
- spawn_link(?MODULE, sleeper, []).
+ spawn_link(fun sleeper/0).
sleeper() ->
receive after infinity -> ok end.
@@ -6232,11 +6227,7 @@ make_unaligned_sub_binary(List) ->
repeat_for_opts(F) ->
repeat_for_opts(F, [write_concurrency, read_concurrency, compressed]).
-repeat_for_opts(F, OptGenList) when is_atom(F) ->
- repeat_for_opts(fun(Opts) -> ?MODULE:F(Opts) end, OptGenList);
-repeat_for_opts({F,Args}, OptGenList) when is_atom(F) ->
- repeat_for_opts(fun(Opts) -> ?MODULE:F(Opts,Args) end, OptGenList);
-repeat_for_opts(F, OptGenList) ->
+repeat_for_opts(F, OptGenList) when is_function(F, 1) ->
repeat_for_opts(F, OptGenList, []).
repeat_for_opts(F, [], Acc) ->
diff --git a/lib/stdlib/test/ets_tough_SUITE.erl b/lib/stdlib/test/ets_tough_SUITE.erl
index 49aba7a529..0abce3200f 100644
--- a/lib/stdlib/test/ets_tough_SUITE.erl
+++ b/lib/stdlib/test/ets_tough_SUITE.erl
@@ -19,10 +19,15 @@
%%
-module(ets_tough_SUITE).
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
- init_per_group/2,end_per_group/2,ex1/1]).
--export([init/1,terminate/2,handle_call/3,handle_info/2]).
+ init_per_group/2,end_per_group/2,
+ ex1/1]).
-export([init_per_testcase/2, end_per_testcase/2]).
--compile([export_all]).
+
+%% gen_server behavior.
+-behavior(gen_server).
+-export([init/1,terminate/2,handle_call/3,handle_cast/2,
+ handle_info/2,code_change/3]).
+
-include_lib("common_test/include/ct.hrl").
suite() ->
@@ -235,33 +240,6 @@ random_element(T) ->
I = rand:uniform(tuple_size(T)),
element(I,T).
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-show_table(N) ->
- FileName = ["etsdump.",integer_to_list(N)],
- case file:open(FileName,read) of
- {ok,Fd} ->
- show_entries(Fd);
- _ ->
- error
- end.
-
-show_entries(Fd) ->
- case phys_read_len(Fd) of
- {ok,Len} ->
- case phys_read_entry(Fd,Len) of
- {ok,ok} ->
- ok;
- {ok,{Key,Val}} ->
- io:format("~w\n",[{Key,Val}]),
- show_entries(Fd);
- _ ->
- error
- end;
- _ ->
- error
- end.
-
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -378,20 +356,6 @@ dget_class(ServerPid,Class,Condition) ->
derase_class(ServerPid,Class) ->
gen_server:call(ServerPid,{handle_delete_class,Class}, infinity).
-%%% dmodify(ServerPid,Application) -> ok
-%%%
-%%% Applies a function on every instance in the database.
-%%% The user provided function must always return one of the
-%%% terms {ok,NewItem}, true, or false.
-%%% Aug 96, this is only used to reset all timestamp values
-%%% in the database.
-%%% The function is supplied as Application = {Mod, Fun, ExtraArgs},
-%%% where the instance will be prepended to ExtraArgs before each
-%%% call is made.
-
-dmodify(ServerPid,Application) ->
- gen_server:call(ServerPid,{handle_dmodify,Application}, infinity).
-
%%% ddump_first(ServerPid,DumpDir) -> {dump_more,Ticket} | already_dumping
%%%
%%% Starts dumping the database. This call redirects all database updates
@@ -643,9 +607,15 @@ handle_call(stop,_From,Admin) ->
?ets_delete(Admin), % Make sure table is gone before reply is sent.
{stop, normal, ok, []}.
+handle_cast(_Req, Admin) ->
+ {noreply, Admin}.
+
handle_info({'EXIT',_Pid,_Reason},Admin) ->
{stop,normal,Admin}.
+code_change(_OldVsn, StateData, _Extra) ->
+ {ok, StateData}.
+
handle_delete(Class, Key, Admin) ->
handle_call({handle_delete,Class,Key},from,Admin).
diff --git a/lib/stdlib/test/filelib_SUITE.erl b/lib/stdlib/test/filelib_SUITE.erl
index 4f8936edbf..87fba815d2 100644
--- a/lib/stdlib/test/filelib_SUITE.erl
+++ b/lib/stdlib/test/filelib_SUITE.erl
@@ -25,7 +25,8 @@
init_per_testcase/2,end_per_testcase/2,
wildcard_one/1,wildcard_two/1,wildcard_errors/1,
fold_files/1,otp_5960/1,ensure_dir_eexist/1,ensure_dir_symlink/1,
- wildcard_symlink/1, is_file_symlink/1, file_props_symlink/1]).
+ wildcard_symlink/1, is_file_symlink/1, file_props_symlink/1,
+ find_source/1]).
-import(lists, [foreach/2]).
@@ -45,7 +46,8 @@ suite() ->
all() ->
[wildcard_one, wildcard_two, wildcard_errors,
fold_files, otp_5960, ensure_dir_eexist, ensure_dir_symlink,
- wildcard_symlink, is_file_symlink, file_props_symlink].
+ wildcard_symlink, is_file_symlink, file_props_symlink,
+ find_source].
groups() ->
[].
@@ -503,3 +505,52 @@ file_props_symlink(Config) ->
FileSize = filelib:file_size(Alias, erl_prim_loader),
FileSize = filelib:file_size(Alias, prim_file)
end.
+
+find_source(Config) when is_list(Config) ->
+ BeamFile = code:which(lists),
+ BeamName = filename:basename(BeamFile),
+ BeamDir = filename:dirname(BeamFile),
+ SrcName = filename:basename(BeamFile, ".beam") ++ ".erl",
+
+ {ok, BeamFile} = filelib:find_file(BeamName, BeamDir),
+ {ok, BeamFile} = filelib:find_file(BeamName, BeamDir, []),
+ {ok, BeamFile} = filelib:find_file(BeamName, BeamDir, [{"",""},{"ebin","src"}]),
+ {error, not_found} = filelib:find_file(BeamName, BeamDir, [{"ebin","src"}]),
+
+ {ok, SrcFile} = filelib:find_file(SrcName, BeamDir),
+ {ok, SrcFile} = filelib:find_file(SrcName, BeamDir, []),
+ {ok, SrcFile} = filelib:find_file(SrcName, BeamDir, [{"foo","bar"},{"ebin","src"}]),
+ {error, not_found} = filelib:find_file(SrcName, BeamDir, [{"",""}]),
+
+ {ok, SrcFile} = filelib:find_source(BeamFile),
+ {ok, SrcFile} = filelib:find_source(BeamName, BeamDir),
+ {ok, SrcFile} = filelib:find_source(BeamName, BeamDir,
+ [{".erl",".yrl",[{"",""}]},
+ {".beam",".erl",[{"ebin","src"}]}]),
+ {error, not_found} = filelib:find_source(BeamName, BeamDir,
+ [{".erl",".yrl",[{"",""}]}]),
+
+ {ok, ParserErl} = filelib:find_source(code:which(erl_parse)),
+ {ok, ParserYrl} = filelib:find_source(ParserErl),
+ "lry." ++ _ = lists:reverse(ParserYrl),
+ {ok, ParserYrl} = filelib:find_source(ParserErl,
+ [{".beam",".erl",[{"ebin","src"}]},
+ {".erl",".yrl",[{"",""}]}]),
+
+ %% find_source automatically checks the local directory regardless of rules
+ {ok, ParserYrl} = filelib:find_source(ParserErl),
+ {ok, ParserYrl} = filelib:find_source(ParserErl,
+ [{".beam",".erl",[{"ebin","src"}]}]),
+
+ %% find_file does not check the local directory unless in the rules
+ ParserYrlName = filename:basename(ParserYrl),
+ ParserYrlDir = filename:dirname(ParserYrl),
+ {ok, ParserYrl} = filelib:find_file(ParserYrlName, ParserYrlDir,
+ [{"",""}]),
+ {error, not_found} = filelib:find_file(ParserYrlName, ParserYrlDir,
+ [{"ebin","src"}]),
+
+ %% local directory is in the default list for find_file
+ {ok, ParserYrl} = filelib:find_file(ParserYrlName, ParserYrlDir),
+ {ok, ParserYrl} = filelib:find_file(ParserYrlName, ParserYrlDir, []),
+ ok.
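
The last part of the test relies on the local directory being part of find_file's default rules, and on find_source's {ObjExt, SrcExt, DirRules} rules, which map an object-file extension to a source extension plus directory rewrites — that is how erl_parse.erl is traced back to its .yrl grammar. A hedged sketch of the same chain (the helper name is made up; the calls mirror the test above):

-module(find_source_example).
-export([grammar_of/1]).

%% For a yecc-generated parser module such as erl_parse, walk
%% .beam -> .erl -> .yrl using the default search rules.
grammar_of(Mod) when is_atom(Mod) ->
    {ok, ErlFile} = filelib:find_source(code:which(Mod)),
    %% The default rules also map ".erl" back to ".yrl" in the same
    %% directory; custom rules such as {".erl",".yrl",[{"",""}]} could be
    %% supplied explicitly via filelib:find_source/3.
    filelib:find_source(ErlFile).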
diff --git a/lib/stdlib/test/filename_SUITE.erl b/lib/stdlib/test/filename_SUITE.erl
index b7c4d3a6e5..54066021fb 100644
--- a/lib/stdlib/test/filename_SUITE.erl
+++ b/lib/stdlib/test/filename_SUITE.erl
@@ -421,8 +421,10 @@ t_nativename(Config) when is_list(Config) ->
find_src(Config) when is_list(Config) ->
{Source,_} = filename:find_src(file),
["file"|_] = lists:reverse(filename:split(Source)),
- {_,_} = filename:find_src(init, [{".","."}, {"ebin","src"}]),
-
+ {Source,_} = filename:find_src(file, [{"",""}, {"ebin","src"}]),
+ {Source,_} = filename:find_src(Source),
+ {Source,_} = filename:find_src(Source ++ ".erl"),
+
%% Try to find the source for a preloaded module.
{error,{preloaded,init}} = filename:find_src(init),
diff --git a/lib/stdlib/test/io_SUITE.erl b/lib/stdlib/test/io_SUITE.erl
index 7d48cbc97c..b0a1e461e3 100644
--- a/lib/stdlib/test/io_SUITE.erl
+++ b/lib/stdlib/test/io_SUITE.erl
@@ -30,7 +30,7 @@
io_lib_print_binary_depth_one/1, otp_10302/1, otp_10755/1,
otp_10836/1, io_lib_width_too_small/1,
io_with_huge_message_queue/1, format_string/1,
- maps/1, coverage/1]).
+ maps/1, coverage/1, otp_14178_unicode_atoms/1]).
-export([pretty/2]).
@@ -61,7 +61,7 @@ all() ->
printable_range, bad_printable_range,
io_lib_print_binary_depth_one, otp_10302, otp_10755, otp_10836,
io_lib_width_too_small, io_with_huge_message_queue,
- format_string, maps, coverage].
+ format_string, maps, coverage, otp_14178_unicode_atoms].
%% Error cases for output.
error_1(Config) when is_list(Config) ->
@@ -2106,3 +2106,24 @@ coverage(_Config) ->
io:format("~s\n", [S2]),
ok.
+
+%% Test UTF-8 atoms.
+otp_14178_unicode_atoms(_Config) ->
+ "atom" = fmt("~ts", ['atom']),
+ "кирилли́ческий атом" = fmt("~ts", ['кирилли́ческий атом']),
+ [16#10FFFF] = fmt("~ts", ['\x{10FFFF}']),
+
+ %% ~s must not accept code points greater than 255.
+ bad_io_lib_format("~s", ['\x{100}']),
+ bad_io_lib_format("~s", ['кирилли́ческий атом']),
+
+ ok.
+
+bad_io_lib_format(F, S) ->
+ try io_lib:format(F, S) of
+ _ ->
+ ct:fail({should_fail,F,S})
+ catch
+ error:badarg ->
+ ok
+ end.
diff --git a/lib/stdlib/test/lists_SUITE.erl b/lib/stdlib/test/lists_SUITE.erl
index 531e97e8d6..5f2d8f0f4e 100644
--- a/lib/stdlib/test/lists_SUITE.erl
+++ b/lib/stdlib/test/lists_SUITE.erl
@@ -121,7 +121,7 @@ groups() ->
{zip, [parallel], [zip_unzip, zip_unzip3, zipwith, zipwith3]},
{misc, [parallel], [reverse, member, dropwhile, takewhile,
filter_partition, suffix, subtract, join,
- hof]}
+ hof, droplast]}
].
init_per_suite(Config) ->
diff --git a/lib/stdlib/test/random_iolist.erl b/lib/stdlib/test/random_iolist.erl
index 555f063e0a..b62cf5b82b 100644
--- a/lib/stdlib/test/random_iolist.erl
+++ b/lib/stdlib/test/random_iolist.erl
@@ -24,17 +24,13 @@
-module(random_iolist).
--export([run/3, run2/3, standard_seed/0, compare/3, compare2/3,
+-export([run/3, standard_seed/0, compare/3,
random_iolist/1]).
run(Iter,Fun1,Fun2) ->
standard_seed(),
compare(Iter,Fun1,Fun2).
-run2(Iter,Fun1,Fun2) ->
- standard_seed(),
- compare2(Iter,Fun1,Fun2).
-
random_byte() ->
rand:uniform(256) - 1.
@@ -150,16 +146,6 @@ do_comp(List,F1,F2) ->
_ ->
true
end.
-
-do_comp(List,List2,F1,F2) ->
- X = F1(List,List2),
- Y = F2(List,List2),
- case X =:= Y of
- false ->
- exit({not_matching,List,List2,X,Y});
- _ ->
- true
- end.
compare(0,Fun1,Fun2) ->
do_comp(<<>>,Fun1,Fun2),
@@ -172,25 +158,3 @@ compare(N,Fun1,Fun2) ->
L = random_iolist(N),
do_comp(L,Fun1,Fun2),
compare(N-1,Fun1,Fun2).
-
-compare2(0,Fun1,Fun2) ->
- L = random_iolist(100),
- do_comp(<<>>,L,Fun1,Fun2),
- do_comp(L,<<>>,Fun1,Fun2),
- do_comp(<<>>,<<>>,Fun1,Fun2),
- do_comp([],L,Fun1,Fun2),
- do_comp(L,[],Fun1,Fun2),
- do_comp([],[],Fun1,Fun2),
- do_comp([[]|<<>>],L,Fun1,Fun2),
- do_comp(L,[[]|<<>>],Fun1,Fun2),
- do_comp([[]|<<>>],[[]|<<>>],Fun1,Fun2),
- do_comp([<<>>,[]|<<>>],L,Fun1,Fun2),
- do_comp(L,[<<>>,[]|<<>>],Fun1,Fun2),
- do_comp([<<>>,[]|<<>>],[<<>>,[]|<<>>],Fun1,Fun2),
- true;
-
-compare2(N,Fun1,Fun2) ->
- L = random_iolist(N),
- L2 = random_iolist(N),
- do_comp(L,L2,Fun1,Fun2),
- compare2(N-1,Fun1,Fun2).
diff --git a/lib/stdlib/test/random_unicode_list.erl b/lib/stdlib/test/random_unicode_list.erl
index 8db2fa8b56..2eeb28113d 100644
--- a/lib/stdlib/test/random_unicode_list.erl
+++ b/lib/stdlib/test/random_unicode_list.erl
@@ -24,7 +24,7 @@
-module(random_unicode_list).
--export([run/3, run/4, run2/3, standard_seed/0, compare/4, compare2/3,
+-export([run/3, run/4, standard_seed/0, compare/4,
random_unicode_list/2]).
run(I,F1,F2) ->
@@ -33,10 +33,6 @@ run(Iter,Fun1,Fun2,Enc) ->
standard_seed(),
compare(Iter,Fun1,Fun2,Enc).
-run2(Iter,Fun1,Fun2) ->
- standard_seed(),
- compare2(Iter,Fun1,Fun2).
-
int_to_utf8(I) when I =< 16#7F ->
<<I>>;
int_to_utf8(I) when I =< 16#7FF ->
@@ -225,16 +221,6 @@ do_comp(List,F1,F2) ->
_ ->
true
end.
-
-do_comp(List,List2,F1,F2) ->
- X = F1(List,List2),
- Y = F2(List,List2),
- case X =:= Y of
- false ->
- exit({not_matching,List,List2,X,Y});
- _ ->
- true
- end.
compare(0,Fun1,Fun2,_Enc) ->
do_comp(<<>>,Fun1,Fun2),
@@ -247,25 +233,3 @@ compare(N,Fun1,Fun2,Enc) ->
L = random_unicode_list(N,Enc),
do_comp(L,Fun1,Fun2),
compare(N-1,Fun1,Fun2,Enc).
-
-compare2(0,Fun1,Fun2) ->
- L = random_unicode_list(100,utf8),
- do_comp(<<>>,L,Fun1,Fun2),
- do_comp(L,<<>>,Fun1,Fun2),
- do_comp(<<>>,<<>>,Fun1,Fun2),
- do_comp([],L,Fun1,Fun2),
- do_comp(L,[],Fun1,Fun2),
- do_comp([],[],Fun1,Fun2),
- do_comp([[]|<<>>],L,Fun1,Fun2),
- do_comp(L,[[]|<<>>],Fun1,Fun2),
- do_comp([[]|<<>>],[[]|<<>>],Fun1,Fun2),
- do_comp([<<>>,[]|<<>>],L,Fun1,Fun2),
- do_comp(L,[<<>>,[]|<<>>],Fun1,Fun2),
- do_comp([<<>>,[]|<<>>],[<<>>,[]|<<>>],Fun1,Fun2),
- true;
-
-compare2(N,Fun1,Fun2) ->
- L = random_unicode_list(N,utf8),
- L2 = random_unicode_list(N,utf8),
- do_comp(L,L2,Fun1,Fun2),
- compare2(N-1,Fun1,Fun2).
diff --git a/lib/stdlib/test/re_testoutput1_replacement_test.erl b/lib/stdlib/test/re_testoutput1_replacement_test.erl
index a40800d760..563e0001e4 100644
--- a/lib/stdlib/test/re_testoutput1_replacement_test.erl
+++ b/lib/stdlib/test/re_testoutput1_replacement_test.erl
@@ -18,7 +18,7 @@
%% %CopyrightEnd%
%%
-module(re_testoutput1_replacement_test).
--compile(export_all).
+-export([run/0]).
-compile(no_native).
%% This file is generated by running run_pcre_tests:gen_repl_test("re_SUITE_data/testoutput1")
run() ->
diff --git a/lib/stdlib/test/re_testoutput1_split_test.erl b/lib/stdlib/test/re_testoutput1_split_test.erl
index 02987971fa..b39cb53a55 100644
--- a/lib/stdlib/test/re_testoutput1_split_test.erl
+++ b/lib/stdlib/test/re_testoutput1_split_test.erl
@@ -18,7 +18,7 @@
%% %CopyrightEnd%
%%
-module(re_testoutput1_split_test).
--compile(export_all).
+-export([run/0]).
-compile(no_native).
%% This file is generated by running run_pcre_tests:gen_split_test("re_SUITE_data/testoutput1")
join([]) -> [];
diff --git a/lib/stdlib/test/run_pcre_tests.erl b/lib/stdlib/test/run_pcre_tests.erl
index ae56db59d6..b62674d6e0 100644
--- a/lib/stdlib/test/run_pcre_tests.erl
+++ b/lib/stdlib/test/run_pcre_tests.erl
@@ -18,8 +18,7 @@
%% %CopyrightEnd%
%%
-module(run_pcre_tests).
-
--compile(export_all).
+-export([test/1,gen_split_test/1,gen_repl_test/1]).
test(RootDir) ->
put(verbose,false),
@@ -119,49 +118,6 @@ test([{RE0,Line,Options0,Tests}|T],PreCompile,XMode,REAsList) ->
end
end.
-loopexec(_,_,X,Y,_,_) when X > Y ->
- {match,[]};
-loopexec(P,Chal,X,Y,Unicode,Xopt) ->
- case re:run(Chal,P,[{offset,X}]++Xopt) of
- nomatch ->
- {match,[]};
- {match,[{A,B}|More]} ->
- {match,Rest} =
- case B>0 of
- true ->
- loopexec(P,Chal,A+B,Y,Unicode,Xopt);
- false ->
- {match,M} = case re:run(Chal,P,[{offset,X},notempty,anchored]++Xopt) of
- nomatch ->
- {match,[]};
- {match,Other} ->
- {match,fixup(Chal,Other,0)}
- end,
- NewA = forward(Chal,A,1,Unicode),
- {match,MM} = loopexec(P,Chal,NewA,Y,Unicode,Xopt),
- {match,M ++ MM}
- end,
- {match,fixup(Chal,[{A,B}|More],0)++Rest}
- end.
-
-forward(_Chal,A,0,_) ->
- A;
-forward(_Chal,A,N,false) ->
- A+N;
-forward(Chal,A,N,true) ->
- <<_:A/binary,Tl/binary>> = Chal,
- Forw = case Tl of
- <<1:1,1:1,0:1,_:5,_/binary>> ->
- 2;
- <<1:1,1:1,1:1,0:1,_:4,_/binary>> ->
- 3;
- <<1:1,1:1,1:1,1:1,0:1,_:3,_/binary>> ->
- 4;
- _ ->
- 1
- end,
- forward(Chal,A+Forw,N-1,true).
-
contains_eightbit(<<>>) ->
false;
contains_eightbit(<<X:8,_/binary>>) when X >= 128 ->
@@ -201,23 +157,6 @@ clean_duplicates([X|T],L) ->
end.
-global_fixup(_,nomatch) ->
- nomatch;
-global_fixup(P,{match,M}) ->
- {match,lists:flatten(global_fixup2(P,M))}.
-
-global_fixup2(_,[]) ->
- [];
-global_fixup2(P,[H|T]) ->
- [gfixup_one(P,0,H)|global_fixup2(P,T)].
-
-gfixup_one(_,_,[]) ->
- [];
-gfixup_one(P,I,[{Start,Len}|T]) ->
- <<_:Start/binary,R:Len/binary,_/binary>> = P,
- [{I,R}|gfixup_one(P,I+1,T)].
-
-
press([]) ->
[];
press([H|T]) ->
@@ -981,7 +920,7 @@ gen_split_test(OneFile) ->
ErlFileName = ErlModule++".erl",
{ok,F}= file:open(ErlFileName,[write]),
io:format(F,"-module(~s).~n",[ErlModule]),
- io:format(F,"-compile(export_all).~n",[]),
+ io:format(F,"-export([run/0]).~n",[]),
io:format(F,"-compile(no_native).~n",[]),
io:format(F,"%% This file is generated by running ~w:gen_split_test(~p)~n",
[?MODULE,OneFile]),
@@ -1024,7 +963,7 @@ dumponesplit(F,{RE,Line,O,TS}) ->
"$x =~~ s/\\\\/\\\\\\\\/g; $x =~~ s/\\\"/\\\\\"/g; "
"print \" <<\\\"$x\\\">> = "
"iolist_to_binary(join(re:split(\\\"~s\\\","
- "\\\"~s\\\",~p))), \\n\";'~n",
+ "\\\"~s\\\",~p))),\\n\";'~n",
[zsafe(safe(RE)),
SSS,
ysafe(safe(Str)),
@@ -1035,7 +974,7 @@ dumponesplit(F,{RE,Line,O,TS}) ->
"$x =~~ s/\\\\/\\\\\\\\/g; $x =~~ s/\\\"/\\\\\"/g; "
"print \" <<\\\"$x\\\">> = "
"iolist_to_binary(join(re:split(\\\"~s\\\","
- "\\\"~s\\\",~p))), \\n\";'~n",
+ "\\\"~s\\\",~p))),\\n\";'~n",
[zsafe(safe(RE)),
SSS,
ysafe(safe(Str)),
@@ -1046,7 +985,7 @@ dumponesplit(F,{RE,Line,O,TS}) ->
"$x =~~ s/\\\\/\\\\\\\\/g; $x =~~ s/\\\"/\\\\\"/g; "
"print \" <<\\\"$x\\\">> = "
"iolist_to_binary(join(re:split(\\\"~s\\\","
- "\\\"~s\\\",~p))), \\n\";'~n",
+ "\\\"~s\\\",~p))),\\n\";'~n",
[zsafe(safe(RE)),
SSS,
ysafe(safe(Str)),
@@ -1071,7 +1010,7 @@ gen_repl_test(OneFile) ->
ErlFileName = ErlModule++".erl",
{ok,F}= file:open(ErlFileName,[write]),
io:format(F,"-module(~s).~n",[ErlModule]),
- io:format(F,"-compile(export_all).~n",[]),
+ io:format(F,"-export([run/0]).~n",[]),
io:format(F,"-compile(no_native).~n",[]),
io:format(F,"%% This file is generated by running ~w:gen_repl_test(~p)~n",
[?MODULE,OneFile]),
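Note: after this change the generated PCRE test modules export only run/0 instead of compiling with export_all. A minimal sketch of the head of such a generated module (the real run/0 body holds one clause of checks per test case; it returns ok here purely for illustration):

-module(re_testoutput1_split_test).
-export([run/0]).
-compile(no_native).
%% This file is generated by running run_pcre_tests:gen_split_test("re_SUITE_data/testoutput1")
run() ->
    ok.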
diff --git a/lib/stdlib/test/shell_SUITE.erl b/lib/stdlib/test/shell_SUITE.erl
index 15ccdea284..4864bc3d72 100644
--- a/lib/stdlib/test/shell_SUITE.erl
+++ b/lib/stdlib/test/shell_SUITE.erl
@@ -282,7 +282,7 @@ restricted_local(Config) when is_list(Config) ->
comm_err(<<"begin F=fun() -> hello end, foo(F) end.">>),
"exception error: undefined shell command banan/1" =
comm_err(<<"begin F=fun() -> hello end, banan(F) end.">>),
- "{error,"++_ = t(<<"begin F=fun() -> hello end, c(F) end.">>),
+ "Recompiling "++_ = t(<<"c(shell_SUITE).">>),
"exception exit: restricted shell does not allow l(" ++ _ =
comm_err(<<"begin F=fun() -> hello end, l(F) end.">>),
"exception error: variable 'F' is unbound" =
diff --git a/lib/stdlib/test/tar_SUITE.erl b/lib/stdlib/test/tar_SUITE.erl
index 6f3979bb77..d6b6d3f80c 100644
--- a/lib/stdlib/test/tar_SUITE.erl
+++ b/lib/stdlib/test/tar_SUITE.erl
@@ -22,9 +22,10 @@
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_group/2,end_per_group/2, borderline/1, atomic/1, long_names/1,
create_long_names/1, bad_tar/1, errors/1, extract_from_binary/1,
- extract_from_binary_compressed/1,
+ extract_from_binary_compressed/1, extract_filtered/1,
extract_from_open_file/1, symlinks/1, open_add_close/1, cooked_compressed/1,
- memory/1,unicode/1]).
+ memory/1,unicode/1,read_other_implementations/1,
+ sparse/1, init/1]).
-include_lib("common_test/include/ct.hrl").
-include_lib("kernel/include/file.hrl").
@@ -35,7 +36,10 @@ all() ->
[borderline, atomic, long_names, create_long_names,
bad_tar, errors, extract_from_binary,
extract_from_binary_compressed, extract_from_open_file,
- symlinks, open_add_close, cooked_compressed, memory, unicode].
+ extract_filtered,
+ symlinks, open_add_close, cooked_compressed, memory, unicode,
+ read_other_implementations,
+ sparse,init].
groups() ->
[].
@@ -84,17 +88,30 @@ borderline(Config) when is_list(Config) ->
ok.
borderline_test(Size, TempDir) ->
- Archive = filename:join(TempDir, "ar_"++integer_to_list(Size)++".tar"),
- Name = filename:join(TempDir, "file_"++integer_to_list(Size)),
io:format("Testing size ~p", [Size]),
+ borderline_test(Size, TempDir, true),
+ borderline_test(Size, TempDir, false),
+ ok.
+
+borderline_test(Size, TempDir, IsUstar) ->
+ Prefix = case IsUstar of
+ true ->
+ "file_";
+ false ->
+ lists:duplicate(100, $f) ++ "ile_"
+ end,
+ SizeList = integer_to_list(Size),
+ Archive = filename:join(TempDir, "ar_"++ SizeList ++".tar"),
+ Name = filename:join(TempDir, Prefix++SizeList),
%% Create a file and archive it.
X0 = erlang:monotonic_time(),
- file:write_file(Name, random_byte_list(X0, Size)),
+ ok = file:write_file(Name, random_byte_list(X0, Size)),
ok = erl_tar:create(Archive, [Name]),
ok = file:delete(Name),
%% Verify listing and extracting.
+ IsUstar = is_ustar(Archive),
{ok, [Name]} = erl_tar:table(Archive),
ok = erl_tar:extract(Archive, [verbose]),
@@ -103,7 +120,12 @@ borderline_test(Size, TempDir) ->
true = match_byte_list(X0, binary_to_list(Bin)),
%% Verify that Unix tar can read it.
- tar_tf(Archive, Name),
+ case IsUstar of
+ true ->
+ tar_tf(Archive, Name);
+ false ->
+ ok
+ end,
ok.
@@ -336,6 +358,7 @@ create_long_names() ->
ok = erl_tar:tt(TarName),
%% Extract and verify.
+ true = is_ustar(TarName),
ExtractDir = "extract_dir",
ok = file:make_dir(ExtractDir),
ok = erl_tar:extract(TarName, [{cwd,ExtractDir}]),
@@ -357,7 +380,7 @@ make_dirs([], Dir) ->
%% Try erl_tar:table/2 and erl_tar:extract/2 on some corrupted tar files.
bad_tar(Config) when is_list(Config) ->
try_bad("bad_checksum", bad_header, Config),
- try_bad("bad_octal", bad_header, Config),
+ try_bad("bad_octal", invalid_tar_checksum, Config),
try_bad("bad_too_short", eof, Config),
try_bad("bad_even_shorter", eof, Config),
ok.
@@ -370,8 +393,10 @@ try_bad(Name0, Reason, Config) ->
Name = Name0 ++ ".tar",
io:format("~nTrying ~s", [Name]),
Full = filename:join(DataDir, Name),
- Opts = [verbose, {cwd, PrivDir}],
+ Dest = filename:join(PrivDir, Name0),
+ Opts = [verbose, {cwd, Dest}],
Expected = {error, Reason},
+ io:fwrite("Expected: ~p\n", [Expected]),
case {erl_tar:table(Full, Opts), erl_tar:extract(Full, Opts)} of
{Expected, Expected} ->
io:format("Result: ~p", [Expected]),
@@ -493,6 +518,27 @@ extract_from_binary_compressed(Config) when is_list(Config) ->
ok.
+%% Test extracting selected files from a tar archive using the files option.
+extract_filtered(Config) when is_list(Config) ->
+ DataDir = proplists:get_value(data_dir, Config),
+ PrivDir = proplists:get_value(priv_dir, Config),
+ Long = filename:join(DataDir, "no_fancy_stuff.tar"),
+ ExtractDir = filename:join(PrivDir, "extract_from_binary"),
+ ok = file:make_dir(ExtractDir),
+
+ ok = erl_tar:extract(Long, [{cwd,ExtractDir},{files,["no_fancy_stuff/EPLICENCE"]}]),
+
+ %% Verify.
+ Dir = filename:join(ExtractDir, "no_fancy_stuff"),
+ true = filelib:is_dir(Dir),
+ false = filelib:is_file(filename:join(Dir, "a_dir_list")),
+ true = filelib:is_file(filename:join(Dir, "EPLICENCE")),
+
+ %% Clean up.
+ delete_files([ExtractDir]),
+
+ ok.
+
%% Test extracting a tar archive from an open file.
extract_from_open_file(Config) when is_list(Config) ->
DataDir = proplists:get_value(data_dir, Config),
@@ -573,6 +619,7 @@ symlinks(Dir, BadSymlink, PointsTo) ->
ok = file:write_file(AFile, ALine),
ok = file:make_symlink(AFile, GoodSymlink),
ok = erl_tar:create(Tar, [BadSymlink, GoodSymlink, AFile], [verbose]),
+ true = is_ustar(Tar),
%% List contents of tar file.
@@ -581,6 +628,7 @@ symlinks(Dir, BadSymlink, PointsTo) ->
%% Also create another archive with the dereference flag.
ok = erl_tar:create(DerefTar, [AFile, GoodSymlink], [dereference, verbose]),
+ true = is_ustar(DerefTar),
%% Extract files to a new directory.
@@ -619,13 +667,50 @@ long_symlink(Dir) ->
ok = file:set_cwd(Dir),
AFile = "long_symlink",
- FarTooLong = "/tmp/aarrghh/this/path/is/far/longer/than/one/hundred/characters/which/is/the/maximum/number/of/characters/allowed",
- ok = file:make_symlink(FarTooLong, AFile),
- {error,Error} = erl_tar:create(Tar, [AFile], [verbose]),
- io:format("Error: ~s\n", [erl_tar:format_error(Error)]),
- {FarTooLong,symbolic_link_too_long} = Error,
+ RequiresPAX = "/tmp/aarrghh/this/path/is/far/longer/than/one/hundred/characters/which/is/the/maximum/number/of/characters/allowed",
+ ok = file:make_symlink(RequiresPAX, AFile),
+ ok = erl_tar:create(Tar, [AFile], [verbose]),
+ false = is_ustar(Tar),
+ NewDir = filename:join(Dir, "extracted"),
+ _ = file:make_dir(NewDir),
+ ok = erl_tar:extract(Tar, [{cwd, NewDir}, verbose]),
+ ok = file:set_cwd(NewDir),
+ {ok, #file_info{type=symlink}} = file:read_link_info(AFile),
+ {ok, RequiresPAX} = file:read_link(AFile),
+ ok.
+
+init(Config) when is_list(Config) ->
+ PrivDir = proplists:get_value(priv_dir, Config),
+ ok = file:set_cwd(PrivDir),
+ Dir = filename:join(PrivDir, "init"),
+ ok = file:make_dir(Dir),
+
+ [{FileOne,_,_}|_] = oac_files(),
+ TarOne = filename:join(Dir, "archive1.tar"),
+ {ok,Fd} = file:open(TarOne, [write]),
+
+ %% If the arity of the fun is wrong, badarg should be returned
+ {error, badarg} = erl_tar:init(Fd, write, fun file_op_bad/1),
+
+ %% Otherwise we should be good to go
+ {ok, Tar} = erl_tar:init(Fd, write, fun file_op/2),
+ ok = erl_tar:add(Tar, FileOne, []),
+ ok = erl_tar:close(Tar),
+ {ok, [FileOne]} = erl_tar:table(TarOne),
ok.
+file_op_bad(_) ->
+ throw({error, should_never_be_called}).
+
+file_op(write, {Fd, Data}) ->
+ file:write(Fd, Data);
+file_op(position, {Fd, Pos}) ->
+ file:position(Fd, Pos);
+file_op(read2, {Fd, Size}) ->
+ file:read(Fd, Size);
+file_op(close, Fd) ->
+ file:close(Fd).
+
open_add_close(Config) when is_list(Config) ->
PrivDir = proplists:get_value(priv_dir, Config),
ok = file:set_cwd(PrivDir),
@@ -643,17 +728,26 @@ open_add_close(Config) when is_list(Config) ->
TarOne = filename:join(Dir, "archive1.tar"),
{ok,AD} = erl_tar:open(TarOne, [write]),
ok = erl_tar:add(AD, FileOne, []),
- ok = erl_tar:add(AD, FileTwo, "second file", []),
- ok = erl_tar:add(AD, FileThree, [verbose]),
+
+ %% Add with {NameInArchive,Name}
+ ok = erl_tar:add(AD, {"second file", FileTwo}, []),
+
+ %% Add with {binary, Bin}
+ {ok,FileThreeBin} = file:read_file(FileThree),
+ ok = erl_tar:add(AD, {FileThree, FileThreeBin}, [verbose]),
+
+ %% Add with Name
ok = erl_tar:add(AD, FileThree, "chunked", [{chunks,11411},verbose]),
ok = erl_tar:add(AD, ADir, [verbose]),
ok = erl_tar:add(AD, AnotherDir, [verbose]),
ok = erl_tar:close(AD),
+ true = is_ustar(TarOne),
ok = erl_tar:t(TarOne),
ok = erl_tar:tt(TarOne),
- {ok,[FileOne,"second file",FileThree,"chunked",ADir,SomeContent]} = erl_tar:table(TarOne),
+ Expected = {ok,[FileOne,"second file",FileThree,"chunked",ADir,SomeContent]},
+ Expected = erl_tar:table(TarOne),
delete_files(["oac_file","oac_small","oac_big",Dir,AnotherDir,ADir]),
@@ -718,6 +812,41 @@ memory(Config) when is_list(Config) ->
ok = delete_files([Name1,Name2]),
ok.
+read_other_implementations(Config) when is_list(Config) ->
+ DataDir = proplists:get_value(data_dir, Config),
+ Files = ["v7.tar", "gnu.tar", "bsd.tar",
+ "star.tar", "pax_mtime.tar"],
+ do_read_other_implementations(Files, DataDir).
+
+do_read_other_implementations([], _DataDir) ->
+ ok;
+do_read_other_implementations([File|Rest], DataDir) ->
+ io:format("~nTrying ~s", [File]),
+ Full = filename:join(DataDir, File),
+ {ok, _} = erl_tar:table(Full),
+ {ok, _} = erl_tar:extract(Full, [memory]),
+ do_read_other_implementations(Rest, DataDir).
+
+
+%% Test handling of sparse files
+sparse(Config) when is_list(Config) ->
+ DataDir = proplists:get_value(data_dir, Config),
+ PrivDir = proplists:get_value(priv_dir, Config),
+ Sparse01Empty = "sparse01_empty.tar",
+ Sparse01 = "sparse01.tar",
+ Sparse10Empty = "sparse10_empty.tar",
+ Sparse10 = "sparse10.tar",
+ do_sparse([Sparse01Empty, Sparse01, Sparse10Empty, Sparse10], DataDir, PrivDir).
+
+do_sparse([], _DataDir, _PrivDir) ->
+ ok;
+do_sparse([Name|Rest], DataDir, PrivDir) ->
+ io:format("~nTrying sparse file ~s", [Name]),
+ Full = filename:join(DataDir, Name),
+ {ok, [_]} = erl_tar:table(Full),
+ {ok, _} = erl_tar:extract(Full, [memory]),
+ do_sparse(Rest, DataDir, PrivDir).
+
%% Test filenames with characters outside the US ASCII range.
unicode(Config) when is_list(Config) ->
run_unicode_node(Config, "+fnu"),
@@ -753,6 +882,9 @@ do_unicode(PrivDir) ->
Names = lists:sort(unicode_create_files()),
Tar = "unicöde.tar",
ok = erl_tar:create(Tar, ["unicöde"], []),
+
+ %% Unicode filenames require PAX format.
+ false = is_ustar(Tar),
{ok,Names0} = erl_tar:table(Tar, []),
Names = lists:sort(Names0),
_ = [ok = file:delete(Name) || Name <- Names],
@@ -850,3 +982,15 @@ start_node(Name, Args) ->
ct:log("Node ~p started~n", [Node]),
Node
end.
+
+%% Check whether the given tar file is a plain USTAR archive,
+%% without any PAX extensions.
+is_ustar(File) ->
+ {ok,Bin} = file:read_file(File),
+ <<_:257/binary,"ustar",0,_/binary>> = Bin,
+ <<_:156/binary,Type:8,_/binary>> = Bin,
+ case Type of
+ $x -> false;
+ $g -> false;
+ _ -> true
+ end.
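Note: the new extract_filtered/1 test above exercises selective extraction. A minimal usage sketch outside the suite, assuming a hypothetical archive "no_fancy_stuff.tar" in the current directory:

%% Extract only the named member into "extract_dir"; all other
%% members of the archive are skipped.
ok = file:make_dir("extract_dir"),
ok = erl_tar:extract("no_fancy_stuff.tar",
                     [{cwd, "extract_dir"},
                      {files, ["no_fancy_stuff/EPLICENCE"]}]).

The read_other_implementations/1 and sparse/1 tests use the same erl_tar:extract/2 entry point with the memory option, which returns {ok, [{Name, Binary}]} instead of writing files to disk.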
diff --git a/lib/stdlib/test/tar_SUITE_data/bsd.tar b/lib/stdlib/test/tar_SUITE_data/bsd.tar
new file mode 100644
index 0000000000..8c31864be0
--- /dev/null
+++ b/lib/stdlib/test/tar_SUITE_data/bsd.tar
Binary files differ
diff --git a/lib/stdlib/test/tar_SUITE_data/gnu.tar b/lib/stdlib/test/tar_SUITE_data/gnu.tar
new file mode 100644
index 0000000000..60268065c1
--- /dev/null
+++ b/lib/stdlib/test/tar_SUITE_data/gnu.tar
Binary files differ
diff --git a/lib/stdlib/test/tar_SUITE_data/pax_mtime.tar b/lib/stdlib/test/tar_SUITE_data/pax_mtime.tar
new file mode 100644
index 0000000000..1b6e80ffac
--- /dev/null
+++ b/lib/stdlib/test/tar_SUITE_data/pax_mtime.tar
Binary files differ
diff --git a/lib/stdlib/test/tar_SUITE_data/sparse00.tar b/lib/stdlib/test/tar_SUITE_data/sparse00.tar
new file mode 100644
index 0000000000..61a04de90b
--- /dev/null
+++ b/lib/stdlib/test/tar_SUITE_data/sparse00.tar
Binary files differ
diff --git a/lib/stdlib/test/tar_SUITE_data/sparse01.tar b/lib/stdlib/test/tar_SUITE_data/sparse01.tar
new file mode 100644
index 0000000000..61a04de90b
--- /dev/null
+++ b/lib/stdlib/test/tar_SUITE_data/sparse01.tar
Binary files differ
diff --git a/lib/stdlib/test/tar_SUITE_data/sparse01_empty.tar b/lib/stdlib/test/tar_SUITE_data/sparse01_empty.tar
new file mode 100644
index 0000000000..efa6d060f4
--- /dev/null
+++ b/lib/stdlib/test/tar_SUITE_data/sparse01_empty.tar
Binary files differ
diff --git a/lib/stdlib/test/tar_SUITE_data/sparse10.tar b/lib/stdlib/test/tar_SUITE_data/sparse10.tar
new file mode 100644
index 0000000000..61a04de90b
--- /dev/null
+++ b/lib/stdlib/test/tar_SUITE_data/sparse10.tar
Binary files differ
diff --git a/lib/stdlib/test/tar_SUITE_data/sparse10_empty.tar b/lib/stdlib/test/tar_SUITE_data/sparse10_empty.tar
new file mode 100644
index 0000000000..efa6d060f4
--- /dev/null
+++ b/lib/stdlib/test/tar_SUITE_data/sparse10_empty.tar
Binary files differ
diff --git a/lib/stdlib/test/tar_SUITE_data/star.tar b/lib/stdlib/test/tar_SUITE_data/star.tar
new file mode 100644
index 0000000000..b0631e3b13
--- /dev/null
+++ b/lib/stdlib/test/tar_SUITE_data/star.tar
Binary files differ
diff --git a/lib/stdlib/test/tar_SUITE_data/v7.tar b/lib/stdlib/test/tar_SUITE_data/v7.tar
new file mode 100644
index 0000000000..9918e006bb
--- /dev/null
+++ b/lib/stdlib/test/tar_SUITE_data/v7.tar
Binary files differ
diff --git a/lib/syntax_tools/src/igor.erl b/lib/syntax_tools/src/igor.erl
index 72170ec5da..b92cd8d607 100644
--- a/lib/syntax_tools/src/igor.erl
+++ b/lib/syntax_tools/src/igor.erl
@@ -417,7 +417,7 @@ merge_files(Name, Files, Options) ->
%%
%% <dd>Specifies a list of rules for associating object files with
%% source files, to be passed to the function
-%% `filename:find_src/2'. This can be used to change the
+%% `filelib:find_source/2'. This can be used to change the
%% way Igor looks for source files. If this option is not specified,
%% the default system rules are used. The first occurrence of this
%% option completely overrides any later in the option list.</dd>
@@ -462,7 +462,7 @@ merge_files(Name, Files, Options) ->
%% @see merge/3
%% @see merge_files/3
%% @see merge_sources/3
-%% @see //stdlib/filename:find_src/2
+%% @see //stdlib/filelib:find_source/2
%% @see epp_dodger
-spec merge_files(atom(), erl_syntax:forms(), [file:filename()], [option()]) ->
@@ -2746,8 +2746,8 @@ read_module(Name, Options) ->
%% It seems that we have no file - go on anyway,
%% just to get a decent error message.
read_module_1(Name, Options);
- {Name1, _} ->
- read_module_1(Name1 ++ ".erl", Options)
+ {ok, Name1} ->
+ read_module_1(Name1, Options)
end
end.
@@ -2807,9 +2807,9 @@ check_forms([], _) ->
ok.
find_src(Name, undefined) ->
- filename:find_src(filename(Name));
+ filelib:find_source(filename(Name));
find_src(Name, Rules) ->
- filename:find_src(filename(Name), Rules).
+ filelib:find_source(filename(Name), Rules).
%% file_type(filename()) -> {value, Type} | none
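Note: filename:find_src/1,2 returned {SourceFile, Options} with SourceFile given without the ".erl" extension, whereas filelib:find_source/1,2 returns {ok, SourceFile} or {error, Reason} with the complete file name; that is why read_module/2 above now matches {ok, Name1} and no longer appends ".erl". A minimal sketch, assuming a hypothetical object file path:

case filelib:find_source("/some/path/ebin/igor.beam") of
    {ok, SrcFile} ->
        %% SrcFile is the complete source path, e.g. ".../src/igor.erl"
        {ok, SrcFile};
    {error, not_found} ->
        %% No source found; igor falls back to reading the module anyway
        {error, not_found}
end.

filelib:find_source/2 also accepts a list of suffix/directory rules, as the updated option documentation above describes.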
diff --git a/lib/tools/emacs/erlang-edoc.el b/lib/tools/emacs/erlang-edoc.el
index 2801aa8ae7..d0dcc81028 100644
--- a/lib/tools/emacs/erlang-edoc.el
+++ b/lib/tools/emacs/erlang-edoc.el
@@ -36,7 +36,7 @@
"Tags that can be used anywhere within a module.")
(defvar erlang-edoc-overview-tags
- '("author" "copyright" "reference" "see" "since" "title" "version")
+ '("author" "copyright" "doc" "reference" "see" "since" "title" "version")
"Tags that can be used in an overview file.")
(defvar erlang-edoc-module-tags
@@ -45,8 +45,8 @@
"Tags that can be used before a module declaration.")
(defvar erlang-edoc-function-tags
- '("deprecated" "doc" "equiv" "hidden" "private" "see" "since" "spec"
- "throws" "type")
+ '("deprecated" "doc" "equiv" "hidden" "param" "private" "returns"
+ "see" "since" "spec" "throws" "type")
"Tags that can be used before a function definition.")
(defvar erlang-edoc-predefined-macros
@@ -169,4 +169,10 @@
(jit-lock-refontify))
(provide 'erlang-edoc)
+
+;; Local variables:
+;; coding: utf-8
+;; indent-tabs-mode: nil
+;; End:
+
;;; erlang-edoc.el ends here
diff --git a/lib/tools/emacs/erlang-eunit.el b/lib/tools/emacs/erlang-eunit.el
index 3b85e6680a..38c40927f4 100644
--- a/lib/tools/emacs/erlang-eunit.el
+++ b/lib/tools/emacs/erlang-eunit.el
@@ -68,7 +68,7 @@ buffer and vice versa"
;;;
(defun erlang-eunit-open-src-file-other-window (test-file-path)
"Open the src file which corresponds to the an EUnit test file"
- (find-file-other-window (erlang-eunit-src-filename test-file-path)))
+ (find-file-other-window (erlang-eunit-src-filename test-file-path)))
;;; Return the name and path of the EUnit test file
;;, (input may be either the source filename itself or the EUnit test filename)
@@ -154,7 +154,7 @@ buffer and vice versa"
;;; Join filenames
(defun filename-join (dir file)
(if (or (= (elt file 0) ?/)
- (= (car (last (append dir nil))) ?/))
+ (= (car (last (append dir nil))) ?/))
(concat dir file)
(concat dir "/" file)))
@@ -299,7 +299,7 @@ With prefix arg, compiles for debug and runs tests with the verbose flag set."
;;; Compile source and EUnit test file and finally run EUnit tests for
;;; the current module
(defun erlang-eunit-compile-and-test (test-fun test-args &optional under-cover)
- "Compile the source and test files and run the EUnit test suite.
+ "Compile the source and test files and run the EUnit test suite.
If under-cover is set to t, the module under test is compile for
code coverage analysis. If under-cover is left out or not set,
@@ -311,7 +311,7 @@ and the number of times each line is covered).
With prefix arg, compiles for debug and runs tests with the verbose flag set."
(erlang-eunit-record-recent-compile under-cover)
(let ((src-filename (erlang-eunit-src-filename buffer-file-name))
- (test-filename (erlang-eunit-test-filename buffer-file-name)))
+ (test-filename (erlang-eunit-test-filename buffer-file-name)))
;; The purpose of out-maneuvering `save-some-buffers', as is done
;; below, is to ask the question about saving buffers only once,
@@ -326,9 +326,9 @@ With prefix arg, compiles for debug and runs tests with the verbose flag set."
;; be placed in the source file instead. Any compilation error
;; will prevent the subsequent steps to be run (hence the `and')
(and (erlang-eunit-compile-file src-filename under-cover)
- (if (file-readable-p test-filename)
- (erlang-eunit-compile-file test-filename)
- t)
+ (if (file-readable-p test-filename)
+ (erlang-eunit-compile-file test-filename)
+ t)
(apply test-fun test-args)
(if under-cover
(save-excursion
@@ -381,16 +381,16 @@ With prefix arg, compiles for debug and runs tests with the verbose flag set."
(goto-char compilation-parsing-end)
(erlang-eunit-all-list-elems-fulfill-p
(lambda (re) (let ((continue t)
- (result t))
- (while continue ; ignore warnings, stop at errors
- (if (re-search-forward re (point-max) t)
- (if (erlang-eunit-is-compilation-warning)
- t
- (setq result nil)
- (setq continue nil))
- (setq result t)
- (setq continue nil)))
- result))
+ (result t))
+ (while continue ; ignore warnings, stop at errors
+ (if (re-search-forward re (point-max) t)
+ (if (erlang-eunit-is-compilation-warning)
+ t
+ (setq result nil)
+ (setq continue nil))
+ (setq result t)
+ (setq continue nil)))
+ result))
(mapcar (lambda (e) (car e)) erlang-error-regexp-alist))))
(defun erlang-eunit-is-compilation-warning ()
@@ -402,7 +402,7 @@ With prefix arg, compiles for debug and runs tests with the verbose flag set."
(let ((matches-p t))
(while (and list matches-p)
(if (not (funcall pred (car list)))
- (setq matches-p nil))
+ (setq matches-p nil))
(setq list (cdr list)))
matches-p))
@@ -439,15 +439,21 @@ With prefix arg, compiles for debug and runs tests with the verbose flag set."
(defun erlang-eunit-ensure-keymap-for-key (key-seq)
(let ((prefix-keys (butlast (append key-seq nil)))
- (prefix-seq ""))
+ (prefix-seq ""))
(while prefix-keys
(setq prefix-seq (concat prefix-seq (make-string 1 (car prefix-keys))))
(setq prefix-keys (cdr prefix-keys))
(if (not (keymapp (lookup-key (current-local-map) prefix-seq)))
- (local-set-key prefix-seq (make-sparse-keymap))))))
+ (local-set-key prefix-seq (make-sparse-keymap))))))
(add-hook 'erlang-mode-hook 'erlang-eunit-add-key-bindings)
(provide 'erlang-eunit)
-;; erlang-eunit ends here
+
+;; Local variables:
+;; coding: utf-8
+;; indent-tabs-mode: nil
+;; End:
+
+;; erlang-eunit.el ends here
diff --git a/lib/tools/emacs/erlang-pkg.el b/lib/tools/emacs/erlang-pkg.el
index 4d0aa6fcd3..02d6bebbf4 100644
--- a/lib/tools/emacs/erlang-pkg.el
+++ b/lib/tools/emacs/erlang-pkg.el
@@ -1,3 +1,3 @@
(define-package "erlang" "2.7.0"
- "Erlang major mode"
- '())
+ "Erlang major mode"
+ '((emacs "24.1")))
diff --git a/lib/tools/emacs/erlang-start.el b/lib/tools/emacs/erlang-start.el
index 160057e179..c35f280bf4 100644
--- a/lib/tools/emacs/erlang-start.el
+++ b/lib/tools/emacs/erlang-start.el
@@ -39,7 +39,7 @@
;;
;; Please state as exactly as possible:
;; - Version number of Erlang Mode (see the menu), Emacs, Erlang,
-;; and of any other relevant software.
+;; and of any other relevant software.
;; - What the expected result was.
;; - What you did, preferably in a repeatable step-by-step form.
;; - A description of the unexpected result.
@@ -60,7 +60,7 @@
;;
(autoload 'erlang-mode "erlang" "Major mode for editing Erlang code." t)
-(autoload 'erlang-version "erlang"
+(autoload 'erlang-version "erlang"
"Return the current version of Erlang mode." t)
(autoload 'erlang-shell "erlang" "Start a new Erlang shell." t)
(autoload 'run-erlang "erlang" "Start a new Erlang shell." t)
@@ -68,7 +68,7 @@
(autoload 'erlang-compile "erlang"
"Compile Erlang module in current buffer." t)
-(autoload 'erlang-man-module "erlang"
+(autoload 'erlang-man-module "erlang"
"Find manual page for MODULE." t)
(autoload 'erlang-man-function "erlang"
"Find manual page for NAME, where NAME is module:function." t)
@@ -108,25 +108,22 @@ A function suitable for `eldoc-documentation-function'.\n\n(fn)" nil nil)
;;
;; Associate files using interpreter "escript" with Erlang mode.
-;;
+;;
;;;###autoload
(add-to-list 'interpreter-mode-alist (cons "escript" 'erlang-mode))
;;
;; Ignore files ending in ".jam", ".vee", and ".beam" when performing
-;; file completion.
+;; file completion and in dired omit mode.
;;
;;;###autoload
(let ((erl-ext '(".jam" ".vee" ".beam")))
(while erl-ext
- (let ((cie completion-ignored-extensions))
- (while (and cie (not (string-equal (car cie) (car erl-ext))))
- (setq cie (cdr cie)))
- (if (null cie)
- (setq completion-ignored-extensions
- (cons (car erl-ext) completion-ignored-extensions))))
+ (add-to-list 'completion-ignored-extensions (car erl-ext))
+ (when (boundp 'dired-omit-extensions)
+ (add-to-list 'dired-omit-extensions (car erl-ext)))
(setq erl-ext (cdr erl-ext))))
@@ -136,4 +133,9 @@ A function suitable for `eldoc-documentation-function'.\n\n(fn)" nil nil)
(provide 'erlang-start)
+;; Local variables:
+;; coding: utf-8
+;; indent-tabs-mode: nil
+;; End:
+
;; erlang-start.el ends here.
diff --git a/lib/tools/emacs/erlang-test.el b/lib/tools/emacs/erlang-test.el
index ba6190d194..ea5d637199 100644
--- a/lib/tools/emacs/erlang-test.el
+++ b/lib/tools/emacs/erlang-test.el
@@ -2,7 +2,7 @@
;;; Unit tests for erlang.el.
-;; Author: Johan Claesson
+;; Author: Johan Claesson
;; Created: 2016-05-07
;; Keywords: erlang, languages
@@ -28,6 +28,27 @@
;;; Commentary:
;; This library require GNU Emacs 25 or later.
+;;
+;; There are two ways to run emacs unit tests.
+;;
+;; 1. Within a running emacs process. Load this file. Then to run
+;; all defined test cases:
+;;
+;; M-x ert RET t RET
+;;
+;; To run only the erlang test cases:
+;;
+;; M-x ert RET "^erlang" RET
+;;
+;;
+;; 2. In a new stand-alone emacs process. This process exits
+;; when it has executed the tests. For example:
+;;
+;; emacs -Q -batch -L . -l erlang.el -l erlang-test.el \
+;; -f ert-run-tests-batch-and-exit
+;;
+;; The -L option adds a directory to the load-path. It should be the
+;; directory containing erlang.el and erlang-test.el.
;;; Code:
@@ -59,11 +80,12 @@ concatenated to form an erlang file to test on.")
tags-file-name
tags-table-list
tags-table-set-list
+ tags-add-tables
+ tags-completion-table
erlang-buffer
erlang-mode-hook
prog-mode-hook
- erlang-shell-mode-hook
- tags-add-tables)
+ erlang-shell-mode-hook)
(unwind-protect
(progn
(setq-default tags-file-name nil)
@@ -71,11 +93,14 @@ concatenated to form an erlang file to test on.")
(erlang-test-create-erlang-file erlang-file)
(erlang-test-compile-tags erlang-file tags-file)
(setq erlang-buffer (find-file-noselect erlang-file))
- (with-current-buffer erlang-buffer
- (setq-local tags-file-name tags-file))
- ;; Setting global tags-file-name is a workaround for
- ;; GNU Emacs bug#23164.
- (setq tags-file-name tags-file)
+ (if (< emacs-major-version 26)
+ (progn
+ (with-current-buffer erlang-buffer
+ (setq-local tags-file-name tags-file))
+ ;; Setting global tags-file-name is a workaround for
+ ;; GNU Emacs bug#23164.
+ (setq tags-file-name tags-file))
+ (visit-tags-table tags-file t))
(erlang-test-complete-at-point tags-file)
(erlang-test-completion-table)
(erlang-test-xref-find-definitions erlang-file erlang-buffer))
@@ -117,12 +142,20 @@ concatenated to form an erlang file to test on.")
for line = 1 then (1+ line)
do (when tagname
(switch-to-buffer erlang-buffer)
- (xref-find-definitions tagname)
- (erlang-test-verify-pos erlang-file line)
- (xref-find-definitions (concat "erlang_test:" tagname))
- (erlang-test-verify-pos erlang-file line)))
- (xref-find-definitions "erlang_test:")
- (erlang-test-verify-pos erlang-file 1))
+ (erlang-test-xref-jump tagname erlang-file line)
+ (erlang-test-xref-jump (concat "erlang_test:" tagname)
+ erlang-file line)))
+ (erlang-test-xref-jump "erlang_test:" erlang-file 1))
+
+(defun erlang-test-xref-jump (id expected-file expected-line)
+ (goto-char (point-max))
+ (insert "\n%% " id)
+ (save-buffer)
+ (if (fboundp 'xref-find-definitions)
+ (xref-find-definitions (erlang-id-to-string
+ (erlang-get-identifier-at-point)))
+ (error "xref-find-definitions not defined (too old emacs?)"))
+ (erlang-test-verify-pos expected-file expected-line))
(defun erlang-test-verify-pos (expected-file expected-line)
(should (string-equal (file-truename expected-file)
@@ -136,13 +169,13 @@ concatenated to form an erlang file to test on.")
(setq-local tags-file-name tags-file)
(insert "\nerlang_test:fun")
(erlang-complete-tag)
- (should (looking-back "erlang_test:function"))
+ (should (looking-back "erlang_test:function" (point-at-bol)))
(insert "\nfun")
(erlang-complete-tag)
- (should (looking-back "function"))
+ (should (looking-back "function" (point-at-bol)))
(insert "\nerlang_")
(erlang-complete-tag)
- (should (looking-back "erlang_test:"))))
+ (should (looking-back "erlang_test:" (point-at-bol)))))
(ert-deftest erlang-test-compile-options ()
@@ -179,6 +212,30 @@ concatenated to form an erlang file to test on.")
erlang))
+(ert-deftest erlang-test-parse-id ()
+ (cl-loop for id-string in '("fun/10"
+ "qualified-function module:fun/10"
+ "record reko"
+ "macro _SYMBOL"
+ "macro MACRO/10"
+ "module modula"
+ "macro"
+ nil)
+ for id-list in '((nil nil "fun" 10)
+ (qualified-function "module" "fun" 10)
+ (record nil "reko" nil)
+ (macro nil "_SYMBOL" nil)
+ (macro nil "MACRO" 10)
+ (module nil "modula" nil)
+ (nil nil "macro" nil)
+ nil)
+ for id-list2 = (erlang-id-to-list id-string)
+ do (should (equal id-list id-list2))
+ for id-string2 = (erlang-id-to-string id-list)
+ do (should (equal id-string id-string2))
+ collect id-list2))
+
+
(provide 'erlang-test)
;;; erlang-test.el ends here
diff --git a/lib/tools/emacs/erlang.el b/lib/tools/emacs/erlang.el
index 51f7e8e26c..59b20c552e 100644
--- a/lib/tools/emacs/erlang.el
+++ b/lib/tools/emacs/erlang.el
@@ -4,6 +4,8 @@
;; Author: Anders Lindgren
;; Keywords: erlang, languages, processes
;; Date: 2011-12-11
+;; Version: 2.7.0
+;; Package-Requires: ((emacs "24.1"))
;; %CopyrightBegin%
;;
@@ -24,7 +26,7 @@
;; %CopyrightEnd%
;;
-;; Lars Thors�n's modifications of 2000-06-07 included.
+;; Lars Thorsén's modifications of 2000-06-07 included.
;; The original version of this package was written by Robert Virding.
;;
;;; Commentary:
@@ -85,30 +87,15 @@
(defconst erlang-version "2.7"
"The version number of Erlang mode.")
-(defvar erlang-root-dir nil
+(defcustom erlang-root-dir nil
"The directory where the Erlang system is installed.
The name should not contain the trailing slash.
Should this variable be nil, no manual pages will show up in the
-Erlang mode menu.")
-
-(eval-and-compile
- (defconst erlang-emacs-major-version
- (if (boundp 'emacs-major-version)
- emacs-major-version
- (string-match "\\([0-9]+\\)\\.\\([0-9]+\\)" emacs-version)
- (erlang-string-to-int (substring emacs-version
- (match-beginning 1) (match-end 1))))
- "Major version number of Emacs."))
-
-(eval-and-compile
- (defconst erlang-emacs-minor-version
- (if (boundp 'emacs-minor-version)
- emacs-minor-version
- (string-match "\\([0-9]+\\)\\.\\([0-9]+\\)" emacs-version)
- (erlang-string-to-int (substring emacs-version
- (match-beginning 2) (match-end 2))))
- "Minor version number of Emacs."))
+Erlang mode menu."
+ :group 'erlang
+ :type '(restricted-sexp :match-alternatives (stringp 'nil))
+ :safe (lambda (val) (or (eq nil val) (stringp val))))
(defconst erlang-xemacs-p (string-match "Lucid\\|XEmacs" emacs-version)
"Non-nil when running under XEmacs or Lucid Emacs.")
@@ -129,7 +116,7 @@ Never EVER set this variable!")
erlang-menu-man-items
erlang-menu-personal-items
erlang-menu-version-items)
- "*List of menu item list to combine to create Erlang mode menu.
+ "List of menu item list to combine to create Erlang mode menu.
External programs which temporarily add menu items to the Erlang mode
menu may use this variable. Please use the function `add-hook' to add
@@ -238,7 +225,7 @@ This variable is added to the list of Erlang menus stored in
The menu is in the form described by the variable `erlang-menu-base-items'.")
(defvar erlang-mode-hook nil
- "*Functions to run when Erlang mode is activated.
+ "Functions to run when Erlang mode is activated.
This hook is used to change the behaviour of Erlang mode. It is
normally used by the user to personalise the programming environment.
@@ -272,7 +259,7 @@ To use the example, copy the following lines to your `~/.emacs' file:
(imenu-add-to-menubar \"Imenu\")))")
(defvar erlang-load-hook nil
- "*Functions to run when Erlang mode is loaded.
+ "Functions to run when Erlang mode is loaded.
This hook is used to change the behaviour of Erlang mode. It is
normally used by the user to personalise the programming environment.
@@ -304,17 +291,20 @@ manual pages can be retrieved (note that you must set the value of
A useful function is `tempo-template-erlang-normal-header'.
\(This function only exists when the `tempo' package is available.)")
-(defvar erlang-check-module-name 'ask
- "*Non-nil means check that module name and file name agrees when saving.
+(defcustom erlang-check-module-name 'ask
+ "Non-nil means check that module name and file name agrees when saving.
-If the value of this variable is the atom `ask', the user is
-prompted. If the value is t the source is silently changed.")
+If the value of this variable is the symbol `ask', the user is
+prompted. If the value is t the source is silently changed."
+ :group 'erlang
+ :type '(choice (const :tag "Check on save" 'ask)
+ (const :tag "Don't check on save" t)))
(defvar erlang-electric-commands
'(erlang-electric-comma
erlang-electric-semicolon
erlang-electric-gt)
- "*List of activated electric commands.
+ "List of activated electric commands.
The list should contain the electric commands which should be active.
Currently, the available electric commands are:
@@ -328,8 +318,8 @@ are activated.
To deactivate all electric commands, set this variable to nil.")
-(defvar erlang-electric-newline-inhibit t
- "*Set to non-nil to inhibit newline after electric command.
+(defcustom erlang-electric-newline-inhibit t
+ "Set to non-nil to inhibit newline after electric command.
This is useful since a lot of people press return after executing an
electric command.
@@ -339,28 +329,32 @@ list `erlang-electric-newline-inhibit-list'.
Note that commands in this list are required to set the variable
`erlang-electric-newline-inhibit' to nil when the newline shouldn't be
-inhibited.")
+inhibited."
+ :group 'erlang
+ :type 'boolean
+ :safe 'booleanp)
(defvar erlang-electric-newline-inhibit-list
'(erlang-electric-semicolon
erlang-electric-comma
erlang-electric-gt)
- "*Commands which can inhibit the next newline.")
+ "Commands which can inhibit the next newline.")
-(defvar erlang-electric-semicolon-insert-blank-lines nil
- "*Number of blank lines inserted before header, or nil.
+(defcustom erlang-electric-semicolon-insert-blank-lines nil
+ "Number of blank lines inserted before header, or nil.
This variable controls the behaviour of `erlang-electric-semicolon'
when a new function header is generated. When nil, no blank line is
inserted between the current line and the new header. When bound to a
number it represents the number of blank lines which should be
-inserted.")
+inserted."
+ :group 'erlang)
(defvar erlang-electric-semicolon-criteria
'(erlang-next-lines-empty-p
erlang-at-keyword-end-p
erlang-at-end-of-function-p)
- "*List of functions controlling `erlang-electric-semicolon'.
+ "List of functions controlling `erlang-electric-semicolon'.
The functions in this list are called, in order, whenever a semicolon
is typed. Each function in the list is called with no arguments,
and should return one of the following values:
@@ -381,7 +375,7 @@ The test is performed by the function `erlang-test-criteria-list'.")
erlang-at-keyword-end-p
erlang-at-end-of-clause-p
erlang-at-end-of-function-p)
- "*List of functions controlling `erlang-electric-comma'.
+ "List of functions controlling `erlang-electric-comma'.
The functions in this list are called, in order, whenever a comma
is typed. Each function in the list is called with no arguments,
and should return one of the following values:
@@ -399,7 +393,7 @@ The test is performed by the function `erlang-test-criteria-list'.")
'(erlang-stop-when-in-type-spec
erlang-next-lines-empty-p
erlang-at-end-of-function-p)
- "*List of functions controlling the arrow aspect of `erlang-electric-gt'.
+ "List of functions controlling the arrow aspect of `erlang-electric-gt'.
The functions in this list are called, in order, whenever a `>'
is typed. Each function in the list is called with no arguments,
and should return one of the following values:
@@ -415,7 +409,7 @@ The test is performed by the function `erlang-test-criteria-list'.")
(defvar erlang-electric-newline-criteria
'(t)
- "*List of functions controlling `erlang-electric-newline'.
+ "List of functions controlling `erlang-electric-newline'.
The electric newline commands indents the next line. Should the
current line begin with a comment the comment start is copied to
@@ -435,8 +429,8 @@ list, it is treated as a function triggering the electric command.
The test is performed by the function `erlang-test-criteria-list'.")
-(defvar erlang-next-lines-empty-threshold 2
- "*Number of blank lines required to activate an electric command.
+(defcustom erlang-next-lines-empty-threshold 2
+ "Number of blank lines required to activate an electric command.
Actually, this value controls the behaviour of the function
`erlang-next-lines-empty-p' which normally is a member of the
@@ -457,46 +451,67 @@ function `erlang-next-lines-empty-p' would be removed from the criteria
lists.
Note that even if `erlang-next-lines-empty-p' should not trigger an
-electric command, other functions in the criteria list could.")
+electric command, other functions in the criteria list could."
+ :group 'erlang
+ :type '(restricted-sexp :match-alternatives (integerp 'nil))
+ :safe (lambda (val) (or (eq val nil) (integerp val))))
-(defvar erlang-new-clause-with-arguments nil
- "*Non-nil means that the arguments are cloned when a clause is generated.
+(defcustom erlang-new-clause-with-arguments nil
+ "Non-nil means that the arguments are cloned when a clause is generated.
A new function header can be generated by calls to the function
-`erlang-generate-new-clause' and by use of the electric semicolon.")
+`erlang-generate-new-clause' and by use of the electric semicolon."
+ :group 'erlang
+ :type 'boolean
+ :safe 'booleanp)
-(defvar erlang-compile-use-outdir t
- "*When nil, go to the directory containing source file when compiling.
+(defcustom erlang-compile-use-outdir t
+ "When nil, go to the directory containing source file when compiling.
This is a workaround for a bug in the `outdir' option of compile. If the
outdir is not in the current load path, Erlang doesn't load the object
module after it has been compiled.
To activate the workaround, place the following in your `~/.emacs' file:
- (setq erlang-compile-use-outdir nil)")
-
-(defvar erlang-indent-level 4
- "*Indentation of Erlang calls/clauses within blocks.")
-(put 'erlang-indent-level 'safe-local-variable 'integerp)
-
-(defvar erlang-icr-indent nil
- "*Indentation of Erlang if/case/receive/ patterns. `nil' means
- keeping default behavior. When non-nil, indent to th column of
- if/case/receive.")
-
-(defvar erlang-indent-guard 2
- "*Indentation of Erlang guards.")
-(put 'erlang-indent-guard 'safe-local-variable 'integerp)
-
-(defvar erlang-argument-indent 2
- "*Indentation of the first argument in a function call.
+ (setq erlang-compile-use-outdir nil)"
+ :group 'erlang
+ :type 'boolean
+ :safe 'booleanp)
+
+(defcustom erlang-indent-level 4
+ "Indentation of Erlang calls/clauses within blocks."
+ :group 'erlang
+ :type 'integer
+ :safe 'integerp)
+
+(defcustom erlang-icr-indent nil
+ "Indentation of Erlang if/case/receive patterns.
+nil means keep the default behavior. When non-nil, indent to the column of
+if/case/receive."
+ :group 'erlang
+ :type 'boolean
+ :safe 'booleanp)
+
+(defcustom erlang-indent-guard 2
+ "Indentation of Erlang guards."
+ :group 'erlang
+ :type 'integer
+ :safe 'integerp)
+
+(defcustom erlang-argument-indent 2
+ "Indentation of the first argument in a function call.
When nil, indent to the column after the `(' of the
-function.")
-(put 'erlang-argument-indent 'safe-local-variable '(lambda (val) (or (null val) (integerp val))))
-
-(defvar erlang-tab-always-indent t
- "*Non-nil means TAB in Erlang mode should always re-indent the current line,
-regardless of where in the line point is when the TAB command is used.")
+function."
+ :group 'erlang
+ :type '(restricted-sexp :match-alternatives (integerp 'nil))
+ :safe (lambda (val) (or (eq val nil) (integerp val))))
+
+(defcustom erlang-tab-always-indent t
+ "Non-nil means TAB in Erlang mode should always re-indent the current line,
+regardless of where in the line point is when the TAB command is used."
+ :group 'erlang
+ :type 'boolean
+ :safe 'booleanp)
(defvar erlang-man-inhibit (eq system-type 'windows-nt)
"Inhibit the creation of the Erlang Manual Pages menu.
@@ -509,7 +524,7 @@ there is no attempt to create the menu.")
("Man - Modules" "/man/man3" t)
("Man - Files" "/man/man4" t)
("Man - Applications" "/man/man6" t))
- "*The man directories displayed in the Erlang menu.
+ "The man directories displayed in the Erlang menu.
Each item in the list should be a list with three elements, the first
the name of the menu, the second the directory, and the last a flag.
@@ -517,17 +532,17 @@ Should the flag the nil, the directory is absolute, should it be non-nil
the directory is relative to the variable `erlang-root-dir'.")
(defvar erlang-man-max-menu-size 35
- "*The maximum number of menu items in one menu allowed.")
+ "The maximum number of menu items in one menu allowed.")
(defvar erlang-man-display-function 'erlang-man-display
- "*Function used to display man page.
+ "Function used to display man page.
The function is called with one argument, the name of the file
containing the man page. Use this variable when the default
function, `erlang-man-display', does not work on your system.")
(defvar erlang-compile-extra-opts '()
- "*Additional options to the compilation command.
+ "Additional options to the compilation command.
This is an elisp list of options. Each option can be either:
- an atom
- a dotted pair
@@ -539,7 +554,7 @@ Example: '(bin_opt_info (i . \"/path1/include\") (i . \"/path2/include\"))")
(".xrl\\'" . inferior-erlang-compute-leex-compile-command)
(".yrl\\'" . inferior-erlang-compute-yecc-compile-command)
("." . inferior-erlang-compute-erl-compile-command))
- "*Alist of filename patterns vs corresponding compilation functions.
+ "Alist of filename patterns vs corresponding compilation functions.
Each element looks like (REGEXP . FUNCTION). Compiling a file whose name
matches REGEXP specifies FUNCTION to use to compute the compilation
command. The FUNCTION will be called with two arguments: module name and
@@ -547,14 +562,14 @@ default compilation options, like output directory. The FUNCTION
is expected to return a string.")
(defvar erlang-leex-compile-opts '()
- "*Options to pass to leex when compiling xrl files.
+ "Options to pass to leex when compiling xrl files.
This is an elisp list of options. Each option can be either:
- an atom
- a dotted pair
- a string")
(defvar erlang-yecc-compile-opts '()
- "*Options to pass to yecc when compiling yrl files.
+ "Options to pass to yecc when compiling yrl files.
This is an elisp list of options. Each option can be either:
- an atom
- a dotted pair
@@ -562,7 +577,7 @@ This is an elisp list of options. Each option can be either:
(eval-and-compile
(defvar erlang-regexp-modern-p
- (if (> erlang-emacs-major-version 21) t nil)
+ (if (> emacs-major-version 21) t nil)
"Non-nil when this version of Emacs uses a modern version of regexp.
Supporting \_< and \_> This is determined by checking the version of Emacs used."))
@@ -608,6 +623,24 @@ The regexp must be surrounded with a pair of regexp parentheses."))
This is used to determine matches in complex regexps which contains
`erlang-variable-regexp'."))
+(defconst erlang-module-function-regexp
+ (eval-when-compile
+ (concat erlang-atom-regexp ":" erlang-atom-regexp))
+ "Regexp matching an erlang module:function.")
+
+(defconst erlang-name-regexp
+ (concat "\\("
+ "\\(?:\\sw\\|\\s_\\)+"
+ "\\|"
+ erlang-atom-quoted-regexp
+ "\\)")
+ "Matches a name of a function, macro or record")
+
+(defconst erlang-id-regexp
+ (concat "\\(?:\\(qualified-function\\|record\\|macro\\|module\\) \\)?"
+ "\\(?:" erlang-atom-regexp ":\\)?"
+ erlang-name-regexp "?"
+ "\\(?:/\\([0-9]+\\)\\)?"))
(eval-and-compile
(defun erlang-regexp-opt (strings &optional paren)
@@ -983,7 +1016,7 @@ resulting regexp is surrounded by \\_< and \\_>."
"Regexp which should match beginning of a clause.")
(defvar erlang-file-name-extension-regexp "\\.erl$"
- "*Regexp which should match an Erlang file name.
+ "Regexp which should match an Erlang file name.
This regexp is used when an Erlang module name is extracted from the
name of an Erlang source file.
@@ -997,7 +1030,7 @@ tags system should interpret tags on the form `module:tag' for
files written in other languages than Erlang.")
(defvar erlang-inferior-shell-split-window t
- "*If non-nil, when starting an inferior shell, split windows.
+ "If non-nil, when starting an inferior shell, split windows.
If nil, the inferior shell replaces the window. This is the traditional
behaviour.")
@@ -1043,7 +1076,7 @@ behaviour.")
(unless inferior-erlang-use-cmm
(define-key map "\C-x`" 'erlang-next-error))
map)
- "*Keymap used in Erlang mode.")
+ "Keymap used in Erlang mode.")
(defvar erlang-mode-abbrev-table nil
"Abbrev table in use in Erlang-mode buffers.")
(defvar erlang-mode-syntax-table nil
@@ -1310,29 +1343,6 @@ replaced by `erlang-etags-tags-completion-table'.")
;;; Avoid errors while compiling this file.
-;; `eval-when-compile' is not defined in Emacs 18. We define it as a
-;; no-op.
-(or (fboundp 'eval-when-compile)
- (defmacro eval-when-compile (&rest rest) nil))
-
-;; These umm...functions are new in Emacs 20. And, yes, until version
-;; 19.27 Emacs backquotes were this ugly.
-
-(or (fboundp 'unless)
- (defmacro unless (condition &rest body)
- "(unless CONDITION BODY...): If CONDITION is false, do BODY, else return nil."
- `((if (, condition) nil ,@body))))
-
-(or (fboundp 'when)
- (defmacro when (condition &rest body)
- "(when CONDITION BODY...): If CONDITION is true, do BODY, else return nil."
- `((if (, condition) (progn ,@body) nil))))
-
-(or (fboundp 'char-before)
- (defmacro char-before (&optional pos)
- "Return the character in the current buffer just before POS."
- `( (char-after (1- (or ,pos (point)))))))
-
;; defvar some obsolete variables, which we still support for
;; backwards compatibility reasons.
(eval-when-compile
@@ -1360,20 +1370,11 @@ replaced by `erlang-etags-tags-completion-table'.")
(defun erlang-version ()
"Return the current version of Erlang mode."
(interactive)
- (if (erlang-interactive-p)
+ (if (called-interactively-p 'interactive)
(message "Erlang mode version %s, written by Anders Lindgren"
erlang-version))
erlang-version)
-(defun erlang-interactive-p ()
- (if (fboundp 'called-interactively-p)
- (called-interactively-p 'interactive)
- (funcall (symbol-function 'interactive-p))))
-
-(unless (fboundp 'prog-mode)
- (defun prog-mode ()
- (use-local-map (make-keymap))))
-
;;;###autoload
(define-derived-mode erlang-mode prog-mode "Erlang"
"Major mode for editing Erlang source files in Emacs.
@@ -1462,40 +1463,43 @@ Other commands:
(add-to-list 'auto-mode-alist (cons r 'erlang-mode)))
(defun erlang-syntax-table-init ()
- (if (null erlang-mode-syntax-table)
- (let ((table (make-syntax-table)))
- (modify-syntax-entry ?\n ">" table)
- (modify-syntax-entry ?\" "\"" table)
- (modify-syntax-entry ?# "." table)
- ;; (modify-syntax-entry ?$ "\\" table) ;; Creates problems with indention afterwards
- ;; (modify-syntax-entry ?$ "'" table) ;; Creates syntax highlighting and indention problems
- (modify-syntax-entry ?$ "/" table) ;; Misses the corner case "string that ends with $"
- ;; we have to live with that for now..it is the best alternative
- ;; that can be worked around with "string hat ends with \$"
- (modify-syntax-entry ?% "<" table)
- (modify-syntax-entry ?& "." table)
- (modify-syntax-entry ?\' "\"" table)
- (modify-syntax-entry ?* "." table)
- (modify-syntax-entry ?+ "." table)
- (modify-syntax-entry ?- "." table)
- (modify-syntax-entry ?/ "." table)
- (modify-syntax-entry ?: "." table)
- (modify-syntax-entry ?< "." table)
- (modify-syntax-entry ?= "." table)
- (modify-syntax-entry ?> "." table)
- (modify-syntax-entry ?\\ "\\" table)
- (modify-syntax-entry ?_ "_" table)
- (modify-syntax-entry ?| "." table)
- (modify-syntax-entry ?^ "'" table)
-
- ;; Pseudo bit-syntax: Latin1 double angle quotes as parens.
- ;;(modify-syntax-entry ?\253 "(?\273" table)
- ;;(modify-syntax-entry ?\273 ")?\253" table)
-
- (setq erlang-mode-syntax-table table)))
-
+ (erlang-ensure-syntax-table-is-initialized)
(set-syntax-table erlang-mode-syntax-table))
+(defun erlang-ensure-syntax-table-is-initialized ()
+ (unless erlang-mode-syntax-table
+ (let ((table (make-syntax-table)))
+ (modify-syntax-entry ?\n ">" table)
+ (modify-syntax-entry ?\" "\"" table)
+ (modify-syntax-entry ?# "." table)
+ ;; (modify-syntax-entry ?$ "\\" table) ;; Creates problems with indention afterwards
+ ;; (modify-syntax-entry ?$ "'" table) ;; Creates syntax highlighting and indention problems
+ (modify-syntax-entry ?$ "/" table) ;; Misses the corner case "string that ends with $"
+ ;; we have to live with that for now. It is the best alternative
+ ;; that can be worked around with "string that ends with \$"
+ (modify-syntax-entry ?% "<" table)
+ (modify-syntax-entry ?& "." table)
+ (modify-syntax-entry ?\' "\"" table)
+ (modify-syntax-entry ?* "." table)
+ (modify-syntax-entry ?+ "." table)
+ (modify-syntax-entry ?- "." table)
+ (modify-syntax-entry ?/ "." table)
+ (modify-syntax-entry ?: "." table)
+ (modify-syntax-entry ?< "." table)
+ (modify-syntax-entry ?= "." table)
+ (modify-syntax-entry ?> "." table)
+ (modify-syntax-entry ?\\ "\\" table)
+ (modify-syntax-entry ?_ "_" table)
+ (modify-syntax-entry ?| "." table)
+ (modify-syntax-entry ?^ "'" table)
+
+ ;; Pseudo bit-syntax: Latin1 double angle quotes as parens.
+ ;;(modify-syntax-entry ?\253 "(?\273" table)
+ ;;(modify-syntax-entry ?\273 ")?\253" table)
+
+ (setq erlang-mode-syntax-table table))))
+
+
(defun erlang-electric-init ()
;; Set up electric character functions to work with
@@ -1541,7 +1545,7 @@ Other commands:
(make-local-variable 'indent-region-function)
(setq indent-region-function 'erlang-indent-region)
(set (make-local-variable 'comment-indent-function) 'erlang-comment-indent)
- (if (<= erlang-emacs-major-version 18)
+ (if (<= emacs-major-version 18)
(set (make-local-variable 'comment-indent-hook) 'erlang-comment-indent))
(set (make-local-variable 'parse-sexp-ignore-comments) t)
(set (make-local-variable 'dabbrev-case-fold-search) nil)
@@ -1778,7 +1782,7 @@ Please see the variable `erlang-menu-base-items'."
(if (and popup (boundp 'mode-popup-menu))
(funcall (symbol-function 'set)
'mode-popup-menu erlang-xemacs-popup-menu))))
- ((>= erlang-emacs-major-version 19)
+ ((>= emacs-major-version 19)
(define-key keymap (vector 'menu-bar (intern name))
(erlang-menu-make-keymap name items)))
(t nil)))
@@ -1961,7 +1965,9 @@ menu is left unchanged."
The variable `erlang-man-dirs' contains entries describing
the location of the manual pages."
(interactive)
- (if erlang-man-inhibit
+ (if (or erlang-man-inhibit
+ (and (boundp 'menu-bar-mode)
+ (not menu-bar-mode)))
()
(setq erlang-menu-man-items
'(nil
@@ -2000,7 +2006,7 @@ The format is described in the documentation of `erlang-man-dirs'."
(setq dir (cond ((nth 2 (car dir-list))
;; Relative to `erlang-root-dir'.
(and (stringp erlang-root-dir)
- (concat erlang-root-dir (nth 1 (car dir-list)))))
+ (erlang-man-dir (nth 1 (car dir-list)))))
(t
;; Absolute
(nth 1 (car dir-list)))))
@@ -2018,6 +2024,8 @@ The format is described in the documentation of `erlang-man-dirs'."
'(("Man Pages"
(("Error! Why?" erlang-man-describe-error)))))))
+(defun erlang-man-dir (subdir)
+ (concat erlang-root-dir "/lib/erlang/" subdir))
;; Should the menu be to long, let's split it into a number of
;; smaller menus. Warning, this code contains beautiful
@@ -2080,7 +2088,7 @@ menus is created."
"Find manual page for MODULE, defaults to module of function under point.
This function is aware of imported functions."
(interactive
- (list (let* ((mod (car-safe (erlang-get-function-under-point)))
+ (list (let* ((mod (erlang-default-module))
(input (read-string
(format "Manual entry for module%s: "
(if (or (null mod) (string= mod ""))
@@ -2089,26 +2097,36 @@ This function is aware of imported functions."
(if (string= input "")
mod
input))))
- (or module (setq module (car (erlang-get-function-under-point))))
- (if (or (null module) (string= module ""))
- (error "No Erlang module name given"))
+ (setq module (or module
+ (erlang-default-module)))
+ (when (or (null module) (string= module ""))
+ (error "No Erlang module name given"))
(let ((dir-list erlang-man-dirs)
- (pat (concat "/" (regexp-quote module) "\\.\\([124-9]\\|3\\(erl\\)?\\)\\(\\.gz\\)?$"))
+ (pat (concat "/" (regexp-quote module)
+ "\\.\\([124-9]\\|3\\(erl\\)?\\)\\(\\.gz\\)?$"))
(file nil)
file-list)
(while (and dir-list (null file))
- (setq file-list (erlang-man-get-files
- (if (nth 2 (car dir-list))
- (concat erlang-root-dir (nth 1 (car dir-list)))
- (nth 1 (car dir-list)))))
- (while (and file-list (null file))
- (if (string-match pat (car file-list))
- (setq file (car file-list)))
- (setq file-list (cdr file-list)))
- (setq dir-list (cdr dir-list)))
+ (let ((dir (if (nth 2 (car dir-list))
+ (erlang-man-dir (nth 1 (car dir-list)))
+ (nth 1 (car dir-list)))))
+ (when (file-directory-p dir)
+ (setq file-list (erlang-man-get-files dir))
+ (while (and file-list (null file))
+ (if (string-match pat (car file-list))
+ (setq file (car file-list)))
+ (setq file-list (cdr file-list))))
+ (setq dir-list (cdr dir-list))))
(if file
(funcall erlang-man-display-function file)
- (error "No manual page for module %s found" module))))
+ ;; Could not find the manual file. Fall back to manual-entry.
+ (manual-entry module))))
+
+(defun erlang-default-module ()
+ (let ((id (erlang-get-identifier-at-point)))
+ (if (eq (erlang-id-kind id) 'qualified-function)
+ (erlang-id-module id)
+ (erlang-id-name id))))
;; Warning, the function `erlang-man-function' is a hack!
@@ -2128,37 +2146,28 @@ The entry for `function' is displayed.
This function is aware of imported functions."
(interactive
- (list (let* ((mod-func (erlang-get-function-under-point))
- (mod (car-safe mod-func))
- (func (nth 1 mod-func))
+ (list (let* ((default (erlang-default-function-or-module))
(input (read-string
(format
"Manual entry for `module:func' or `module'%s: "
- (if (or (null mod) (string= mod ""))
- ""
- (format " (default %s:%s)" mod func))))))
+ (if default
+ (format " (default %s)" default)
+ "")))))
(if (string= input "")
- (if (and mod func)
- (concat mod ":" func)
- mod)
+ default
input))))
- ;; Emacs 18 doesn't provide `man'...
- (condition-case nil
- (require 'man)
- (error nil))
+ (require 'man)
+ (setq name (or name
+ (erlang-default-function-or-module)))
(let ((modname nil)
(funcname nil))
- (cond ((null name)
- (let ((mod-func (erlang-get-function-under-point)))
- (setq modname (car-safe mod-func))
- (setq funcname (nth 1 mod-func))))
- ((string-match ":" name)
+ (cond ((string-match ":" name)
(setq modname (substring name 0 (match-beginning 0)))
(setq funcname (substring name (match-end 0) nil)))
((stringp name)
(setq modname name)))
- (if (or (null modname) (string= modname ""))
- (error "No Erlang module name given"))
+ (when (or (null modname) (string= modname ""))
+ (error "No Erlang module name given"))
(cond ((fboundp 'Man-notify-when-ready)
;; Emacs 19: The man command could possibly start an
;; asynchronous process, i.e. we must hook ourselves into
@@ -2168,16 +2177,20 @@ This function is aware of imported functions."
()
(erlang-man-patch-notify)
(setq erlang-man-function-name funcname))
- (condition-case nil
+ (condition-case err
(erlang-man-module modname)
- (error (setq erlang-man-function-name nil))))
+ (error (setq erlang-man-function-name nil)
+ (signal (car err) (cdr err)))))
(t
(erlang-man-module modname)
- (if funcname
- (erlang-man-find-function
- (or (get-buffer "*Manual Entry*") ; Emacs 18
- (current-buffer)) ; XEmacs
- funcname))))))
+ (when funcname
+ (erlang-man-find-function (current-buffer) funcname))))))
+
+(defun erlang-default-function-or-module ()
+ (let ((id (erlang-get-identifier-at-point)))
+ (if (eq (erlang-id-kind id) 'qualified-function)
+ (format "%s:%s" (erlang-id-module id) (erlang-id-name id))
+ (erlang-id-name id))))
;; Should the defadvice be at the top level, the package `advice' would
@@ -2222,36 +2235,22 @@ command is executed asynchronously."
(set-window-point win (point)))
(message "Could not find function `%s'" func)))))))
+(defvar erlang-man-file-regexp
+ "\\(.*\\)/man[^/]*/\\([^.]+\\)\\.\\([124-9]\\|3\\(erl\\)?\\)\\(\\.gz\\)?$")
(defun erlang-man-display (file)
"Display FILE as a `man' file.
This is the default manual page display function.
The variables `erlang-man-display-function' contains the function
to be used."
- ;; Emacs 18 doesn't `provide' man.
- (condition-case nil
- (require 'man)
- (error nil))
+ (require 'man)
(if file
(let ((process-environment (copy-sequence process-environment)))
- (if (string-match "\\(.*\\)/man[^/]*/\\([^.]+\\)\\.\\([124-9]\\|3\\(erl\\)?\\)\\(\\.gz\\)?$" file)
+ (if (string-match erlang-man-file-regexp file)
(let ((dir (substring file (match-beginning 1) (match-end 1)))
(page (substring file (match-beginning 2) (match-end 2))))
- (if (fboundp 'setenv)
- (setenv "MANPATH" dir)
- ;; Emacs 18
- (setq process-environment (cons (concat "MANPATH=" dir)
- process-environment)))
- (cond ((not (and (not erlang-xemacs-p)
- (= erlang-emacs-major-version 19)
- (< erlang-emacs-minor-version 29)))
- (manual-entry page))
- (t
- ;; Emacs 19.28 and earlier versions of 19:
- ;; The manual-entry command unconditionally prompts
- ;; the user :-(
- (funcall (symbol-function 'Man-getpage-in-background)
- page))))
+ (setenv "MANPATH" dir)
+ (manual-entry page))
(error "Can't find man page for %s\n" file)))))
@@ -2394,7 +2393,7 @@ can contain other `tempo' attributes. Please see the function
The first character of DD is space if the value is less than 10."
(let ((date (current-time-string)))
(format "%2d %s %s"
- (erlang-string-to-int (substring date 8 10))
+ (string-to-number (substring date 8 10))
(substring date 4 7)
(substring date -4))))
@@ -2956,10 +2955,10 @@ Return nil if inside string, t if in a comment."
((eq (car stack-top) '->)
;; If in fun definition use standard indent level not double
;;(if (not (eq (car (car (cdr stack))) 'fun))
- ;; Removed it made multi clause fun's look to bad
+ ;; Removed; it made multi-clause funs look too bad
(setq off (+ erlang-indent-level (if (not erlang-icr-indent)
erlang-indent-level
- erlang-icr-indent)))))
+ erlang-icr-indent)))))
(let ((base (erlang-indent-find-base stack indent-point off skip)))
;; Special cases
(goto-char indent-point)
@@ -3597,7 +3596,7 @@ corresponds to the order of the parsed Erlang list."
(erlang-remove-quotes
(erlang-buffer-substring
(match-beginning 1) (match-end 1)))
- (erlang-string-to-int
+ (string-to-number
(erlang-buffer-substring
(match-beginning
(+ 1 erlang-atom-regexp-matches))
@@ -3696,34 +3695,50 @@ Normally used in conjunction with `erlang-beginning-of-clause', e.g.:
(defun erlang-get-function-arity ()
"Return the number of arguments of function at point, or nil."
- (and (looking-at (eval-when-compile
- (concat "^" erlang-atom-regexp "\\s *(")))
- (save-excursion
- (goto-char (match-end 0))
- (condition-case nil
- (let ((res 0)
- (cont t))
- (while cont
- (cond ((eobp)
- (setq res nil)
- (setq cont nil))
- ((looking-at "\\s *)")
- (setq cont nil))
- ((looking-at "\\s *\\($\\|%\\)")
- (forward-line 1))
- ((looking-at "\\s *<<[^>]*?>>")
- (when (zerop res)
- (setq res (+ 1 res)))
- (goto-char (match-end 0)))
- ((looking-at "\\s *,")
- (setq res (+ 1 res))
- (goto-char (match-end 0)))
- (t
- (when (zerop res)
- (setq res (+ 1 res)))
- (forward-sexp 1))))
- res)
- (error nil)))))
+ (erlang-get-arity-after-regexp (concat "^" erlang-atom-regexp "\\s *(")))
+
+(defun erlang-get-argument-list-arity ()
+ "Return the number of arguments in argument list at point, or nil.
+The point should be before the opening parenthesis of the
+argument list before calling this function."
+ (erlang-get-arity-after-regexp "\\s *("))
+
+(defun erlang-get-arity-after-regexp (regexp)
+ "Return the number of arguments in argument list after REGEXP, or nil."
+ (when (looking-at regexp)
+ (save-excursion
+ (goto-char (match-end 0))
+ (erlang-get-arity))))
+
+(defun erlang-get-arity ()
+ "Return the number of arguments in argument list at point, or nil.
+The point should be after the opening parenthesis of the argument
+list before calling this function."
+ (condition-case nil
+ (let ((res 0)
+ (cont t))
+ (while cont
+ (cond ((eobp)
+ (setq res nil)
+ (setq cont nil))
+ ((looking-at "\\s *)")
+ (setq cont nil))
+ ((looking-at "\\s *\\($\\|%\\)")
+ (forward-line 1))
+ ((looking-at "\\s *<<[^>]*?>>")
+ (when (zerop res)
+ (setq res (+ 1 res)))
+ (goto-char (match-end 0)))
+ ((looking-at "\\s *,")
+ (setq res (+ 1 res))
+ (goto-char (match-end 0)))
+ (t
+ (when (zerop res)
+ (setq res (+ 1 res)))
+ (forward-sexp 1))))
+ res)
+ (error nil)))
+
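A minimal usage sketch of `erlang-get-arity', assuming erlang.el is loaded; the buffer contents are made up:

  (with-temp-buffer
    (erlang-mode)
    (insert "foo(A, {B, C}, [D]) -> ok.")
    (goto-char (point-min))
    (search-forward "(")   ; leave point just after the opening parenthesis
    (erlang-get-arity))    ; => 3 (the tuple and the list each count as one argument)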
(defun erlang-get-function-name-and-arity ()
"Return the name and arity of the function at point, or nil.
@@ -3746,6 +3761,8 @@ The return value is a string of the form \"foo/1\"."
(error nil)))))
+;; Keeping erlang-get-function-under-point for backward compatibility.
+;; It is used by erldoc.el and maybe other code out there.
(defun erlang-get-function-under-point ()
"Return the module and function under the point, or nil.
@@ -3755,44 +3772,141 @@ list of imported functions is searched.
The following could be returned:
(\"module\" \"function\") -- Both module and function name found.
(nil \"function\") -- No module name was found.
- nil -- No function name found
+ nil -- No function name found.
+
+See also `erlang-get-identifier-at-point'."
+ (let* ((id (erlang-get-identifier-at-point))
+ (kind (erlang-id-kind id))
+ (module (erlang-id-module id))
+ (name (erlang-id-name id)))
+ (cond ((eq kind 'qualified-function)
+ (list module name))
+ (name
+ (list nil name)))))
+
+(defun erlang-get-identifier-at-point ()
+ "Return the erlang identifier at point, or nil.
+
+Should no explicit module name be present at the point, the
+list of imported functions is searched.
+
+When an identifier is found return a list with 4 elements:
+
+1. Kind - One of the symbols qualified-function, record, macro,
+module or nil.
+
+2. Module - Module name string or nil. For a qualified-function
+the search fails if no entries with the correct module are found.
+For other kinds the module is only a preference; if no matching
+entries are found, the search is retried without regard to the
+module.
+
+3. Name - String name of function, module, record or macro.
-In the future the list may contain more elements."
+4. Arity - For functions and macros, an integer if the number of
+arguments could be determined, otherwise nil."
(save-excursion
- (let ((md (match-data))
- (res nil))
+ (save-match-data
(if (eq (char-syntax (following-char)) ? )
(skip-chars-backward " \t"))
- (skip-chars-backward "a-zA-Z0-9_:'")
- (cond ((looking-at (eval-when-compile
- (concat erlang-atom-regexp ":" erlang-atom-regexp)))
- (setq res (list
- (erlang-remove-quotes
- (erlang-buffer-substring
- (match-beginning 1) (match-end 1)))
- (erlang-remove-quotes
- (erlang-buffer-substring
- (match-beginning (1+ erlang-atom-regexp-matches))
- (match-end (1+ erlang-atom-regexp-matches)))))))
- ((looking-at erlang-atom-regexp)
- (let ((fk (erlang-remove-quotes
- (erlang-buffer-substring
- (match-beginning 0) (match-end 0))))
- (mod nil)
- (imports (erlang-get-import)))
- (while (and imports (null mod))
- (if (assoc fk (cdr (car imports)))
- (setq mod (car (car imports)))
- (setq imports (cdr imports))))
- (cond ((eq (preceding-char) ?#)
- (setq fk (concat "-record(" fk)))
- ((eq (preceding-char) ??)
- (setq fk (concat "-define(" fk)))
- ((and (null mod) (not (member fk erlang-int-bifs)))
- (setq mod (erlang-get-module))))
- (setq res (list mod fk)))))
- (store-match-data md)
- res)))
+ (skip-chars-backward "[:word:]_:'")
+ (cond ((looking-at erlang-module-function-regexp)
+ (erlang-get-qualified-function-id-at-point))
+ ((looking-at (concat erlang-atom-regexp ":"))
+ (erlang-get-module-id-at-point))
+ ((looking-at erlang-name-regexp)
+ (erlang-get-some-other-id-at-point))))))
+
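As a hedged illustration of the contract described in the docstring above (the identifiers are made up), with point inside an Erlang buffer:

  ;; Point somewhere on the remote call "lists:append(Xs, Ys)":
  (erlang-get-identifier-at-point)   ; => (qualified-function "lists" "append" 2)
  ;; Point on the record name in "#state{count = 0}":
  (erlang-get-identifier-at-point)   ; => (record nil "state" nil)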
+(defun erlang-get-qualified-function-id-at-point ()
+ (let ((kind 'qualified-function)
+ (module (erlang-remove-quotes
+ (erlang-buffer-substring
+ (match-beginning 1) (match-end 1))))
+ (name (erlang-remove-quotes
+ (erlang-buffer-substring
+ (match-beginning (1+ erlang-atom-regexp-matches))
+ (match-end (1+ erlang-atom-regexp-matches)))))
+ (arity (progn
+ (goto-char (match-end 0))
+ (erlang-get-argument-list-arity))))
+ (list kind module name arity)))
+
+(defun erlang-get-module-id-at-point ()
+ (let ((kind 'module)
+ (module nil)
+ (name (erlang-remove-quotes
+ (erlang-buffer-substring (match-beginning 1)
+ (match-end 1))))
+ (arity nil))
+ (list kind module name arity)))
+
+(defun erlang-get-some-other-id-at-point ()
+ (let ((name (erlang-remove-quotes
+ (erlang-buffer-substring
+ (match-beginning 0) (match-end 0))))
+ (imports (erlang-get-import))
+ kind module arity)
+ (while (and imports (null module))
+ (if (assoc name (cdr (car imports)))
+ (setq module (car (car imports)))
+ (setq imports (cdr imports))))
+ (cond ((eq (preceding-char) ?#)
+ (setq kind 'record))
+ ((eq (preceding-char) ??)
+ (setq kind 'macro))
+ ((and (null module) (not (member name erlang-int-bifs)))
+ (setq module (erlang-get-module))))
+ (setq arity (progn
+ (goto-char (match-end 0))
+ (erlang-get-argument-list-arity)))
+ (list kind module name arity)))
+
+(defmacro erlang-with-id (slots id-string &rest body)
+ (declare (indent 2))
+ (let ((id-var (make-symbol "id")))
+ `(let* ((,id-var (erlang-id-to-list ,id-string))
+ ,@(mapcar (lambda (slot)
+ (list slot
+ (list (intern (format "erlang-id-%s" slot))
+ id-var)))
+ slots))
+ ,@body)))
+
+(defun erlang-id-to-string (id)
+ (when id
+ (erlang-with-id (kind module name arity) id
+ (format "%s%s%s%s"
+ (if kind (format "%s " kind) "")
+ (if module (format "%s:" module) "")
+ name
+ (if arity (format "/%s" arity) "")))))
+
+(defun erlang-id-to-list (id)
+ (if (listp id)
+ id
+ (save-match-data
+ (erlang-ensure-syntax-table-is-initialized)
+ (with-syntax-table erlang-mode-syntax-table
+ (let (case-fold-search)
+ (when (string-match erlang-id-regexp id)
+ (list (when (match-string 1 id)
+ (intern (match-string 1 id)))
+ (match-string 2 id)
+ (match-string 3 id)
+ (when (match-string 4 id)
+ (string-to-number (match-string 4 id))))))))))
+
+(defun erlang-id-kind (id)
+ (car (erlang-id-to-list id)))
+
+(defun erlang-id-module (id)
+ (nth 1 (erlang-id-to-list id)))
+
+(defun erlang-id-name (id)
+ (nth 2 (erlang-id-to-list id)))
+
+(defun erlang-id-arity (id)
+ (nth 3 (erlang-id-to-list id)))
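A small sketch of the id helpers, applied directly to the list form (the identifier is made up):

  (let ((id '(qualified-function "lists" "append" 2)))
    (list (erlang-id-to-string id)   ; => "qualified-function lists:append/2"
          (erlang-id-module id)      ; => "lists"
          (erlang-id-arity id)))     ; => 2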
;; TODO: Escape single quotes inside the string without
@@ -3822,10 +3936,10 @@ In the future the list may contain more elements."
"Returns non-nil if there is an exported function in the current
buffer between point and MAX."
(block nil
- (while (and (not erlang-inhibit-exported-function-name-face)
- (erlang-match-next-function max))
- (when (erlang-last-match-exported-p)
- (return (match-data))))))
+ (while (and (not erlang-inhibit-exported-function-name-face)
+ (erlang-match-next-function max))
+ (when (erlang-last-match-exported-p)
+ (return (match-data))))))
(defun erlang-match-next-function (max)
"Searches forward in current buffer for the next erlang function,
@@ -4084,7 +4198,7 @@ non-whitespace characters following the point on the current line."
nil)))
-(defun erlang-electric-arrow\ off (&optional arg)
+(defun erlang-electric-arrow (&optional arg)
"Insert a '>'-sign and possibly a new indented line.
This command is only `electric' when the `>' is part of an `->' arrow.
@@ -4310,8 +4424,8 @@ This function is designed to be a member of a criteria list."
(looking-at "end[^_a-zA-Z0-9]")))
-;; Erlang tags support which is aware of erlang modules.
-;;
+;;; Erlang tags support which is aware of erlang modules.
+
;; Not yet implemented under XEmacs. (Hint: The Emacs 19 etags
;; package works under XEmacs.)
@@ -4369,7 +4483,7 @@ This function only works under Emacs 18 and Emacs 19. Currently, It
is not implemented under XEmacs. (Hint: The Emacs 19 etags module
works under XEmacs.)"
(interactive)
- (cond ((= erlang-emacs-major-version 18)
+ (cond ((= emacs-major-version 18)
(require 'tags)
(erlang-tags-define-keys (current-local-map))
(setq erlang-tags-installed t))
@@ -4409,20 +4523,6 @@ works under XEmacs.)"
(erlang-menu-substitute erlang-menu-base-items erlang-tags-function-alist)
(erlang-menu-init))
-
-(defun erlang-find-tag-default ()
- "Return the default tag.
-Search `-import' list of imported functions.
-Single quotes are been stripped away."
- (let ((mod-func (erlang-get-function-under-point)))
- (cond ((null mod-func)
- nil)
- ((null (car mod-func))
- (nth 1 mod-func))
- (t
- (concat (car mod-func) ":" (nth 1 mod-func))))))
-
-
;; Return `t' since it is used inside `tags-loop-form'.
;;;###autoload
(defun erlang-find-tag (modtagname &optional next-p regexp-p)
@@ -4609,7 +4709,7 @@ Tags can be given on the forms `tag', `module:', `module:tag'."
(list nil (if (< (prefix-numeric-value current-prefix-arg) 0)
'-
t))
- (let* ((default (erlang-find-tag-default))
+ (let* ((default (erlang-default-function-or-module))
(prompt (if default
(format "%s(default %s) " prompt default)
prompt))
@@ -4633,7 +4733,7 @@ Tags can be given on the forms `tag', `module:', `module:tag'."
;; Make sure our functions are installed in TAGS files loaded
;; into Emacs while searching.
(cond
- ((>= erlang-emacs-major-version 20)
+ ((>= emacs-major-version 20)
(setq erlang-tags-orig-format-functions
(symbol-value 'tags-table-format-functions))
(funcall (symbol-function 'set) 'tags-table-format-functions
@@ -4711,7 +4811,7 @@ Tags can be given on the forms `tag', `module:', `module:tag'."
(defun erlang-tags-remove-module-check ()
"Remove our own tags search functions."
(cond
- ((>= erlang-emacs-major-version 20)
+ ((>= emacs-major-version 20)
(funcall (symbol-function 'set)
'tags-table-format-functions
erlang-tags-orig-format-functions)
@@ -4961,6 +5061,14 @@ about Erlang modules."
;; It adds awareness of the module:tag syntax in a similar way that is
;; done above for the old etags commands.
+(defvar erlang-current-arity nil
+ "The arity of the function currently being searched.
+
+There is no information about arity in the TAGS file.
+Consecutive functions with the same name but different arities
+get only one entry in the TAGS file. Matching TAGS entries are
+therefore selected without regard to arity. The arity is only
+taken into account when it is time to jump to the definition.")
(defun erlang-etags--xref-backend () 'erlang-etags)
@@ -4970,13 +5078,14 @@ about Erlang modules."
(and (erlang-soft-require 'xref)
(erlang-soft-require 'cl-generic)
+ (erlang-soft-require 'eieio)
;; The purpose of using eval here is to avoid compilation
- ;; warnings in emacsen without cl-defmethod.
+ ;; warnings in emacsen without cl-defmethod etc.
(eval
'(progn
(cl-defmethod xref-backend-identifier-at-point
((_backend (eql erlang-etags)))
- (erlang-find-tag-default))
+ (erlang-id-to-string (erlang-get-identifier-at-point)))
(cl-defmethod xref-backend-definitions
((_backend (eql erlang-etags)) identifier)
@@ -4989,42 +5098,99 @@ about Erlang modules."
(cl-defmethod xref-backend-identifier-completion-table
((_backend (eql erlang-etags)))
(let ((erlang-replace-etags-tags-completion-table t))
- (tags-completion-table))))))
-
-
+ (tags-completion-table)))
+
+ (defclass erlang-xref-location (xref-etags-location) ())
+
+ (defun erlang-convert-xrefs (xrefs)
+ (mapcar (lambda (xref)
+ (oset xref location (erlang-make-location
+ (oref xref location)))
+ xref)
+ xrefs))
+
+ (defun erlang-make-location (etags-location)
+ (with-slots (tag-info file) etags-location
+ (make-instance 'erlang-xref-location :tag-info tag-info
+ :file file)))
+
+ (cl-defmethod xref-location-marker ((locus erlang-xref-location))
+ (with-slots (tag-info file) locus
+ (with-current-buffer (find-file-noselect file)
+ (save-excursion
+ (or (erlang-goto-tag-location-by-arity tag-info)
+ (etags-goto-tag-location tag-info))
+ ;; Reset erlang-current-arity. We want to jump to the
+ ;; correct arity on the first attempt, and that has now
+ ;; been done. Any remaining jumps will come from entries
+ ;; in the *xref* buffer, and for those we want to ignore
+ ;; the arity. (Alternatively we could remove all but one
+ ;; xref entry per file once the arity is known.)
+ (setq erlang-current-arity nil)
+ (point-marker)))))
+
+ (defun erlang-xref-context (xref)
+ (with-slots (tag-info) (xref-item-location xref)
+ (car tag-info))))))
+
+
+(defun erlang-goto-tag-location-by-arity (tag-info)
+ (when erlang-current-arity
+ (let* ((tag-text (car tag-info))
+ (tag-pos (cdr (cdr tag-info)))
+ (tag-line (car (cdr tag-info)))
+ (regexp (erlang-tag-info-regexp tag-text))
+ (startpos (or tag-pos
+ (when tag-line
+ (goto-char (point-min))
+ (forward-line (1- tag-line))
+ (point))
+ (point-min))))
+ (setq startpos (max (- startpos 2000)
+ (point-min)))
+ (goto-char startpos)
+ (let ((pos (or (erlang-search-by-arity regexp)
+ (unless (eq startpos (point-min))
+ (goto-char (point-min))
+ (erlang-search-by-arity regexp)))))
+ (when pos
+ (goto-char pos)
+ t)))))
+
+(defun erlang-tag-info-regexp (tag-text)
+ (concat "^"
+ (regexp-quote tag-text)
+ ;; Erlang function entries in TAGS include the opening
+ ;; parenthesis of the argument list. Erlang macro entries
+ ;; do not. Add it here in order to end up in the correct
+ ;; position for erlang-get-arity.
+ (if (string-prefix-p "-define" tag-text)
+ "\\s-*("
+ "")))
+
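For illustration, the anchored regexps produced for the two flavours of TAGS entry text (the texts are made up):

  (erlang-tag-info-regexp "handle_call(")    ; => "^handle_call("  (function entry, paren included)
  (erlang-tag-info-regexp "-define(STATE")   ; => "^-define(STATE\\s-*("  (macro entry, paren appended)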
+(defun erlang-search-by-arity (regexp)
+ (let (pos)
+ (while (and (null pos)
+ (re-search-forward regexp nil t))
+ (when (eq erlang-current-arity (save-excursion (erlang-get-arity)))
+ (setq pos (point-at-bol))))
+ pos))
(defun erlang-xref-find-definitions (identifier &optional is-regexp)
- (let ((id-list (split-string identifier ":")))
- (cond
- ;; Handle "tag"
- ((null (cdr id-list))
- (erlang-xref-find-definitions-tag identifier is-regexp))
- ;; Handle "module:"
- ((string-equal (cadr id-list) "")
- (erlang-xref-find-definitions-module (car id-list)))
- ;; Handle "module:tag"
- (t
- (erlang-xref-find-definitions-module-tag (car id-list)
- (cadr id-list)
- is-regexp)))))
-
-(defun erlang-xref-find-definitions-tag (tag is-regexp)
- "Find all definitions of TAG and reorder them so that
-definitions in the currently visited file comes first."
- (when (fboundp 'etags--xref-find-definitions)
- (let* ((current-file (and (buffer-file-name)
- (file-truename (buffer-file-name))))
- (xrefs (etags--xref-find-definitions tag is-regexp))
- local-xrefs non-local-xrefs)
- (while xrefs
- (if (string-equal (erlang-xref-truename-file (car xrefs))
- current-file)
- (push (car xrefs) local-xrefs)
- (push (car xrefs) non-local-xrefs))
- (setq xrefs (cdr xrefs)))
- (append (reverse local-xrefs)
- (reverse non-local-xrefs)))))
+ (erlang-with-id (kind module name arity) identifier
+ (setq erlang-current-arity arity)
+ (cond ((eq kind 'module)
+ (erlang-xref-find-definitions-module name))
+ (module
+ (erlang-xref-find-definitions-module-tag module
+ name
+ (eq kind
+ 'qualified-function)
+ is-regexp))
+ (t
+ (erlang-xref-find-definitions-tag kind name is-regexp)))))
(defun erlang-xref-find-definitions-module (module)
(and (fboundp 'xref-make)
@@ -5048,17 +5214,58 @@ definitions in the currently visited file comes first."
(setq files (cdr files))))))
(nreverse xrefs))))
-(defun erlang-xref-find-definitions-module-tag (module tag is-regexp)
- "Find all definitions of TAG and filter away definitions
-outside of MODULE."
- (when (fboundp 'etags--xref-find-definitions)
- (let ((xrefs (etags--xref-find-definitions tag is-regexp))
- xrefs-in-module)
- (while xrefs
- (when (string-equal module (erlang-xref-module (car xrefs)))
- (push (car xrefs) xrefs-in-module))
- (setq xrefs (cdr xrefs)))
- xrefs-in-module)))
+
+(defun erlang-xref-find-definitions-module-tag (module
+ tag
+ is-qualified
+ is-regexp)
+ "Find definitions of TAG and filter away definitions outside of
+MODULE. If IS-QUALIFIED is nil and no definitions was found inside
+the MODULE then return any definitions found outside. If
+IS-REGEXP is non-nil then TAG is a regexp."
+ (and (fboundp 'etags--xref-find-definitions)
+ (fboundp 'erlang-convert-xrefs)
+ (let ((xrefs (erlang-convert-xrefs
+ (etags--xref-find-definitions tag is-regexp)))
+ xrefs-in-module)
+ (dolist (xref xrefs)
+ (when (string-equal module (erlang-xref-module xref))
+ (push xref xrefs-in-module)))
+ (cond (is-qualified xrefs-in-module)
+ (xrefs-in-module xrefs-in-module)
+ (t xrefs)))))
+
+(defun erlang-xref-find-definitions-tag (kind tag is-regexp)
+ "Find all definitions of TAG and reorder them so that
+definitions in the currently visited file come first."
+ (and (fboundp 'etags--xref-find-definitions)
+ (fboundp 'erlang-convert-xrefs)
+ (let* ((current-file (and (buffer-file-name)
+ (file-truename (buffer-file-name))))
+ (regexp (erlang-etags-regexp kind tag is-regexp))
+ (xrefs (erlang-convert-xrefs
+ (etags--xref-find-definitions regexp t)))
+ local-xrefs non-local-xrefs)
+ (while xrefs
+ (let ((xref (car xrefs)))
+ (if (string-equal (erlang-xref-truename-file xref)
+ current-file)
+ (push xref local-xrefs)
+ (push xref non-local-xrefs))
+ (setq xrefs (cdr xrefs))))
+ (append (reverse local-xrefs)
+ (reverse non-local-xrefs)))))
+
+(defun erlang-etags-regexp (kind tag is-regexp)
+ (let ((tag-regexp (if is-regexp
+ tag
+ (regexp-quote tag))))
+ (cond ((eq kind 'record)
+ (concat "-record\\s-*(\\s-*" tag-regexp))
+ ((eq kind 'macro)
+ (concat "-define\\s-*(\\s-*" tag-regexp))
+ (t tag-regexp))))
+
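And a sketch of the search regexps `erlang-etags-regexp' builds per identifier kind (the names are made up):

  (erlang-etags-regexp 'record "state" nil)            ; => "-record\\s-*(\\s-*state"
  (erlang-etags-regexp 'macro "TIMEOUT" nil)           ; => "-define\\s-*(\\s-*TIMEOUT"
  (erlang-etags-regexp 'qualified-function "init" nil) ; => "init"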
(defun erlang-xref-module (xref)
(erlang-get-module-from-file-name (erlang-xref-file xref)))
@@ -5174,7 +5381,7 @@ future, a new shell on an already running host will be started."
(defvar erlang-shell-mode-hook nil
- "*User functions to run when an Erlang shell is started.
+ "User functions to run when an Erlang shell is started.
This hook is used to change the behaviour of Erlang mode. It is
normally used by the user to personalise the programming environment.
@@ -5190,7 +5397,7 @@ Erlang source file is loaded into Emacs.")
(defvar erlang-input-ring-file-name "~/.erlang_history"
- "*When non-nil, file name used to store Erlang shell history information.")
+ "When non-nil, file name used to store Erlang shell history information.")
(defun erlang-shell-mode ()
@@ -5290,7 +5497,7 @@ Selects Comint or Compilation mode command as appropriate."
;;;
(defvar inferior-erlang-display-buffer-any-frame nil
- "*When nil, `inferior-erlang-display-buffer' use only selected frame.
+ "When nil, `inferior-erlang-display-buffer' use only selected frame.
When t, all frames are searched. When 'raise, the frame is raised.")
(defvar inferior-erlang-shell-type 'newshell
@@ -5303,10 +5510,10 @@ nil, the default shell is used.
This variable influence the setting of other variables.")
(defvar inferior-erlang-machine "erl"
- "*The name of the Erlang shell.")
+ "The name of the Erlang shell.")
(defvar inferior-erlang-machine-options '()
- "*The options used when activating the Erlang shell.
+ "The options used when activating the Erlang shell.
This must be a list of strings.")
@@ -5317,7 +5524,7 @@ This must be a list of strings.")
"The name of the inferior Erlang buffer.")
(defvar inferior-erlang-prompt-timeout 60
- "*Number of seconds before `inferior-erlang-wait-prompt' timeouts.
+ "Number of seconds before `inferior-erlang-wait-prompt' timeouts.
The time specified is waited after every output made by the inferior
Erlang shell. When this variable is t, we assume that we always have
@@ -5383,7 +5590,7 @@ editing control characters:
(setq inferior-erlang-process
(get-buffer-process inferior-erlang-buffer))
- (if (> 21 erlang-emacs-major-version) ; funcalls to avoid compiler warnings
+ (if (> 21 emacs-major-version) ; funcalls to avoid compiler warnings
(funcall (symbol-function 'set-process-query-on-exit-flag)
inferior-erlang-process nil)
(funcall (symbol-function 'process-kill-without-query) inferior-erlang-process))
@@ -5454,7 +5661,7 @@ frame will become deselected before the next command."
(defun inferior-erlang-window (&optional all-frames)
"Return the window containing the inferior Erlang, or nil."
(and (inferior-erlang-running-p)
- (if (and all-frames (>= erlang-emacs-major-version 19))
+ (if (and all-frames (>= emacs-major-version 19))
(get-buffer-window inferior-erlang-buffer t)
(get-buffer-window inferior-erlang-buffer))))
@@ -5551,7 +5758,7 @@ Return the position after the newly inserted command."
(boundp 'comint-last-output-start))
(save-excursion
(goto-char
- (if (erlang-interactive-p)
+ (if (called-interactively-p 'interactive)
(symbol-value 'comint-last-input-end)
(symbol-value 'comint-last-output-start)))
(while (progn (skip-chars-forward "^\C-h")
@@ -5570,7 +5777,7 @@ Return the position after the newly inserted command."
(let ((pmark (process-mark (get-buffer-process (current-buffer)))))
(save-excursion
(goto-char
- (if (erlang-interactive-p)
+ (if (called-interactively-p 'interactive)
(symbol-value 'comint-last-input-end)
(symbol-value 'comint-last-output-start)))
(while (re-search-forward "\r+$" pmark t)
@@ -5938,12 +6145,6 @@ it assumes that NEWDEF is loaded."
(ad-unadvise 'Man-notify-when-ready)
(ad-unadvise 'set-visited-file-name)))))
-
-(defun erlang-string-to-int (string)
- (if (fboundp 'string-to-number)
- (string-to-number string)
- (funcall (symbol-function 'string-to-int) string)))
-
;; The end...
(provide 'erlang)
@@ -5951,7 +6152,7 @@ it assumes that NEWDEF is loaded."
(run-hooks 'erlang-load-hook)
;; Local variables:
-;; coding: iso-8859-1
+;; coding: utf-8
;; indent-tabs-mode: nil
;; End:
diff --git a/lib/tools/emacs/erldoc.el b/lib/tools/emacs/erldoc.el
index cb355374d9..e1fd661348 100644
--- a/lib/tools/emacs/erldoc.el
+++ b/lib/tools/emacs/erldoc.el
@@ -23,8 +23,8 @@
;; Crawl Erlang/OTP HTML documentation and generate lookup tables.
;;
;; This package depends on `cl-lib', `pcase' and
-;; `libxml-parse-html-region'; emacs 24+ compiled with libxml2 should
-;; work. On emacs 24.1 and 24.2 do `M-x package-install RET cl-lib
+;; `libxml-parse-html-region'. Emacs 24+ compiled with libxml2 should
+;; work. On Emacs 24.1 and 24.2 do `M-x package-install RET cl-lib
;; RET' to install `cl-lib'.
;;
;; Please customise `erldoc-man-index' to point to your local OTP
@@ -505,4 +505,10 @@ up the indexing."
(browse-url (cdr (assoc topic (erldoc-user-guides)))))
(provide 'erldoc)
+
+;; Local variables:
+;; coding: utf-8
+;; indent-tabs-mode: nil
+;; End:
+
;;; erldoc.el ends here
diff --git a/lib/typer/src/typer.erl b/lib/typer/src/typer.erl
index 3bff546243..18c4fe902d 100644
--- a/lib/typer/src/typer.erl
+++ b/lib/typer/src/typer.erl
@@ -129,31 +129,22 @@ extract(#analysis{macros = Macros,
%% Process remote types
NewCodeServer =
try
- NewRecords = dialyzer_codeserver:get_temp_records(CodeServer1),
+ CodeServer2 =
+ dialyzer_utils:merge_types(CodeServer1,
+ TrustPLT), % XXX change to the PLT?
NewExpTypes = dialyzer_codeserver:get_temp_exported_types(CodeServer1),
case sets:size(NewExpTypes) of 0 -> ok end,
- OldRecords = dialyzer_plt:get_types(TrustPLT), % XXX change to the PLT?
- MergedRecords = dialyzer_utils:merge_records(NewRecords, OldRecords),
- CodeServer2 = dialyzer_codeserver:set_temp_records(MergedRecords, CodeServer1),
CodeServer3 = dialyzer_codeserver:finalize_exported_types(NewExpTypes, CodeServer2),
- {CodeServer4, RecordDict} =
- dialyzer_utils:process_record_remote_types(CodeServer3),
- dialyzer_contracts:process_contract_remote_types(CodeServer4, RecordDict)
+ CodeServer4 = dialyzer_utils:process_record_remote_types(CodeServer3),
+ dialyzer_contracts:process_contract_remote_types(CodeServer4)
catch
throw:{error, ErrorMsg} ->
compile_error(ErrorMsg)
end,
%% Create TrustPLT
- Contracts = dialyzer_codeserver:get_contracts(NewCodeServer),
- Modules = dict:fetch_keys(Contracts),
- FoldFun =
- fun(Module, TmpPlt) ->
- {ok, ModuleContracts} = dict:find(Module, Contracts),
- SpecList = [{MFA, Contract}
- || {MFA, {_FileLine, Contract}} <- maps:to_list(ModuleContracts)],
- dialyzer_plt:insert_contract_list(TmpPlt, SpecList)
- end,
- NewTrustPLT = lists:foldl(FoldFun, TrustPLT, Modules),
+ ContractsDict = dialyzer_codeserver:get_contracts(NewCodeServer),
+ Contracts = orddict:from_list(dict:to_list(ContractsDict)),
+ NewTrustPLT = dialyzer_plt:insert_contract_list(TrustPLT, Contracts),
Analysis#analysis{trust_plt = NewTrustPLT}.
%%--------------------------------------------------------------------
@@ -835,19 +826,14 @@ collect_info(Analysis) ->
TmpCServer = NewAnalysis#analysis.codeserver,
NewCServer =
try
- NewRecords = dialyzer_codeserver:get_temp_records(TmpCServer),
+ TmpCServer1 = dialyzer_utils:merge_types(TmpCServer, NewPlt),
NewExpTypes = dialyzer_codeserver:get_temp_exported_types(TmpCServer),
- OldRecords = dialyzer_plt:get_types(NewPlt),
OldExpTypes = dialyzer_plt:get_exported_types(NewPlt),
- MergedRecords = dialyzer_utils:merge_records(NewRecords, OldRecords),
MergedExpTypes = sets:union(NewExpTypes, OldExpTypes),
- %% io:format("Merged Records ~p",[MergedRecords]),
- TmpCServer1 = dialyzer_codeserver:set_temp_records(MergedRecords, TmpCServer),
TmpCServer2 =
dialyzer_codeserver:finalize_exported_types(MergedExpTypes, TmpCServer1),
- {TmpCServer3, RecordDict} =
- dialyzer_utils:process_record_remote_types(TmpCServer2),
- dialyzer_contracts:process_contract_remote_types(TmpCServer3, RecordDict)
+ TmpCServer3 = dialyzer_utils:process_record_remote_types(TmpCServer2),
+ dialyzer_contracts:process_contract_remote_types(TmpCServer3)
catch
throw:{error, ErrorMsg} ->
fatal_error(ErrorMsg)
diff --git a/lib/wx/api_gen/gen_util.erl b/lib/wx/api_gen/gen_util.erl
index cd42ad2d96..49a3cb521e 100644
--- a/lib/wx/api_gen/gen_util.erl
+++ b/lib/wx/api_gen/gen_util.erl
@@ -203,7 +203,7 @@ replace_and_remove([$; | R], Acc) ->
replace_and_remove([$@ | R], Acc) ->
replace_and_remove(R, [directive|Acc]);
-replace_and_remove([_E|R], Acc) -> %% Ignore everthing else
+replace_and_remove([_E|R], Acc) -> %% Ignore everything else
replace_and_remove(R, Acc);
replace_and_remove([], Acc) ->
Acc.
diff --git a/lib/wx/api_gen/wx_gen_cpp.erl b/lib/wx/api_gen/wx_gen_cpp.erl
index d4b6db8153..4b208001a0 100644
--- a/lib/wx/api_gen/wx_gen_cpp.erl
+++ b/lib/wx/api_gen/wx_gen_cpp.erl
@@ -627,7 +627,7 @@ decode_arg(N,#type{name="wxArrayString"},Place,A0) ->
w(" int * ~sLen = (int *) bp; bp += 4;~n", [N]),
case Place of
arg -> w(" wxArrayString ~s;~n", [N]);
- opt -> ignore %% Allready declared
+ opt -> ignore %% Already declared
end,
w(" int ~sASz = 0, * ~sTemp;~n", [N,N]),
w(" for(int i=0; i < *~sLen; i++) {~n", [N]),
diff --git a/lib/xmerl/src/xmerl_regexp.erl b/lib/xmerl/src/xmerl_regexp.erl
index fc89b80ff1..566b77725f 100644
--- a/lib/xmerl/src/xmerl_regexp.erl
+++ b/lib/xmerl/src/xmerl_regexp.erl
@@ -1154,7 +1154,7 @@ comp_crs([], Last) -> [{Last,maxchar}].
%% build_dfa(NFA, NfaStartState) -> {DFA,DfaStartState}.
%% Build a DFA from an NFA using "subset construction". The major
%% difference from the book is that we keep the marked and unmarked
-%% DFA states in seperate lists. New DFA states are added to the
+%% DFA states in separate lists. New DFA states are added to the
%% unmarked list and states are marked by moving them to the marked
%% list. We assume that the NFA accepting state numbers are in
%% ascending order for the rules and use ordsets to keep this order.
diff --git a/lib/xmerl/src/xmerl_scan.erl b/lib/xmerl/src/xmerl_scan.erl
index 9f6b27113e..95dc82e5c9 100644
--- a/lib/xmerl/src/xmerl_scan.erl
+++ b/lib/xmerl/src/xmerl_scan.erl
@@ -2309,7 +2309,9 @@ expanded_name(Name, [], #xmlNamespace{default = URI}, S) ->
expanded_name(Name, N = {"xmlns", Local}, #xmlNamespace{nodes = Ns}, S) ->
{_, Value} = lists:keyfind(Local, 1, Ns),
case Name of
- 'xmlns:xml' when Value =/= 'http://www.w3.org/XML/1998/namespace' ->
+ 'xmlns:xml' when Value =:= 'http://www.w3.org/XML/1998/namespace' ->
+ N;
+ 'xmlns:xml' when Value =/= 'http://www.w3.org/XML/1998/namespace' ->
?fatal({xml_prefix_cannot_be_redeclared, Value}, S);
'xmlns:xmlns' ->
?fatal({xmlns_prefix_cannot_be_declared, Value}, S);
@@ -2323,6 +2325,8 @@ expanded_name(Name, N = {"xmlns", Local}, #xmlNamespace{nodes = Ns}, S) ->
N
end
end;
+expanded_name(_Name, {"xml", Local}, _NS, _S) ->
+ {'http://www.w3.org/XML/1998/namespace', list_to_atom(Local)};
expanded_name(_Name, {Prefix, Local}, #xmlNamespace{nodes = Ns}, S) ->
case lists:keysearch(Prefix, 1, Ns) of
{value, {_, URI}} ->
@@ -2333,9 +2337,6 @@ expanded_name(_Name, {Prefix, Local}, #xmlNamespace{nodes = Ns}, S) ->
?fatal({namespace_prefix_not_declared, Prefix}, S)
end.
-
-
-
keyreplaceadd(K, Pos, [H|T], Obj) when K == element(Pos, H) ->
[Obj|T];
keyreplaceadd(K, Pos, [H|T], Obj) ->
diff --git a/lib/xmerl/test/xmerl_SUITE.erl b/lib/xmerl/test/xmerl_SUITE.erl
index cf7c0b7548..58c462483c 100644
--- a/lib/xmerl/test/xmerl_SUITE.erl
+++ b/lib/xmerl/test/xmerl_SUITE.erl
@@ -55,7 +55,7 @@ groups() ->
{misc, [],
[latin1_alias, syntax_bug1, syntax_bug2, syntax_bug3,
pe_ref1, copyright, testXSEIF, export_simple1, export,
- default_attrs_bug]},
+ default_attrs_bug, xml_ns]},
{eventp_tests, [], [sax_parse_and_export]},
{ticket_tests, [],
[ticket_5998, ticket_7211, ticket_7214, ticket_7430,
@@ -237,7 +237,36 @@ default_attrs_bug(Config) ->
{#xmlElement{attributes = [#xmlAttribute{name = b, value = "also explicit"},
#xmlAttribute{name = a, value = "explicit"}]},
[]
- } = xmerl_scan:string(Doc2, [{default_attrs, true}]).
+ } = xmerl_scan:string(Doc2, [{default_attrs, true}]),
+ ok.
+
+
+xml_ns(Config) ->
+ Doc = "<?xml version='1.0'?>\n"
+ "<doc xml:attr1=\"implicit xml ns\"/>",
+ {#xmlElement{namespace=#xmlNamespace{default = [], nodes = []},
+ attributes = [#xmlAttribute{name = 'xml:attr1',
+ expanded_name = {'http://www.w3.org/XML/1998/namespace',attr1},
+ nsinfo = {"xml","attr1"},
+ namespace = #xmlNamespace{default = [], nodes = []}}]},
+ []
+ } = xmerl_scan:string(Doc, [{namespace_conformant, true}]),
+ Doc2 = "<?xml version='1.0'?>\n"
+ "<doc xmlns:xml=\"http://www.w3.org/XML/1998/namespace\" xml:attr1=\"explicit xml ns\"/>",
+ {#xmlElement{namespace=#xmlNamespace{default = [], nodes = [{"xml",'http://www.w3.org/XML/1998/namespace'}]},
+ attributes = [#xmlAttribute{name = 'xmlns:xml',
+ expanded_name = {"xmlns","xml"},
+ nsinfo = {"xmlns","xml"},
+ namespace = #xmlNamespace{default = [],
+ nodes = [{"xml",'http://www.w3.org/XML/1998/namespace'}]}},
+ #xmlAttribute{name = 'xml:attr1',
+ expanded_name = {'http://www.w3.org/XML/1998/namespace',attr1},
+ nsinfo = {"xml","attr1"},
+ namespace = #xmlNamespace{default = [],
+ nodes = [{"xml",'http://www.w3.org/XML/1998/namespace'}]}}]},
+ []
+ } = xmerl_scan:string(Doc2, [{namespace_conformant, true}]),
+ ok.
pe_ref1(Config) ->
file:set_cwd(datadir(Config)),