Diffstat (limited to 'lib')
-rw-r--r--  lib/asn1/doc/src/notes.xml | 34
-rw-r--r--  lib/asn1/src/asn1ct_constructed_per.erl | 6
-rw-r--r--  lib/asn1/src/asn1ct_gen.erl | 48
-rw-r--r--  lib/asn1/src/asn1ct_gen_per.erl | 2
-rw-r--r--  lib/asn1/src/asn1rtt_per_common.erl | 1
-rw-r--r--  lib/asn1/test/Makefile | 1
-rw-r--r--  lib/asn1/test/asn1_SUITE.erl | 21
-rw-r--r--  lib/asn1/test/asn1_SUITE_data/EnumN2N.asn1 | 22
-rw-r--r--  lib/asn1/test/asn1_SUITE_data/ExtensionDefault.asn1 | 12
-rw-r--r--  lib/asn1/test/asn1_SUITE_data/ImportsFrom.asn1 | 3
-rw-r--r--  lib/asn1/test/asn1_SUITE_data/ImportsFrom2.asn1 | 5
-rw-r--r--  lib/asn1/test/testExtensionDefault.erl | 53
-rw-r--r--  lib/asn1/test/testUniqueObjectSets.erl | 2
-rw-r--r--  lib/asn1/test/test_modified_x420.erl | 2
-rw-r--r--  lib/asn1/vsn.mk | 2
-rw-r--r--  lib/common_test/doc/src/notes.xml | 20
-rw-r--r--  lib/common_test/src/ct.erl | 3
-rw-r--r--  lib/common_test/src/ct_make.erl | 48
-rw-r--r--  lib/common_test/src/ct_telnet.erl | 6
-rw-r--r--  lib/common_test/src/ct_testspec.erl | 4
-rw-r--r--  lib/common_test/src/test_server.erl | 45
-rw-r--r--  lib/common_test/src/test_server_ctrl.erl | 9
-rw-r--r--  lib/common_test/src/test_server_node.erl | 2
-rw-r--r--  lib/common_test/src/test_server_sup.erl | 2
-rw-r--r--  lib/common_test/test_server/ts_lib.erl | 24
-rw-r--r--  lib/common_test/test_server/ts_run.erl | 7
-rw-r--r--  lib/common_test/vsn.mk | 2
-rw-r--r--  lib/compiler/doc/src/notes.xml | 17
-rw-r--r--  lib/compiler/src/beam_utils.erl | 61
-rw-r--r--  lib/compiler/src/beam_validator.erl | 18
-rw-r--r--  lib/compiler/src/sys_core_fold.erl | 6
-rw-r--r--  lib/compiler/test/beam_utils_SUITE.erl | 17
-rw-r--r--  lib/compiler/test/core_SUITE.erl | 8
-rw-r--r--  lib/compiler/test/core_SUITE_data/non_variable_apply.core | 80
-rw-r--r--  lib/compiler/vsn.mk | 2
-rw-r--r--  lib/crypto/c_src/crypto.c | 958
-rw-r--r--  lib/crypto/doc/src/crypto.xml | 23
-rw-r--r--  lib/crypto/src/crypto.erl | 94
-rw-r--r--  lib/crypto/test/blowfish_SUITE.erl | 5
-rw-r--r--  lib/crypto/test/crypto_SUITE.erl | 116
-rw-r--r--  lib/debugger/src/dbg_ieval.erl | 8
-rw-r--r--  lib/debugger/src/dbg_wx_break_win.erl | 4
-rw-r--r--  lib/debugger/src/dbg_wx_mon_win.erl | 4
-rw-r--r--  lib/debugger/src/dbg_wx_trace.erl | 19
-rw-r--r--  lib/debugger/src/dbg_wx_win.erl | 2
-rw-r--r--  lib/debugger/src/i.erl | 8
-rw-r--r--  lib/dialyzer/doc/src/notes.xml | 15
-rw-r--r--  lib/dialyzer/src/dialyzer.erl | 20
-rw-r--r--  lib/dialyzer/src/dialyzer_cl_parse.erl | 2
-rw-r--r--  lib/dialyzer/src/dialyzer_contracts.erl | 5
-rw-r--r--  lib/dialyzer/src/dialyzer_gui_wx.erl | 2
-rw-r--r--  lib/dialyzer/src/dialyzer_plt.erl | 9
-rw-r--r--  lib/dialyzer/src/dialyzer_utils.erl | 22
-rw-r--r--  lib/dialyzer/src/typer.erl | 13
-rw-r--r--  lib/dialyzer/test/map_SUITE_data/results/loop | 4
-rw-r--r--  lib/dialyzer/test/map_SUITE_data/results/map_galore | 12
-rw-r--r--  lib/dialyzer/test/map_SUITE_data/results/opaque_key | 1
-rw-r--r--  lib/dialyzer/test/map_SUITE_data/src/loop.erl | 92
-rw-r--r--  lib/dialyzer/test/map_SUITE_data/src/opaque_key/opaque_key_adt.erl | 2
-rw-r--r--  lib/dialyzer/test/options2_SUITE_data/results/unused_unknown_type | 2
-rw-r--r--  lib/dialyzer/test/options2_SUITE_data/src/unused_unknown_type.erl | 32
-rw-r--r--  lib/dialyzer/vsn.mk | 2
-rw-r--r--  lib/diameter/doc/src/diameter.xml | 255
-rw-r--r--  lib/diameter/doc/src/diameter_app.xml | 9
-rw-r--r--  lib/diameter/doc/src/diameter_codec.xml | 23
-rw-r--r--  lib/diameter/doc/src/diameter_sctp.xml | 3
-rw-r--r--  lib/diameter/doc/src/diameter_tcp.xml | 13
-rw-r--r--  lib/diameter/doc/src/seealso.ent | 5
-rw-r--r--  lib/diameter/doc/standard/rfc7683.txt | 2355
-rw-r--r--  lib/diameter/examples/code/client.erl | 10
-rw-r--r--  lib/diameter/examples/code/client_cb.erl | 29
-rw-r--r--  lib/diameter/examples/code/node.erl | 29
-rw-r--r--  lib/diameter/include/diameter_gen.hrl | 8
-rw-r--r--  lib/diameter/src/Makefile | 6
-rw-r--r--  lib/diameter/src/base/diameter.erl | 71
-rw-r--r--  lib/diameter/src/base/diameter_callback.erl | 6
-rw-r--r--  lib/diameter/src/base/diameter_codec.erl | 203
-rw-r--r--  lib/diameter/src/base/diameter_config.erl | 279
-rw-r--r--  lib/diameter/src/base/diameter_gen.erl | 989
-rw-r--r--  lib/diameter/src/base/diameter_lib.erl | 2
-rw-r--r--  lib/diameter/src/base/diameter_peer.erl | 6
-rw-r--r--  lib/diameter/src/base/diameter_peer_fsm.erl | 237
-rw-r--r--  lib/diameter/src/base/diameter_reg.erl | 235
-rw-r--r--  lib/diameter/src/base/diameter_service.erl | 99
-rw-r--r--  lib/diameter/src/base/diameter_traffic.erl | 477
-rw-r--r--  lib/diameter/src/base/diameter_watchdog.erl | 39
-rw-r--r--  lib/diameter/src/compiler/diameter_codegen.erl | 17
-rw-r--r--  lib/diameter/src/compiler/diameter_dict_util.erl | 4
-rw-r--r--  lib/diameter/src/compiler/diameter_exprecs.erl | 4
-rw-r--r--  lib/diameter/src/dict/doic_rfc7683.dia | 50
-rw-r--r--  lib/diameter/src/modules.mk | 1
-rw-r--r--  lib/diameter/src/transport/diameter_sctp.erl | 96
-rw-r--r--  lib/diameter/src/transport/diameter_tcp.erl | 241
-rw-r--r--  lib/diameter/test/diameter_codec_SUITE.erl | 3
-rw-r--r--  lib/diameter/test/diameter_codec_SUITE_data/diameter_test_unknown.erl | 3
-rw-r--r--  lib/diameter/test/diameter_codec_test.erl | 8
-rw-r--r--  lib/diameter/test/diameter_event_SUITE.erl | 11
-rw-r--r--  lib/diameter/test/diameter_examples_SUITE.erl | 4
-rw-r--r--  lib/diameter/test/diameter_traffic_SUITE.erl | 1040
-rw-r--r--  lib/diameter/test/diameter_transport_SUITE.erl | 39
-rw-r--r--  lib/diameter/test/diameter_util.erl | 13
-rw-r--r--  lib/edoc/src/edoc_doclet.erl | 2
-rw-r--r--  lib/edoc/src/edoc_lib.erl | 12
-rw-r--r--  lib/edoc/src/edoc_run.erl | 6
-rw-r--r--  lib/edoc/src/edoc_types.erl | 2
-rw-r--r--  lib/erl_docgen/priv/xsl/db_html.xsl | 4
-rw-r--r--  lib/erl_docgen/src/docgen_otp_specs.erl | 6
-rw-r--r--  lib/et/src/et_collector.erl | 34
-rw-r--r--  lib/et/src/et_selector.erl | 8
-rw-r--r--  lib/et/src/et_wx_contents_viewer.erl | 21
-rw-r--r--  lib/et/src/et_wx_viewer.erl | 24
-rw-r--r--  lib/eunit/src/eunit_lib.erl | 22
-rw-r--r--  lib/eunit/src/eunit_tty.erl | 2
-rw-r--r--  lib/hipe/cerl/cerl_cconv.erl | 2
-rw-r--r--  lib/hipe/cerl/erl_types.erl | 105
-rw-r--r--  lib/hipe/test/opt_verify_SUITE.erl | 2
-rw-r--r--  lib/hipe/test/opt_verify_SUITE_data/call_elim_test_branches_opt_poss.erl | 10
-rw-r--r--  lib/inets/doc/src/httpc.xml | 2
-rw-r--r--  lib/inets/doc/src/httpd.xml | 13
-rw-r--r--  lib/inets/doc/src/mod_esi.xml | 68
-rw-r--r--  lib/inets/doc/src/notes.xml | 36
-rw-r--r--  lib/inets/src/http_lib/http_uri.erl | 4
-rw-r--r--  lib/inets/src/http_server/httpd_example.erl | 24
-rw-r--r--  lib/inets/src/http_server/httpd_request.erl | 48
-rw-r--r--  lib/inets/src/http_server/httpd_request_handler.erl | 162
-rw-r--r--  lib/inets/src/http_server/httpd_response.erl | 24
-rw-r--r--  lib/inets/src/http_server/httpd_script_env.erl | 10
-rw-r--r--  lib/inets/src/http_server/mod_disk_log.erl | 27
-rw-r--r--  lib/inets/src/http_server/mod_esi.erl | 40
-rw-r--r--  lib/inets/src/http_server/mod_log.erl | 4
-rw-r--r--  lib/inets/src/inets_app/inets.appup.src | 4
-rw-r--r--  lib/inets/test/httpd_SUITE.erl | 185
-rw-r--r--  lib/inets/test/uri_SUITE.erl | 4
-rw-r--r--  lib/inets/vsn.mk | 2
-rw-r--r--  lib/kernel/doc/src/disk_log.xml | 2
-rw-r--r--  lib/kernel/doc/src/inet.xml | 11
-rw-r--r--  lib/kernel/doc/src/notes.xml | 15
-rw-r--r--  lib/kernel/src/error_logger.erl | 29
-rw-r--r--  lib/kernel/src/erts_debug.erl | 35
-rw-r--r--  lib/kernel/src/file_server.erl | 8
-rw-r--r--  lib/kernel/src/global.erl | 16
-rw-r--r--  lib/kernel/src/inet.erl | 3
-rw-r--r--  lib/kernel/src/kernel.app.src | 2
-rw-r--r--  lib/kernel/src/kernel.appup.src | 4
-rw-r--r--  lib/kernel/src/net_kernel.erl | 2
-rw-r--r--  lib/kernel/src/pg2.erl | 4
-rw-r--r--  lib/kernel/src/user_drv.erl | 31
-rw-r--r--  lib/kernel/test/Makefile | 4
-rw-r--r--  lib/kernel/test/code_SUITE.erl | 50
-rw-r--r--  lib/kernel/test/gen_tcp_misc_SUITE.erl | 44
-rw-r--r--  lib/kernel/test/kernel_bench.spec | 1
-rw-r--r--  lib/kernel/test/zlib_SUITE.erl | 799
-rw-r--r--  lib/kernel/vsn.mk | 2
-rw-r--r--  lib/mnesia/doc/src/Mnesia_chap2.xmlsrc | 2
-rw-r--r--  lib/mnesia/src/mnesia.erl | 4
-rw-r--r--  lib/mnesia/src/mnesia_bup.erl | 4
-rw-r--r--  lib/mnesia/src/mnesia_checkpoint.erl | 12
-rw-r--r--  lib/mnesia/src/mnesia_controller.erl | 32
-rw-r--r--  lib/mnesia/src/mnesia_dumper.erl | 10
-rw-r--r--  lib/mnesia/src/mnesia_event.erl | 12
-rw-r--r--  lib/mnesia/src/mnesia_index.erl | 2
-rw-r--r--  lib/mnesia/src/mnesia_late_loader.erl | 4
-rw-r--r--  lib/mnesia/src/mnesia_lib.erl | 22
-rw-r--r--  lib/mnesia/src/mnesia_loader.erl | 30
-rw-r--r--  lib/mnesia/src/mnesia_locker.erl | 4
-rw-r--r--  lib/mnesia/src/mnesia_log.erl | 40
-rw-r--r--  lib/mnesia/src/mnesia_monitor.erl | 28
-rw-r--r--  lib/mnesia/src/mnesia_recover.erl | 8
-rw-r--r--  lib/mnesia/src/mnesia_schema.erl | 40
-rw-r--r--  lib/mnesia/src/mnesia_subscr.erl | 6
-rw-r--r--  lib/mnesia/src/mnesia_text.erl | 14
-rw-r--r--  lib/mnesia/src/mnesia_tm.erl | 32
-rw-r--r--  lib/observer/src/cdv_atom_cb.erl | 2
-rw-r--r--  lib/observer/src/cdv_bin_cb.erl | 3
-rw-r--r--  lib/observer/src/cdv_detail_wx.erl | 2
-rw-r--r--  lib/observer/src/cdv_html_wx.erl | 8
-rw-r--r--  lib/observer/src/cdv_info_wx.erl | 8
-rw-r--r--  lib/observer/src/cdv_multi_wx.erl | 8
-rw-r--r--  lib/observer/src/cdv_proc_cb.erl | 2
-rw-r--r--  lib/observer/src/cdv_table_wx.erl | 8
-rw-r--r--  lib/observer/src/cdv_term_cb.erl | 8
-rw-r--r--  lib/observer/src/cdv_virtual_list_wx.erl | 14
-rw-r--r--  lib/observer/src/cdv_wx.erl | 2
-rw-r--r--  lib/observer/src/crashdump_viewer.erl | 281
-rw-r--r--  lib/observer/src/etop_tr.erl | 4
-rw-r--r--  lib/observer/src/etop_txt.erl | 38
-rw-r--r--  lib/observer/src/multitrace.erl | 14
-rw-r--r--  lib/observer/src/observer_alloc_wx.erl | 4
-rw-r--r--  lib/observer/src/observer_app_wx.erl | 2
-rw-r--r--  lib/observer/src/observer_html_lib.erl | 35
-rw-r--r--  lib/observer/src/observer_lib.erl | 36
-rw-r--r--  lib/observer/src/observer_perf_wx.erl | 4
-rw-r--r--  lib/observer/src/observer_port_wx.erl | 2
-rw-r--r--  lib/observer/src/observer_pro_wx.erl | 12
-rw-r--r--  lib/observer/src/observer_procinfo.erl | 8
-rw-r--r--  lib/observer/src/observer_sys_wx.erl | 63
-rw-r--r--  lib/observer/src/observer_trace_wx.erl | 49
-rw-r--r--  lib/observer/src/observer_traceoptions_wx.erl | 19
-rw-r--r--  lib/observer/src/observer_tv_table.erl | 55
-rw-r--r--  lib/observer/src/observer_tv_wx.erl | 2
-rw-r--r--  lib/observer/src/observer_wx.erl | 4
-rw-r--r--  lib/observer/src/ttb.erl | 69
-rw-r--r--  lib/observer/src/ttb_et.erl | 12
-rw-r--r--  lib/observer/test/Makefile | 5
-rw-r--r--  lib/observer/test/crashdump_helper_unicode.erl | 22
-rw-r--r--  lib/observer/test/crashdump_viewer_SUITE.erl | 25
-rw-r--r--  lib/os_mon/src/disksup.erl | 2
-rw-r--r--  lib/public_key/doc/src/public_key.xml | 39
-rw-r--r--  lib/public_key/include/public_key.hrl | 3
-rw-r--r--  lib/public_key/src/pubkey_crl.erl | 35
-rw-r--r--  lib/public_key/src/pubkey_ssh.erl | 4
-rw-r--r--  lib/public_key/src/public_key.erl | 159
-rw-r--r--  lib/public_key/test/erl_make_certs.erl | 8
-rw-r--r--  lib/reltool/src/reltool.erl | 4
-rw-r--r--  lib/reltool/src/reltool.hrl | 2
-rw-r--r--  lib/reltool/src/reltool_app_win.erl | 8
-rw-r--r--  lib/reltool/src/reltool_fgraph_win.erl | 2
-rw-r--r--  lib/reltool/src/reltool_mod_win.erl | 6
-rw-r--r--  lib/reltool/src/reltool_server.erl | 39
-rw-r--r--  lib/reltool/src/reltool_sys_win.erl | 22
-rw-r--r--  lib/reltool/src/reltool_target.erl | 39
-rw-r--r--  lib/reltool/src/reltool_utils.erl | 12
-rw-r--r--  lib/runtime_tools/doc/src/notes.xml | 16
-rw-r--r--  lib/runtime_tools/src/dbg.erl | 2
-rw-r--r--  lib/runtime_tools/src/observer_backend.erl | 47
-rw-r--r--  lib/runtime_tools/vsn.mk | 2
-rw-r--r--  lib/sasl/doc/src/sasl_app.xml | 9
-rw-r--r--  lib/sasl/src/release_handler.erl | 10
-rw-r--r--  lib/sasl/src/sasl_report.erl | 81
-rw-r--r--  lib/sasl/src/sasl_report_file_h.erl | 16
-rw-r--r--  lib/sasl/src/systools_make.erl | 6
-rw-r--r--  lib/sasl/src/systools_relup.erl | 4
-rw-r--r--  lib/sasl/test/release_handler_SUITE.erl | 103
-rw-r--r--  lib/sasl/test/release_handler_SUITE_data/Makefile.src | 18
-rw-r--r--  lib/sasl/test/release_handler_SUITE_data/unicode/u-1.0/ebin/u.app | 8
-rw-r--r--  lib/sasl/test/release_handler_SUITE_data/unicode/u-1.0/src/u.erl | 50
-rw-r--r--  lib/sasl/test/release_handler_SUITE_data/unicode/u-1.0/src/u_sup.erl | 38
-rw-r--r--  lib/sasl/test/release_handler_SUITE_data/unicode/u-1.1/ebin/u.app | 8
-rw-r--r--  lib/sasl/test/release_handler_SUITE_data/unicode/u-1.1/ebin/u.appup | 3
-rw-r--r--  lib/sasl/test/release_handler_SUITE_data/unicode/u-1.1/src/u.erl | 55
-rw-r--r--  lib/sasl/test/release_handler_SUITE_data/unicode/u-1.1/src/u_sup.erl | 38
-rw-r--r--  lib/sasl/test/sasl_report_SUITE.erl | 40
-rw-r--r--  lib/ssh/doc/src/Makefile | 3
-rw-r--r--  lib/ssh/doc/src/configure_algos.xml | 428
-rw-r--r--  lib/ssh/doc/src/notes.xml | 16
-rw-r--r--  lib/ssh/doc/src/ssh.xml | 101
-rw-r--r--  lib/ssh/doc/src/ssh_app.xml | 4
-rw-r--r--  lib/ssh/doc/src/usersguide.xml | 1
-rw-r--r--  lib/ssh/src/ssh.erl | 22
-rw-r--r--  lib/ssh/src/ssh_io.erl | 8
-rw-r--r--  lib/ssh/src/ssh_options.erl | 272
-rw-r--r--  lib/ssh/src/ssh_sftp.erl | 24
-rw-r--r--  lib/ssh/src/ssh_transport.erl | 19
-rw-r--r--  lib/ssh/test/ssh_protocol_SUITE.erl | 140
-rw-r--r--  lib/ssh/test/ssh_sftp_SUITE.erl | 31
-rw-r--r--  lib/ssh/vsn.mk | 2
-rw-r--r--  lib/ssl/src/dtls_connection.erl | 70
-rw-r--r--  lib/ssl/src/dtls_socket.erl | 6
-rw-r--r--  lib/ssl/src/ssl.erl | 87
-rw-r--r--  lib/ssl/src/ssl_alert.erl | 98
-rw-r--r--  lib/ssl/src/ssl_alert.hrl | 5
-rw-r--r--  lib/ssl/src/ssl_cipher.erl | 4
-rw-r--r--  lib/ssl/src/ssl_connection.erl | 48
-rw-r--r--  lib/ssl/src/ssl_handshake.erl | 20
-rw-r--r--  lib/ssl/src/ssl_manager.erl | 4
-rw-r--r--  lib/ssl/src/ssl_pem_cache.erl | 2
-rw-r--r--  lib/ssl/src/ssl_pkix_db.erl | 15
-rw-r--r--  lib/ssl/src/tls_connection.erl | 6
-rw-r--r--  lib/ssl/test/Makefile | 1
-rw-r--r--  lib/ssl/test/erl_make_certs.erl | 477
-rw-r--r--  lib/ssl/test/ssl_ECC_SUITE.erl | 16
-rw-r--r--  lib/ssl/test/ssl_alpn_handshake_SUITE.erl | 62
-rw-r--r--  lib/ssl/test/ssl_basic_SUITE.erl | 203
-rw-r--r--  lib/ssl/test/ssl_certificate_verify_SUITE.erl | 11
-rw-r--r--  lib/ssl/test/ssl_crl_SUITE.erl | 12
-rw-r--r--  lib/ssl/test/ssl_npn_handshake_SUITE.erl | 9
-rw-r--r--  lib/ssl/test/ssl_packet_SUITE.erl | 68
-rw-r--r--  lib/ssl/test/ssl_payload_SUITE.erl | 9
-rw-r--r--  lib/ssl/test/ssl_sni_SUITE.erl | 79
-rw-r--r--  lib/ssl/test/ssl_test_lib.erl | 215
-rw-r--r--  lib/ssl/test/ssl_to_openssl_SUITE.erl | 227
-rw-r--r--  lib/stdlib/doc/src/ets.xml | 4
-rw-r--r--  lib/stdlib/doc/src/lists.xml | 4
-rw-r--r--  lib/stdlib/doc/src/notes.xml | 32
-rw-r--r--  lib/stdlib/doc/src/rand.xml | 2
-rw-r--r--  lib/stdlib/src/array.erl | 2
-rw-r--r--  lib/stdlib/src/c.erl | 2
-rw-r--r--  lib/stdlib/src/dets.erl | 20
-rw-r--r--  lib/stdlib/src/dets_utils.erl | 4
-rw-r--r--  lib/stdlib/src/edlin.erl | 136
-rw-r--r--  lib/stdlib/src/edlin_expand.erl | 26
-rw-r--r--  lib/stdlib/src/erl_lint.erl | 15
-rw-r--r--  lib/stdlib/src/error_logger_file_h.erl | 36
-rw-r--r--  lib/stdlib/src/error_logger_tty_h.erl | 116
-rw-r--r--  lib/stdlib/src/escript.erl | 2
-rw-r--r--  lib/stdlib/src/ets.erl | 2
-rw-r--r--  lib/stdlib/src/gen.erl | 2
-rw-r--r--  lib/stdlib/src/gen_event.erl | 20
-rw-r--r--  lib/stdlib/src/gen_fsm.erl | 40
-rw-r--r--  lib/stdlib/src/gen_server.erl | 28
-rw-r--r--  lib/stdlib/src/gen_statem.erl | 30
-rw-r--r--  lib/stdlib/src/otp_internal.erl | 18
-rw-r--r--  lib/stdlib/src/proc_lib.erl | 30
-rw-r--r--  lib/stdlib/src/qlc.erl | 2
-rw-r--r--  lib/stdlib/src/shell.erl | 18
-rw-r--r--  lib/stdlib/src/slave.erl | 4
-rw-r--r--  lib/stdlib/src/string.erl | 10
-rw-r--r--  lib/stdlib/src/supervisor.erl | 4
-rw-r--r--  lib/stdlib/test/Makefile | 1
-rw-r--r--  lib/stdlib/test/c_SUITE.erl | 53
-rw-r--r--  lib/stdlib/test/edlin_expand_SUITE.erl | 26
-rw-r--r--  lib/stdlib/test/erl_lint_SUITE.erl | 71
-rw-r--r--  lib/stdlib/test/error_logger_h_SUITE.erl | 10
-rw-r--r--  lib/stdlib/test/id_transform_SUITE.erl | 9
-rw-r--r--  lib/stdlib/test/proc_lib_SUITE.erl | 74
-rw-r--r--  lib/stdlib/test/re_SUITE_data/testoutput1 | 4
-rw-r--r--  lib/stdlib/test/re_SUITE_data/testoutput8 | 4
-rw-r--r--  lib/stdlib/test/shell_SUITE.erl | 50
-rw-r--r--  lib/stdlib/test/unicode_expand.erl | 36
-rw-r--r--  lib/stdlib/vsn.mk | 2
-rw-r--r--  lib/syntax_tools/src/epp_dodger.erl | 4
-rw-r--r--  lib/syntax_tools/src/erl_comment_scan.erl | 2
-rw-r--r--  lib/syntax_tools/src/erl_tidy.erl | 24
-rw-r--r--  lib/syntax_tools/src/igor.erl | 20
-rw-r--r--  lib/syntax_tools/src/merl.erl | 6
-rw-r--r--  lib/syntax_tools/test/syntax_tools_SUITE.erl | 6
-rw-r--r--  lib/tools/doc/src/lcnt.xml | 104
-rw-r--r--  lib/tools/doc/src/lcnt_chapter.xml | 9
-rw-r--r--  lib/tools/doc/src/notes.xml | 20
-rw-r--r--  lib/tools/doc/src/venn2.fig | 66
-rw-r--r--  lib/tools/doc/src/venn2.gif | bin 3369 -> 3507 bytes
-rw-r--r--  lib/tools/doc/src/xref.xml | 12
-rw-r--r--  lib/tools/emacs/erlang.el | 177
-rw-r--r--  lib/tools/src/eprof.erl | 30
-rw-r--r--  lib/tools/src/fprof.erl | 36
-rw-r--r--  lib/tools/src/lcnt.erl | 77
-rw-r--r--  lib/tools/src/make.erl | 48
-rw-r--r--  lib/tools/src/tools.app.src | 2
-rw-r--r--  lib/tools/src/xref_base.erl | 47
-rw-r--r--  lib/tools/src/xref_parser.yrl | 6
-rw-r--r--  lib/tools/src/xref_reader.erl | 18
-rw-r--r--  lib/tools/src/xref_utils.erl | 13
-rw-r--r--  lib/tools/test/lcnt_SUITE.erl | 5
-rw-r--r--  lib/tools/test/make_SUITE.erl | 43
-rw-r--r--  lib/tools/test/xref_SUITE.erl | 68
-rw-r--r--  lib/wx/api_gen/README | 3
-rw-r--r--  lib/wx/api_gen/wx_doxygen.conf | 6
-rw-r--r--  lib/wx/api_gen/wx_gen.erl | 7
-rw-r--r--  lib/wx/api_gen/wxapi.conf | 4
-rw-r--r--  lib/wx/c_src/gen/wxe_funcs.cpp | 6
-rw-r--r--  lib/wx/c_src/gen/wxe_macros.h | 10
-rw-r--r--  lib/wx/src/gen/wxGraphicsContext.erl | 4
-rw-r--r--  lib/wx/src/gen/wxe_debug.hrl | 10
-rw-r--r--  lib/wx/src/gen/wxe_funcs.hrl | 10
-rw-r--r--  lib/wx/src/wx_object.erl | 22
-rw-r--r--  lib/tools/vsn.mk | 2
356 files changed, 12290 insertions, 5459 deletions
diff --git a/lib/asn1/doc/src/notes.xml b/lib/asn1/doc/src/notes.xml
index 26640acabc..5399528271 100644
--- a/lib/asn1/doc/src/notes.xml
+++ b/lib/asn1/doc/src/notes.xml
@@ -32,6 +32,40 @@
<p>This document describes the changes made to the asn1 application.</p>
+<section><title>Asn1 5.0.2</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ Default values now work in extension for PER, so if you
+ give the atom <c>asn1_DEFAULT</c> instead of a value it
+ will become the default value.</p>
+ <p>
+ Own Id: OTP-13011 Aux Id: ERIERL-60 </p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Asn1 5.0.1</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ Fixed compilation error of generated code caused by a
+ missing quotation of function names as part of an
+ external call for encoding.</p>
+ <p>
+ Own Id: OTP-14519 Aux Id: ERIERL-49 </p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>Asn1 5.0</title>
<section><title>Fixed Bugs and Malfunctions</title>
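The OTP-13011 entry above means that a component declared with DEFAULT in the extension part of a SEQUENCE can now be left out by passing the atom asn1_DEFAULT. A minimal sketch, assuming an ExtensionDefault module compiled with the per backend exactly as in the testExtensionDefault suite added further down:

    %% Message ::= SEQUENCE { id INTEGER (0..5), ..., priority Priority DEFAULT low }
    {ok,Enc} = 'ExtensionDefault':encode('Message', {'Message',1,asn1_DEFAULT}),
    %% the absent extension is not encoded, so decoding gives back the default value
    {ok,{'Message',1,low}} = 'ExtensionDefault':decode('Message', Enc).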
diff --git a/lib/asn1/src/asn1ct_constructed_per.erl b/lib/asn1/src/asn1ct_constructed_per.erl
index 3f1be4febb..aff383479b 100644
--- a/lib/asn1/src/asn1ct_constructed_per.erl
+++ b/lib/asn1/src/asn1ct_constructed_per.erl
@@ -985,9 +985,11 @@ gen_enc_components_call1(Gen, TopType, [C|Rest], DynamicEnc, Ext) ->
Imm1;
'OPTIONAL' ->
enc_absent(Gen, Element, [asn1_NOVALUE], Imm1);
- {'DEFAULT',Def} ->
+ {'DEFAULT',Def} when Ext =:= noext ->
DefValues = def_values(Type, Def),
- enc_absent(Gen, Element, DefValues, Imm1)
+ enc_absent(Gen, Element, DefValues, Imm1);
+ {'DEFAULT',_} ->
+ enc_absent(Gen, Element, [asn1_DEFAULT], Imm1)
end,
Imm = case Imm2 of
[] -> [];
diff --git a/lib/asn1/src/asn1ct_gen.erl b/lib/asn1/src/asn1ct_gen.erl
index 838d59a512..806f8420ec 100644
--- a/lib/asn1/src/asn1ct_gen.erl
+++ b/lib/asn1/src/asn1ct_gen.erl
@@ -145,27 +145,37 @@ pgen_n2nconversion(_Erules,#typedef{name=TypeName,typespec=#type{def={'ENUMERATE
pgen_n2nconversion(_Erules,_) ->
true.
-pgen_name2numfunc(_TypeName,[], _) ->
+pgen_name2numfunc(TypeNameAsAtom,Mapping,Ext) when is_atom(TypeNameAsAtom) ->
+ FuncName = list_to_atom("name2num_"++atom_to_list(TypeNameAsAtom)),
+ pgen_name2numfunc1(FuncName,Mapping,Ext).
+
+pgen_name2numfunc1(_FuncName,[], _) ->
true;
-pgen_name2numfunc(TypeName,[{Atom,Number}], extension_marker) ->
- emit(["name2num_",TypeName,"(",{asis,Atom},") ->",Number,";",nl]),
- emit(["name2num_",TypeName,"({asn1_enum, Num}) -> Num.",nl,nl]);
-pgen_name2numfunc(TypeName,[{Atom,Number}], _) ->
- emit(["name2num_",TypeName,"(",{asis,Atom},") ->",Number,".",nl,nl]);
-pgen_name2numfunc(TypeName,[{Atom,Number}|NNRest], EM) ->
- emit(["name2num_",TypeName,"(",{asis,Atom},") ->",Number,";",nl]),
- pgen_name2numfunc(TypeName,NNRest, EM).
-
-pgen_num2namefunc(_TypeName,[], _) ->
+pgen_name2numfunc1(FuncName,[{Atom,Number}], extension_marker) ->
+ emit([{asis,FuncName},"(",{asis,Atom},") ->",Number,";",nl]),
+ emit([{asis,FuncName},"({asn1_enum, Num}) -> Num.",nl,nl]);
+pgen_name2numfunc1(FuncName,[{Atom,Number}], _) ->
+ emit([{asis,FuncName},"(",{asis,Atom},") ->",Number,".",nl,nl]);
+pgen_name2numfunc1(FuncName,[{Atom,Number}|NNRest], EM) ->
+ emit([{asis,FuncName},"(",{asis,Atom},") ->",Number,";",nl]),
+ pgen_name2numfunc1(FuncName,NNRest, EM).
+
+pgen_num2namefunc(TypeNameAsAtom,Mapping,Ext) when is_atom(TypeNameAsAtom) ->
+ FuncName = list_to_atom("num2name_"++atom_to_list(TypeNameAsAtom)),
+ pgen_num2namefunc1(FuncName,Mapping,Ext).
+
+pgen_num2namefunc1(_FuncName,[], _) ->
true;
-pgen_num2namefunc(TypeName,[{Atom,Number}], extension_marker) ->
- emit(["num2name_",TypeName,"(",Number,") ->",{asis,Atom},";",nl]),
- emit(["num2name_",TypeName,"(ExtensionNum) -> {asn1_enum, ExtensionNum}.",nl,nl]);
-pgen_num2namefunc(TypeName,[{Atom,Number}], _) ->
- emit(["num2name_",TypeName,"(",Number,") ->",{asis,Atom},".",nl,nl]);
-pgen_num2namefunc(TypeName,[{Atom,Number}|NNRest], EM) ->
- emit(["num2name_",TypeName,"(",Number,") ->",{asis,Atom},";",nl]),
- pgen_num2namefunc(TypeName,NNRest, EM).
+pgen_num2namefunc1(FuncName,[{Atom,Number}], extension_marker) ->
+ emit([{asis,FuncName},"(",Number,") ->",{asis,Atom},";",nl]),
+ emit([{asis,FuncName},"(ExtensionNum) -> {asn1_enum, ExtensionNum}.",nl,nl]);
+pgen_num2namefunc1(FuncName,[{Atom,Number}], _) ->
+ emit([{asis,FuncName},"(",Number,") ->",{asis,Atom},".",nl,nl]);
+pgen_num2namefunc1(FuncName,[{Atom,Number}|NNRest], EM) ->
+ emit([{asis,FuncName},"(",Number,") ->",{asis,Atom},";",nl]),
+ pgen_num2namefunc1(FuncName,NNRest, EM).
+
+
pgen_objects(_,_,_,[]) ->
true;
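The point of emitting the generated function names through {asis,FuncName} above is that an enumerated type name such as Cause-Misc yields a function name that is only a legal atom when single-quoted. A short usage sketch, assuming the module has been compiled with the n2n option as in the testName2Number case below:

    %% asn1ct:compile("EnumN2N", [per, {n2n,'Cause-Misc'}, {n2n,'CauseProtocol'}]),
    0 = 'EnumN2N':'name2num_Cause-Misc'('control-processing-overload'),
    'unknown-PLMN' = 'EnumN2N':'num2name_Cause-Misc'(5),
    4 = 'EnumN2N':name2num_CauseProtocol('semantic-error').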
diff --git a/lib/asn1/src/asn1ct_gen_per.erl b/lib/asn1/src/asn1ct_gen_per.erl
index 28b4e46b0c..82e9326294 100644
--- a/lib/asn1/src/asn1ct_gen_per.erl
+++ b/lib/asn1/src/asn1ct_gen_per.erl
@@ -101,7 +101,7 @@ gen_encode_user(Erules,D) when is_record(D,typedef) ->
#'Externaltypereference'{module=CurrMod,type=Etype} ->
emit([{asis,enc_func(Etype)},"(Val).",nl]);
#'Externaltypereference'{module=Emod,type=Etype} ->
- emit([{asis,Emod},":",enc_func(Etype),"(Val).",nl])
+ emit([{asis,Emod},":",{asis,enc_func(Etype)},"(Val).",nl])
end.
diff --git a/lib/asn1/src/asn1rtt_per_common.erl b/lib/asn1/src/asn1rtt_per_common.erl
index 2ecc9e4bc7..5b5f47dfee 100644
--- a/lib/asn1/src/asn1rtt_per_common.erl
+++ b/lib/asn1/src/asn1rtt_per_common.erl
@@ -542,6 +542,7 @@ extension_bitmap(_Val, Pos, Limit, Acc) when Pos >= Limit ->
extension_bitmap(Val, Pos, Limit, Acc) ->
Bit = case element(Pos, Val) of
asn1_NOVALUE -> 0;
+ asn1_DEFAULT -> 0;
_ -> 1
end,
extension_bitmap(Val, Pos+1, Limit, (Acc bsl 1) bor Bit).
diff --git a/lib/asn1/test/Makefile b/lib/asn1/test/Makefile
index f4041fa89b..c38d1c6ebd 100644
--- a/lib/asn1/test/Makefile
+++ b/lib/asn1/test/Makefile
@@ -43,6 +43,7 @@ MODULES= \
testChoTypeRefSet \
testConstraints \
testDef \
+ testExtensionDefault \
testOpt \
testSeqDefault \
testSeqExtension \
diff --git a/lib/asn1/test/asn1_SUITE.erl b/lib/asn1/test/asn1_SUITE.erl
index 5fe6945ff2..b98a704e28 100644
--- a/lib/asn1/test/asn1_SUITE.erl
+++ b/lib/asn1/test/asn1_SUITE.erl
@@ -147,6 +147,7 @@ groups() ->
testImport,
testDER,
testDEFAULT,
+ testExtensionDefault,
testMvrasn6,
testContextSwitchingTypes,
testOpenTypeImplicitTag,
@@ -265,7 +266,7 @@ replace_path(PathA, PathB) ->
true = code:add_patha(PathB).
join(Rule, Opts) ->
- string:join([atom_to_list(Rule)|lists:map(fun atom_to_list/1, Opts)], "_").
+ lists:join("_", [atom_to_list(Rule)|lists:map(fun atom_to_list/1, Opts)]).
%%------------------------------------------------------------------------------
%% Test cases
@@ -444,6 +445,12 @@ testDEFAULT(Config, Rule, Opts) ->
testDef:main(Rule),
testSeqSetDefaultVal:main(Rule, Opts).
+testExtensionDefault(Config) ->
+ test(Config, fun testExtensionDefault/3).
+testExtensionDefault(Config, Rule, Opts) ->
+ asn1_test_lib:compile_all(["ExtensionDefault"], Config, [Rule|Opts]),
+ testExtensionDefault:main(Rule).
+
testMaps(Config) ->
test(Config, fun testMaps/3,
[{ber,[maps,no_ok_wrapper]},
@@ -1198,14 +1205,14 @@ testComment(Config) ->
testName2Number(Config) ->
N2NOptions0 = [{n2n,Type} ||
- Type <- ['CauseMisc', 'CauseProtocol',
- 'CauseRadioNetwork',
- 'CauseTransport','CauseNas']],
+ Type <- ['Cause-Misc', 'CauseProtocol']],
N2NOptions = [?NO_MAPS_MODULE|N2NOptions0],
- asn1_test_lib:compile("S1AP-IEs", Config, N2NOptions),
+ asn1_test_lib:compile("EnumN2N", Config, N2NOptions),
- 0 = 'S1AP-IEs':name2num_CauseMisc('control-processing-overload'),
- 'unknown-PLMN' = 'S1AP-IEs':num2name_CauseMisc(5),
+ 0 = 'EnumN2N':'name2num_Cause-Misc'('control-processing-overload'),
+ 'unknown-PLMN' = 'EnumN2N':'num2name_Cause-Misc'(5),
+ 4 = 'EnumN2N':name2num_CauseProtocol('semantic-error'),
+ 'transfer-syntax-error' = 'EnumN2N':num2name_CauseProtocol(0),
%% OTP-10144
%% Test that n2n option generates name2num and num2name functions supporting
diff --git a/lib/asn1/test/asn1_SUITE_data/EnumN2N.asn1 b/lib/asn1/test/asn1_SUITE_data/EnumN2N.asn1
index a724f2f3f5..a610eb6230 100644
--- a/lib/asn1/test/asn1_SUITE_data/EnumN2N.asn1
+++ b/lib/asn1/test/asn1_SUITE_data/EnumN2N.asn1
@@ -1,6 +1,28 @@
EnumN2N DEFINITIONS AUTOMATIC TAGS ::=
BEGIN
+Cause-Misc ::= ENUMERATED {
+ control-processing-overload,
+ not-enough-user-plane-processing-resources,
+ hardware-failure,
+ om-intervention,
+ unspecified,
+ unknown-PLMN,
+...
+}
+
+CauseProtocol ::= ENUMERATED {
+ transfer-syntax-error,
+ abstract-syntax-error-reject,
+ abstract-syntax-error-ignore-and-notify,
+ message-not-compatible-with-receiver-state,
+ semantic-error,
+ abstract-syntax-error-falsely-constructed-message,
+ unspecified,
+ ...
+}
+
+
NoExt ::= ENUMERATED {
blue(0),
red(1),
diff --git a/lib/asn1/test/asn1_SUITE_data/ExtensionDefault.asn1 b/lib/asn1/test/asn1_SUITE_data/ExtensionDefault.asn1
new file mode 100644
index 0000000000..67d9cb6312
--- /dev/null
+++ b/lib/asn1/test/asn1_SUITE_data/ExtensionDefault.asn1
@@ -0,0 +1,12 @@
+ExtensionDefault DEFINITIONS AUTOMATIC TAGS ::=
+
+BEGIN
+
+Message ::= SEQUENCE {
+ id INTEGER (0..5),
+ ...,
+ priority Priority DEFAULT low
+}
+Priority ::= ENUMERATED { low(0), high(1), ... }
+
+END
diff --git a/lib/asn1/test/asn1_SUITE_data/ImportsFrom.asn1 b/lib/asn1/test/asn1_SUITE_data/ImportsFrom.asn1
index 32b8f75dde..dee3cd5048 100644
--- a/lib/asn1/test/asn1_SUITE_data/ImportsFrom.asn1
+++ b/lib/asn1/test/asn1_SUITE_data/ImportsFrom.asn1
@@ -1,8 +1,9 @@
ImportsFrom DEFINITIONS AUTOMATIC TAGS ::=
BEGIN
-IMPORTS Int FROM ImportsFrom2;
+IMPORTS Int, Quoted-Seq FROM ImportsFrom2;
i Int ::= 42
+My-Seq ::= Quoted-Seq
END
diff --git a/lib/asn1/test/asn1_SUITE_data/ImportsFrom2.asn1 b/lib/asn1/test/asn1_SUITE_data/ImportsFrom2.asn1
index b0c29d24ae..a8e619e215 100644
--- a/lib/asn1/test/asn1_SUITE_data/ImportsFrom2.asn1
+++ b/lib/asn1/test/asn1_SUITE_data/ImportsFrom2.asn1
@@ -2,6 +2,11 @@ ImportsFrom2 DEFINITIONS AUTOMATIC TAGS ::=
BEGIN
IMPORTS Int FROM ImportsFrom3;
+Quoted-Seq ::= SEQUENCE {
+ x INTEGER(0..17),
+ y INTEGER(0..666)
+}
+
LocalDef ::= OCTET STRING
END
diff --git a/lib/asn1/test/testExtensionDefault.erl b/lib/asn1/test/testExtensionDefault.erl
new file mode 100644
index 0000000000..cc50fa95b8
--- /dev/null
+++ b/lib/asn1/test/testExtensionDefault.erl
@@ -0,0 +1,53 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2017. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+%%
+-module(testExtensionDefault).
+
+-export([main/1]).
+
+main(_Erule) ->
+ roundtrip('Message', {'Message',1,low}), %Will be explicitly encoded.
+ roundtrip('Message', {'Message',1,high}),
+ roundtrip('Message', {'Message',1,asn1_DEFAULT}, {'Message',1,low}),
+
+ map_roundtrip('Message', #{id=>1,priority=>low}), %Will be explicitly encoded.
+ map_roundtrip('Message', #{id=>1,priority=>high}),
+ map_roundtrip('Message', #{id=>1}, #{id=>1,priority=>low}),
+ ok.
+
+roundtrip(Type, Value) ->
+ asn1_test_lib:roundtrip('ExtensionDefault', Type, Value).
+
+roundtrip(Type, Value, Expected) ->
+ %% asn1_test_lib:roundtrip/3 will invoke map_roundtrip/3, which will
+ %% not work in this case. Therefore, implement the roundtrip ourselves.
+ M = 'ExtensionDefault',
+ {ok,Enc} = M:encode(Type, Value),
+ {ok,Expected} = M:decode(Type, Enc),
+ ok.
+
+map_roundtrip(Type, Value) ->
+ map_roundtrip(Type, Value, Value).
+
+map_roundtrip(Type, Value, Expected) ->
+ M = 'maps_ExtensionDefault',
+ Enc = M:encode(Type, Value),
+ Expected = M:decode(Type, Enc),
+ ok.
diff --git a/lib/asn1/test/testUniqueObjectSets.erl b/lib/asn1/test/testUniqueObjectSets.erl
index 476d190651..cabdb44a0c 100644
--- a/lib/asn1/test/testUniqueObjectSets.erl
+++ b/lib/asn1/test/testUniqueObjectSets.erl
@@ -60,7 +60,7 @@ main(CaseDir, Rule, Opts) ->
Objs = [gen_obj(I) || {I,_,_} <- D1],
DupObjs = [gen_dup_obj(I, T) || {I,T,_} <- D1],
DupObjRefs0 = [gen_dup_obj_refs(I) || {I,_,_} <- D1],
- DupObjRefs = string:join(DupObjRefs0, " |\n"),
+ DupObjRefs = lists:join(" |\n", DupObjRefs0),
Asn1Spec = 'UniqueObjectSets',
A = ["UniqueObjectSets DEFINITIONS AUTOMATIC TAGS ::=\n",
"BEGIN\n\n",
diff --git a/lib/asn1/test/test_modified_x420.erl b/lib/asn1/test/test_modified_x420.erl
index 6cd9e0e33b..15f7c70978 100644
--- a/lib/asn1/test/test_modified_x420.erl
+++ b/lib/asn1/test/test_modified_x420.erl
@@ -38,7 +38,7 @@ read_pem(File) ->
extract_base64(Binary) ->
- extract_base64_lines(string:tokens(binary_to_list(Binary), "\n")).
+ extract_base64_lines(string:lexemes(binary_to_list(Binary), "\n")).
extract_base64_lines(["-----BEGIN"++_ | Lines]) ->
take_base64_lines(Lines, _Acc = []);
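The two test changes above replace the old string API with its OTP 20 counterparts: string:join/2 becomes lists:join/2 (which keeps the separator as a separate list element) and string:tokens/2 becomes string:lexemes/2. A small sketch with illustrative values only:

    "per_maps" = lists:flatten(lists:join("_", ["per", "maps"])),
    ["-----BEGIN X-----", "Zm9v", "-----END X-----"] =
        string:lexemes("-----BEGIN X-----\nZm9v\n-----END X-----\n", "\n").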
diff --git a/lib/asn1/vsn.mk b/lib/asn1/vsn.mk
index 7329a9f879..5900f3037e 100644
--- a/lib/asn1/vsn.mk
+++ b/lib/asn1/vsn.mk
@@ -1 +1 @@
-ASN1_VSN = 5.0
+ASN1_VSN = 5.0.2
diff --git a/lib/common_test/doc/src/notes.xml b/lib/common_test/doc/src/notes.xml
index 28b2d44168..37a1846160 100644
--- a/lib/common_test/doc/src/notes.xml
+++ b/lib/common_test/doc/src/notes.xml
@@ -33,6 +33,26 @@
<file>notes.xml</file>
</header>
+<section><title>Common_Test 1.15.1</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ In OTP-20.0, the behavior of c, make, and ct_make was
+ changed so that in some cases the beam files by default
+ would be written to the directory where the source files
+ were found. This is now changed back to the old behavior
so that beam files are by default written to the current
directory.</p>
+ <p>
+ Own Id: OTP-14489 Aux Id: ERL-438 </p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>Common_Test 1.15</title>
<section><title>Fixed Bugs and Malfunctions</title>
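The OTP-14489 entry above restores the pre-20.0 default: unless an outdir is given, c:c/1, make:all/1 and ct_make:all/1 write the generated .beam file to the current working directory rather than next to the source file. A hedged sketch of both variants (the file names are placeholders):

    %% default: src/foo.erl is compiled and foo.beam lands in the current directory
    up_to_date = make:files(["src/foo.erl"]),
    %% explicit: direct the object code somewhere else yourself
    up_to_date = make:files(["src/foo.erl"], [{outdir, "ebin"}, debug_info]).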
diff --git a/lib/common_test/src/ct.erl b/lib/common_test/src/ct.erl
index 19b0ee20fe..a12c0c9101 100644
--- a/lib/common_test/src/ct.erl
+++ b/lib/common_test/src/ct.erl
@@ -848,7 +848,8 @@ capture_get([ExclCat | ExclCategories]) ->
Strs = test_server:capture_get(),
CatsStr = [atom_to_list(ExclCat) |
[[$| | atom_to_list(EC)] || EC <- ExclCategories]],
- {ok,MP} = re:compile("<div class=\"(" ++ lists:flatten(CatsStr) ++ ")\">.*"),
+ {ok,MP} = re:compile("<div class=\"(" ++ lists:flatten(CatsStr) ++ ")\">.*",
+ [unicode]),
lists:flatmap(fun(Str) ->
case re:run(Str, MP) of
{match,_} -> [];
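The [unicode] option that this patch adds to the re calls here and in ct_telnet, ct_testspec, test_server_node and test_server_sup matters as soon as a pattern or subject contains code points above 255; without it such a call exits with badarg instead of matching. A minimal illustration:

    {match, _} = re:run("αβγ", "β", [unicode]),
    %% the same call without the option is an error, because a list
    %% with code points above 255 is not valid iodata
    {'EXIT', {badarg, _}} = (catch re:run("αβγ", "β")).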
diff --git a/lib/common_test/src/ct_make.erl b/lib/common_test/src/ct_make.erl
index 4d66796b83..220cb0473d 100644
--- a/lib/common_test/src/ct_make.erl
+++ b/lib/common_test/src/ct_make.erl
@@ -280,15 +280,47 @@ recompile(File, NoExec, Load, Opts) ->
do_recompile(_File, true, _Load, _Opts) ->
out_of_date;
-do_recompile(File, false, noload, Opts) ->
+do_recompile(File, false, Load, Opts) ->
io:format("Recompile: ~ts\n",[File]),
- compile:file(File, [report_errors, report_warnings, error_summary |Opts]);
-do_recompile(File, false, load, Opts) ->
- io:format("Recompile: ~ts\n",[File]),
- c:c(File, Opts);
-do_recompile(File, false, netload, Opts) ->
- io:format("Recompile: ~ts\n",[File]),
- c:nc(File, Opts).
+ case compile:file(File, [report_errors, report_warnings |Opts]) of
+ Ok when is_tuple(Ok), element(1,Ok)==ok ->
+ maybe_load(element(2,Ok), Load, Opts);
+ _Error ->
+ error
+ end.
+
+maybe_load(_Mod, noload, _Opts) ->
+ ok;
+maybe_load(Mod, Load, Opts) ->
+ %% We have compiled File with options Opts. Find out where the
+ %% output file went to, and load it.
+ case compile:output_generated(Opts) of
+ true ->
+ Dir = proplists:get_value(outdir,Opts,"."),
+ do_load(Dir, Mod, Load);
+ false ->
+ io:format("** Warning: No object file created - nothing loaded **~n"),
+ ok
+ end.
+
+do_load(Dir, Mod, load) ->
+ code:purge(Mod),
+ case code:load_abs(filename:join(Dir, Mod),Mod) of
+ {module,Mod} ->
+ {ok,Mod};
+ Other ->
+ Other
+ end;
+do_load(Dir, Mod, netload) ->
+ Obj = atom_to_list(Mod) ++ code:objfile_extension(),
+ Fname = filename:join(Dir, Obj),
+ case file:read_file(Fname) of
+ {ok,Bin} ->
+ rpc:eval_everywhere(code,load_binary,[Mod,Fname,Bin]),
+ {ok,Mod};
+ Other ->
+ Other
+ end.
exists(File) ->
case file:read_file_info(File) of
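maybe_load/3 above asks compile:output_generated/1 whether the chosen options produce an object file at all, which is what turns option sets like binary into a warning instead of a failed load. Roughly:

    true  = compile:output_generated([debug_info]),          %% a .beam file is written to disk
    false = compile:output_generated([binary, debug_info]).  %% the code is only returned in memory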
diff --git a/lib/common_test/src/ct_telnet.erl b/lib/common_test/src/ct_telnet.erl
index 14d9d381da..b50cddd492 100644
--- a/lib/common_test/src/ct_telnet.erl
+++ b/lib/common_test/src/ct_telnet.erl
@@ -1455,7 +1455,7 @@ match_line(Name,Pid,Line,[{prompt,PromptType}|Patterns],FoundPrompt,Term,
when PromptType=/=FoundPrompt ->
match_line(Name,Pid,Line,Patterns,FoundPrompt,Term,EO,RetTag);
match_line(Name,Pid,Line,[{Tag,Pattern}|Patterns],FoundPrompt,Term,EO,RetTag) ->
- case re:run(Line,Pattern,[{capture,all,list}]) of
+ case re:run(Line,Pattern,[{capture,all,list},unicode]) of
nomatch ->
match_line(Name,Pid,Line,Patterns,FoundPrompt,Term,EO,RetTag);
{match,Match} ->
@@ -1463,7 +1463,7 @@ match_line(Name,Pid,Line,[{Tag,Pattern}|Patterns],FoundPrompt,Term,EO,RetTag) ->
{RetTag,{Tag,Match}}
end;
match_line(Name,Pid,Line,[Pattern|Patterns],FoundPrompt,Term,EO,RetTag) ->
- case re:run(Line,Pattern,[{capture,all,list}]) of
+ case re:run(Line,Pattern,[{capture,all,list},unicode]) of
nomatch ->
match_line(Name,Pid,Line,Patterns,FoundPrompt,Term,EO,RetTag);
{match,Match} ->
@@ -1575,7 +1575,7 @@ split_lines([],Line,Lines) ->
match_prompt(Str,Prx) ->
match_prompt(Str,Prx,[]).
match_prompt(Str,Prx,Acc) ->
- case re:run(Str,Prx) of
+ case re:run(Str,Prx,[unicode]) of
nomatch ->
noprompt;
{match,[{Start,Len}]} ->
diff --git a/lib/common_test/src/ct_testspec.erl b/lib/common_test/src/ct_testspec.erl
index 09839bd35d..bb445bb0d2 100644
--- a/lib/common_test/src/ct_testspec.erl
+++ b/lib/common_test/src/ct_testspec.erl
@@ -537,7 +537,7 @@ replace_names_in_elems([],Modified,_Defs) ->
replace_names_in_string(Term,Defs=[{Name,Replacement=[Ch|_]}|Ds])
when is_integer(Ch) ->
try re:replace(Term,[$'|atom_to_list(Name)]++"'",
- Replacement,[{return,list}]) of
+ Replacement,[{return,list},unicode]) of
Term -> % no match, proceed
replace_names_in_string(Term,Ds);
Term1 ->
@@ -569,7 +569,7 @@ replace_names_in_node1(NodeStr,Defs=[{Name,Replacement}|Ds]) ->
replace_names_in_node1(NodeStr,Ds);
true ->
case re:replace(NodeStr,atom_to_list(Name),
- ReplStr,[{return,list}]) of
+ ReplStr,[{return,list},unicode]) of
NodeStr -> % no match, proceed
replace_names_in_node1(NodeStr,Ds);
NodeStr1 ->
diff --git a/lib/common_test/src/test_server.erl b/lib/common_test/src/test_server.erl
index ee3a5e4bba..dc6b7a536c 100644
--- a/lib/common_test/src/test_server.erl
+++ b/lib/common_test/src/test_server.erl
@@ -21,7 +21,7 @@
-define(DEFAULT_TIMETRAP_SECS, 60).
%%% TEST_SERVER_CTRL INTERFACE %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
--export([run_test_case_apply/1,init_target_info/0]).
+-export([run_test_case_apply/1,init_target_info/0,init_valgrind/0]).
-export([cover_compile/1,cover_analyse/2]).
%%% TEST_SERVER_SUP INTERFACE %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -49,6 +49,10 @@
-export([break/1,break/2,break/3,continue/0,continue/1]).
+%%% DEBUGGER INTERFACE %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+-export([valgrind_new_leaks/0, valgrind_format/2,
+ is_valgrind/0]).
+
%%% PRIVATE EXPORTED %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-export([]).
@@ -69,6 +73,10 @@ init_target_info() ->
username=test_server_sup:get_username(),
cookie=atom_to_list(erlang:get_cookie())}.
+init_valgrind() ->
+ valgrind_new_leaks().
+
+
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% cover_compile(#cover{app=App,incl=Include,excl=Exclude,cross=Cross}) ->
%% {ok,#cover{mods=AnalyseModules}} | {error,Reason}
@@ -358,11 +366,12 @@ stick_all_sticky(Node,Sticky) ->
%% compensate timetraps for runtime delays introduced by e.g. tools like
%% cover.
-run_test_case_apply({Mod,Func,Args,Name,RunInit,TimetrapData}) ->
+run_test_case_apply({CaseNum,Mod,Func,Args,Name,RunInit,TimetrapData}) ->
case is_valgrind() of
false ->
ok;
true ->
+ valgrind_format("Test case #~w ~w:~w/1", [CaseNum, Mod, Func]),
os:putenv("VALGRIND_LOGFILE_INFIX",atom_to_list(Mod)++"."++
atom_to_list(Func)++"-")
end,
@@ -370,6 +379,7 @@ run_test_case_apply({Mod,Func,Args,Name,RunInit,TimetrapData}) ->
Result = run_test_case_apply(Mod, Func, Args, Name, RunInit,
TimetrapData),
ProcAft = erlang:system_info(process_count),
+ valgrind_new_leaks(),
DetFail = get(test_server_detected_fail),
{Result,DetFail,ProcBef,ProcAft}.
@@ -2735,11 +2745,36 @@ is_commercial() ->
%%
%% Returns true if valgrind is running, else false
is_valgrind() ->
- case os:getenv("TS_RUN_VALGRIND") of
- false -> false;
- _ -> true
+ case catch erlang:system_info({valgrind, running}) of
+ {'EXIT', _} -> false;
+ Res -> Res
end.
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% DEBUGGER INTERFACE %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% valgrind_new_leaks() -> ok
+%%
+%% Checks for new memory leaks if Valgrind is active.
+valgrind_new_leaks() ->
+ catch erlang:system_info({valgrind, memory}),
+ ok.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% valgrind_format(Format, Args) -> ok
+%% Format = string()
+%% Args = lists()
+%%
+%% Outputs the formatted string to Valgrind's logfile, if Valgrind is active.
+valgrind_format(Format, Args) ->
+ (catch erlang:system_info({valgrind, io_lib:format(Format, Args)})),
+ ok.
+
+
+
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%
%% Apply given function and reply to caller or proxy.
diff --git a/lib/common_test/src/test_server_ctrl.erl b/lib/common_test/src/test_server_ctrl.erl
index 9412c43187..71978c7267 100644
--- a/lib/common_test/src/test_server_ctrl.erl
+++ b/lib/common_test/src/test_server_ctrl.erl
@@ -2163,6 +2163,7 @@ do_add_end_per_suite_and_skip(LastMod, LastRef, Mod, FwMod) ->
%% Runs the specified tests, then displays/logs the summary.
run_test_cases(TestSpec, Config, TimetrapData) ->
+ test_server:init_valgrind(),
case lists:member(no_src, get(test_server_logopts)) of
true ->
ok;
@@ -3796,7 +3797,7 @@ run_test_case1(Ref, Num, Mod, Func, Args, RunInit,
%% run the test case
{Result,DetectedFail,ProcsBefore,ProcsAfter} =
- run_test_case_apply(Mod, Func, [UpdatedArgs], GrName,
+ run_test_case_apply(Num, Mod, Func, [UpdatedArgs], GrName,
RunInit, TimetrapData),
{Time,RetVal,Loc,Opts,Comment} =
case Result of
@@ -4366,7 +4367,7 @@ do_format_exception(Reason={Error,Stack}) ->
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% run_test_case_apply(Mod, Func, Args, Name, RunInit,
+%% run_test_case_apply(CaseNum, Mod, Func, Args, Name, RunInit,
%% TimetrapData) ->
%% {{Time,RetVal,Loc,Opts,Comment},DetectedFail,ProcessesBefore,ProcessesAfter} |
%% {{died,Reason,unknown,Comment},DetectedFail,ProcessesBefore,ProcessesAfter}
@@ -4380,9 +4381,9 @@ do_format_exception(Reason={Error,Stack}) ->
%% ProcessesBefore = ProcessesAfter = integer()
%%
-run_test_case_apply(Mod, Func, Args, Name, RunInit,
+run_test_case_apply(CaseNum, Mod, Func, Args, Name, RunInit,
TimetrapData) ->
- test_server:run_test_case_apply({Mod,Func,Args,Name,RunInit,
+ test_server:run_test_case_apply({CaseNum,Mod,Func,Args,Name,RunInit,
TimetrapData}).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
diff --git a/lib/common_test/src/test_server_node.erl b/lib/common_test/src/test_server_node.erl
index a18ff1fd62..c0d7e12721 100644
--- a/lib/common_test/src/test_server_node.erl
+++ b/lib/common_test/src/test_server_node.erl
@@ -692,7 +692,7 @@ find_rel_suse_2(Rel, RootWc) ->
case file:list_dir(RelDir) of
{ok,Dirs} ->
case lists:filter(fun(Dir) ->
- case re:run(Dir, Pat) of
+ case re:run(Dir, Pat, [unicode]) of
nomatch -> false;
_ -> true
end
diff --git a/lib/common_test/src/test_server_sup.erl b/lib/common_test/src/test_server_sup.erl
index 9a26de4774..21f4be22fe 100644
--- a/lib/common_test/src/test_server_sup.erl
+++ b/lib/common_test/src/test_server_sup.erl
@@ -346,7 +346,7 @@ check_appup_clauses_plausible([], _Direction, _Modules) ->
ok;
check_appup_clauses_plausible([{Re, Instrs} | Rest], Direction, Modules)
when is_binary(Re) ->
- case re:compile(Re) of
+ case re:compile(Re,[unicode]) of
{ok, _} ->
case check_appup_instructions(Instrs, Direction, Modules) of
ok ->
diff --git a/lib/common_test/test_server/ts_lib.erl b/lib/common_test/test_server/ts_lib.erl
index a7be740c5c..ea039a2c2b 100644
--- a/lib/common_test/test_server/ts_lib.erl
+++ b/lib/common_test/test_server/ts_lib.erl
@@ -120,7 +120,8 @@ specs(Dir) ->
[]
end
end, Specs),
- sort_tests(MainSpecs).
+
+ sort_tests(filter_tests(MainSpecs)).
test_categories(Dir, App) ->
Specs = filelib:wildcard(filename:join([filename:dirname(Dir),
@@ -141,10 +142,29 @@ suites(Dir, App) ->
"*_SUITE.erl"]),
Suites=filelib:wildcard(Glob),
[filename_to_atom(Name) || Name <- Suites].
-
+
filename_to_atom(Name) ->
list_to_atom(filename:rootname(filename:basename(Name))).
+%% Filter out tests of applications that are not accessible
+
+filter_tests(Tests) ->
+ lists:filter(
+ fun(Special) when Special == epmd;
+ Special == emulator;
+ Special == system ->
+ true;
+ (Test) ->
+ case application:load(filename_to_atom(Test)) of
+ {error, {already_loaded, _}} ->
+ true;
+ {error,_NoSuchApplication} ->
+ false;
+ _ ->
+ true
+ end
+ end, Tests).
+
%% Sorts a list of either log files directories or spec files.
sort_tests(Tests) ->
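filter_tests/1 above keeps a test spec only if application:load/1 can find an .app file for the corresponding application (plus the special epmd, emulator and system specs). Its return values look like this, assuming runtime_tools is present but not yet loaded and no_such_app does not exist:

    ok = application:load(runtime_tools),
    {error, {already_loaded, runtime_tools}} = application:load(runtime_tools),
    {error, _NotFound} = application:load(no_such_app).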
diff --git a/lib/common_test/test_server/ts_run.erl b/lib/common_test/test_server/ts_run.erl
index 82ae44ec06..e22fa8d196 100644
--- a/lib/common_test/test_server/ts_run.erl
+++ b/lib/common_test/test_server/ts_run.erl
@@ -96,6 +96,9 @@ ct_run_test(Dir, CommonTestArgs) ->
case ct:run_test(CommonTestArgs) of
{_,_,_} ->
ok;
+ {error,{make_failed, _Modules} = Error} ->
+ io:format("ERROR: ~P\n", [Error,20]),
+ erlang:halt(123, [{flush,false}]);
{error,Error} ->
io:format("ERROR: ~P\n", [Error,20]);
Other ->
@@ -288,6 +291,10 @@ tricky_print_data(Port, Timeout) ->
receive
{Port, {exit_status, 0}} ->
ok;
+ {Port, {exit_status, 123 = N}} ->
+ io:format(user, "Test run exited with status ~p,"
+ "aborting rest of test~n", [N]),
+ erlang:halt(123, [{flush,false}]);
{Port, {exit_status, N}} ->
io:format(user, "Test run exited with status ~p~n", [N])
after 1 ->
diff --git a/lib/common_test/vsn.mk b/lib/common_test/vsn.mk
index a219aa4736..9fc3f7f797 100644
--- a/lib/common_test/vsn.mk
+++ b/lib/common_test/vsn.mk
@@ -1 +1 @@
-COMMON_TEST_VSN = 1.15
+COMMON_TEST_VSN = 1.15.1
diff --git a/lib/compiler/doc/src/notes.xml b/lib/compiler/doc/src/notes.xml
index f3d42a909b..bc335a9eaa 100644
--- a/lib/compiler/doc/src/notes.xml
+++ b/lib/compiler/doc/src/notes.xml
@@ -32,6 +32,23 @@
<p>This document describes the changes made to the Compiler
application.</p>
+<section><title>Compiler 7.1.1</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>Fail labels on guard BIFs weren't taken into account
+ during an optimization pass, and a bug in the validation
+ pass sometimes prevented this from being noticed when a
+ fault occurred.</p>
+ <p>
+ Own Id: OTP-14522 Aux Id: ERIERL-48 </p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>Compiler 7.1</title>
<section><title>Fixed Bugs and Malfunctions</title>
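The OTP-14522 entry above concerns code where a guard BIF such as tuple_size/1 has a fail label inside an if clause while a record update keeps several registers live; the liveopt_guard_bif/1 regression test added to beam_utils_SUITE further down reproduces the original report. Distilled into a stand-alone sketch (the record and function names are hypothetical):

    -record(r, {cause}).

    update({#r{cause = F} = R, X} = A) ->
        if
            is_tuple(F), tuple_size(F) == 2 -> A;   %% guard BIF with a fail label
            true -> {R#r{cause = {F}}, X}           %% record update on the fallback path
        end.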
diff --git a/lib/compiler/src/beam_utils.erl b/lib/compiler/src/beam_utils.erl
index cc6e54ca16..e39fbdc3b7 100644
--- a/lib/compiler/src/beam_utils.erl
+++ b/lib/compiler/src/beam_utils.erl
@@ -789,39 +789,48 @@ live_opt([{recv_mark,_}=I|Is], Regs, D, Acc) ->
live_opt([], _, _, Acc) -> Acc.
-live_opt_block([{set,Ds,Ss,Op}=I0|Is], Regs0, D, Acc) ->
+live_opt_block([{set,Ds,Ss,Op0}|Is], Regs0, D, Acc) ->
Regs1 = x_live(Ss, x_dead(Ds, Regs0)),
- {I,Regs} = case Op of
- {alloc,Live0,Alloc} ->
- %% The life-time analysis used by the code generator
- %% is sometimes too conservative, so it may be
- %% possible to lower the number of live registers
- %% based on the exact liveness information.
- %% The main benefit is that more optimizations that
- %% depend on liveness information (such as the
- %% beam_bool and beam_dead passes) may be applied.
- Live = live_regs(Regs1),
- true = Live =< Live0, %Assertion.
- I1 = {set,Ds,Ss,{alloc,Live,Alloc}},
- {I1,live_call(Live)};
- _ ->
- {I0,Regs1}
- end,
+ {Op, Regs} = live_opt_block_op(Op0, Regs1, D),
+ I = {set, Ds, Ss, Op},
+
case Ds of
- [{x,X}] ->
- case (not is_live(X, Regs0)) andalso Op =:= move of
- true ->
- live_opt_block(Is, Regs0, D, Acc);
- false ->
- live_opt_block(Is, Regs, D, [I|Acc])
- end;
- _ ->
- live_opt_block(Is, Regs, D, [I|Acc])
+ [{x,X}] ->
+ case (not is_live(X, Regs0)) andalso Op =:= move of
+ true ->
+ live_opt_block(Is, Regs0, D, Acc);
+ false ->
+ live_opt_block(Is, Regs, D, [I|Acc])
+ end;
+ _ ->
+ live_opt_block(Is, Regs, D, [I|Acc])
end;
live_opt_block([{'%live',_,_}|Is], Regs, D, Acc) ->
live_opt_block(Is, Regs, D, Acc);
live_opt_block([], Regs, _, Acc) -> {Acc,Regs}.
+live_opt_block_op({alloc,Live0,AllocOp}, Regs0, D) ->
+ Regs =
+ case AllocOp of
+ {Kind, _N, Fail} when Kind =:= gc_bif; Kind =:= put_map ->
+ live_join_label(Fail, D, Regs0);
+ _ ->
+ Regs0
+ end,
+
+ %% The life-time analysis used by the code generator is sometimes too
+ %% conservative, so it may be possible to lower the number of live
+ %% registers based on the exact liveness information. The main benefit is
+ %% that more optimizations that depend on liveness information (such as the
+ %% beam_bool and beam_dead passes) may be applied.
+ Live = live_regs(Regs),
+ true = Live =< Live0,
+ {{alloc,Live,AllocOp}, live_call(Live)};
+live_opt_block_op({bif,_N,Fail} = Op, Regs, D) ->
+ {Op, live_join_label(Fail, D, Regs)};
+live_opt_block_op(Op, Regs, _D) ->
+ {Op, Regs}.
+
live_join_labels([{f,L}|T], D, Regs0) when L =/= 0 ->
Regs = gb_trees:get(L, D) bor Regs0,
live_join_labels(T, D, Regs);
diff --git a/lib/compiler/src/beam_validator.erl b/lib/compiler/src/beam_validator.erl
index f726625510..00901077d3 100644
--- a/lib/compiler/src/beam_validator.erl
+++ b/lib/compiler/src/beam_validator.erl
@@ -928,9 +928,9 @@ verify_call_match_context(Lbl, Ctx, #vst{ft=Ft}) ->
error({unsuitable_bs_start_match2,I})
end.
-allocate(Zero, Stk, Heap, Live, #vst{current=#st{numy=none}=St}=Vst0) ->
+allocate(Zero, Stk, Heap, Live, #vst{current=#st{numy=none}}=Vst0) ->
verify_live(Live, Vst0),
- Vst = prune_x_regs(Live, Vst0),
+ Vst = #vst{current=St} = prune_x_regs(Live, Vst0),
Ys = init_regs(Stk, case Zero of
true -> initialized;
false -> uninitialized
@@ -1430,13 +1430,13 @@ merge_types(bool, {atom,A}) ->
merge_bool(A);
merge_types({atom,A}, bool) ->
merge_bool(A);
-merge_types(#ms{id=Id,valid=B0,slots=Slots}=M,
- #ms{id=Id,valid=B1,slots=Slots}) ->
- M#ms{valid=B0 bor B1,slots=Slots};
-merge_types(#ms{}=M, _) ->
- M;
-merge_types(_, #ms{}=M) ->
- M;
+merge_types(#ms{id=Id1,valid=B0,slots=Slots},
+ #ms{id=Id2,valid=B1,slots=Slots}) ->
+ Id = if
+ Id1 =:= Id2 -> Id1;
+ true -> make_ref()
+ end,
+ #ms{id=Id,valid=B0 band B1,slots=Slots};
merge_types(T1, T2) when T1 =/= T2 ->
%% Too different. All we know is that the type is a 'term'.
term.
diff --git a/lib/compiler/src/sys_core_fold.erl b/lib/compiler/src/sys_core_fold.erl
index e0cd6da06f..d73060fb7e 100644
--- a/lib/compiler/src/sys_core_fold.erl
+++ b/lib/compiler/src/sys_core_fold.erl
@@ -395,10 +395,10 @@ expr(#c_receive{clauses=Cs0,timeout=T0,action=A0}=Recv, Ctxt, Sub) ->
expr(#c_apply{anno=Anno,op=Op0,args=As0}=App, _, Sub) ->
Op1 = expr(Op0, value, Sub),
As1 = expr_list(As0, value, Sub),
- case Op1 of
- #c_var{} ->
+ case cerl:is_data(Op1) of
+ false ->
App#c_apply{op=Op1,args=As1};
- _ ->
+ true ->
add_warning(App, invalid_call),
Err = #c_call{anno=Anno,
module=#c_literal{val=erlang},
diff --git a/lib/compiler/test/beam_utils_SUITE.erl b/lib/compiler/test/beam_utils_SUITE.erl
index a3f1bb93fe..710cb050d4 100644
--- a/lib/compiler/test/beam_utils_SUITE.erl
+++ b/lib/compiler/test/beam_utils_SUITE.erl
@@ -260,6 +260,14 @@ otp_8949_b(A, B) ->
liveopt(_Config) ->
F = liveopt_fun(42, pebkac, user),
void = F(42, #alarmInfo{type=sctp,cause=pebkac,origin=user}),
+
+
+ A = {#alarmInfo{cause = {abc, def}}, ghi},
+ A = liveopt_guard_bif(A),
+
+ B = {#alarmInfo{cause = {abc}}, def},
+ {#alarmInfo{cause = {{abc}}}, def} = liveopt_guard_bif(B),
+
ok.
liveopt_fun(Peer, Cause, Origin) ->
@@ -271,6 +279,15 @@ liveopt_fun(Peer, Cause, Origin) ->
void
end.
+liveopt_guard_bif({#alarmInfo{cause=F}=R, X}=A) ->
+ %% ERIERL-48
+ if
+ is_tuple(F), tuple_size(F) == 2 -> A;
+ true ->
+ R2 = R#alarmInfo{cause={F}},
+ {R2,X}
+ end.
+
%% Thanks to QuickCheck.
coverage(_Config) ->
42+7 = merchant([[],7,false]),
diff --git a/lib/compiler/test/core_SUITE.erl b/lib/compiler/test/core_SUITE.erl
index f8839da42f..0e07e8dd2e 100644
--- a/lib/compiler/test/core_SUITE.erl
+++ b/lib/compiler/test/core_SUITE.erl
@@ -28,7 +28,8 @@
map_core_test/1,eval_case/1,bad_boolean_guard/1,
bs_shadowed_size_var/1,
cover_v3_kernel_1/1,cover_v3_kernel_2/1,cover_v3_kernel_3/1,
- cover_v3_kernel_4/1,cover_v3_kernel_5/1]).
+ cover_v3_kernel_4/1,cover_v3_kernel_5/1,
+ non_variable_apply/1]).
-include_lib("common_test/include/ct.hrl").
@@ -56,7 +57,8 @@ groups() ->
map_core_test,eval_case,bad_boolean_guard,
bs_shadowed_size_var,
cover_v3_kernel_1,cover_v3_kernel_2,cover_v3_kernel_3,
- cover_v3_kernel_4,cover_v3_kernel_5
+ cover_v3_kernel_4,cover_v3_kernel_5,
+ non_variable_apply
]}].
@@ -90,7 +92,7 @@ end_per_group(_GroupName, Config) ->
?comp(cover_v3_kernel_3).
?comp(cover_v3_kernel_4).
?comp(cover_v3_kernel_5).
-
+?comp(non_variable_apply).
try_it(Mod, Conf) ->
Src = filename:join(proplists:get_value(data_dir, Conf),
diff --git a/lib/compiler/test/core_SUITE_data/non_variable_apply.core b/lib/compiler/test/core_SUITE_data/non_variable_apply.core
new file mode 100644
index 0000000000..d9322cc455
--- /dev/null
+++ b/lib/compiler/test/core_SUITE_data/non_variable_apply.core
@@ -0,0 +1,80 @@
+module 'non_variable_apply' ['module_info'/0,
+ 'module_info'/1,
+ 'non_variable_apply'/0]
+ attributes []
+
+'non_variable_apply'/0 =
+ %% Line 4
+ fun () ->
+ case <> of
+ <> when 'true' ->
+ let <OkFun> =
+ fun (_@c0) ->
+ %% Line 5
+ case _@c0 of
+ <'ok'> when 'true' ->
+ 'ok'
+ ( <_@c1> when 'true' ->
+ ( primop 'match_fail'
+ ({'function_clause',_@c1})
+ -| [{'function_name',{'-non_variable_apply/0-fun-0-',1}}] )
+ -| ['compiler_generated'] )
+ end
+ in let <F> =
+ fun (_@c5,_@c4) ->
+ %% Line 6
+ case <_@c5,_@c4> of
+ <F,X> when 'true' ->
+ apply apply 'id'/1 (F) (X)
+ ( <_@c7,_@c6> when 'true' ->
+ ( primop 'match_fail'
+ ({'function_clause',_@c7,_@c6})
+ -| [{'function_name',{'-non_variable_apply/0-fun-1-',2}}] )
+ -| ['compiler_generated'] )
+ end
+ in %% Line 9
+ apply F
+ (OkFun, 'ok')
+ ( <> when 'true' ->
+ ( primop 'match_fail'
+ ({'function_clause'})
+ -| [{'function_name',{'non_variable_apply',0}}] )
+ -| ['compiler_generated'] )
+ end
+'id'/1 =
+ %% Line 11
+ fun (_@c0) ->
+ case _@c0 of
+ <I> when 'true' ->
+ I
+ ( <_@c1> when 'true' ->
+ ( primop 'match_fail'
+ ({'function_clause',_@c1})
+ -| [{'function_name',{'id',1}}] )
+ -| ['compiler_generated'] )
+ end
+'module_info'/0 =
+ fun () ->
+ case <> of
+ <> when 'true' ->
+ call 'erlang':'get_module_info'
+ ('non_variable_apply')
+ ( <> when 'true' ->
+ ( primop 'match_fail'
+ ({'function_clause'})
+ -| [{'function_name',{'module_info',0}}] )
+ -| ['compiler_generated'] )
+ end
+'module_info'/1 =
+ fun (_@c0) ->
+ case _@c0 of
+ <X> when 'true' ->
+ call 'erlang':'get_module_info'
+ ('non_variable_apply', X)
+ ( <_@c1> when 'true' ->
+ ( primop 'match_fail'
+ ({'function_clause',_@c1})
+ -| [{'function_name',{'module_info',1}}] )
+ -| ['compiler_generated'] )
+ end
+end
diff --git a/lib/compiler/vsn.mk b/lib/compiler/vsn.mk
index 463c264a5f..27ee5a3fb7 100644
--- a/lib/compiler/vsn.mk
+++ b/lib/compiler/vsn.mk
@@ -1 +1 @@
-COMPILER_VSN = 7.1
+COMPILER_VSN = 7.1.1
diff --git a/lib/crypto/c_src/crypto.c b/lib/crypto/c_src/crypto.c
index 688ec339aa..1d9c1e0f88 100644
--- a/lib/crypto/c_src/crypto.c
+++ b/lib/crypto/c_src/crypto.c
@@ -110,6 +110,10 @@
#endif
+#if OPENSSL_VERSION_NUMBER >= PACKED_OPENSSL_VERSION_PLAIN(1,0,0)
+# define HAS_EVP_PKEY_CTX
+#endif
+
#if OPENSSL_VERSION_NUMBER >= PACKED_OPENSSL_VERSION_PLAIN(1,0,0)
#include <openssl/modes.h>
@@ -433,13 +437,11 @@ static ERL_NIF_TERM strong_rand_bytes_nif(ErlNifEnv* env, int argc, const ERL_NI
static ERL_NIF_TERM strong_rand_range_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
static ERL_NIF_TERM rand_uniform_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
static ERL_NIF_TERM mod_exp_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
-static ERL_NIF_TERM dss_verify_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
-static ERL_NIF_TERM rsa_verify_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
static ERL_NIF_TERM do_exor(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
static ERL_NIF_TERM rc4_set_key(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
static ERL_NIF_TERM rc4_encrypt_with_state(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
-static ERL_NIF_TERM rsa_sign_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
-static ERL_NIF_TERM dss_sign_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
+static ERL_NIF_TERM pkey_sign_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
+static ERL_NIF_TERM pkey_verify_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
static ERL_NIF_TERM rsa_public_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
static ERL_NIF_TERM rsa_private_crypt(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
static ERL_NIF_TERM rsa_generate_key_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
@@ -452,8 +454,6 @@ static ERL_NIF_TERM srp_user_secret_nif(ErlNifEnv* env, int argc, const ERL_NIF_
static ERL_NIF_TERM srp_host_secret_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
static ERL_NIF_TERM ec_key_generate(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
-static ERL_NIF_TERM ecdsa_sign_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
-static ERL_NIF_TERM ecdsa_verify_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
static ERL_NIF_TERM ecdh_compute_key_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
static ERL_NIF_TERM rand_seed_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[]);
@@ -506,13 +506,11 @@ static ErlNifFunc nif_funcs[] = {
{"strong_rand_range_nif", 1, strong_rand_range_nif},
{"rand_uniform_nif", 2, rand_uniform_nif},
{"mod_exp_nif", 4, mod_exp_nif},
- {"dss_verify_nif", 4, dss_verify_nif},
- {"rsa_verify_nif", 4, rsa_verify_nif},
{"do_exor", 2, do_exor},
{"rc4_set_key", 1, rc4_set_key},
{"rc4_encrypt_with_state", 2, rc4_encrypt_with_state},
- {"rsa_sign_nif", 3, rsa_sign_nif},
- {"dss_sign_nif", 3, dss_sign_nif},
+ {"pkey_sign_nif", 5, pkey_sign_nif},
+ {"pkey_verify_nif", 6, pkey_verify_nif},
{"rsa_public_crypt", 4, rsa_public_crypt},
{"rsa_private_crypt", 4, rsa_private_crypt},
{"rsa_generate_key_nif", 2, rsa_generate_key_nif},
@@ -525,8 +523,6 @@ static ErlNifFunc nif_funcs[] = {
{"srp_host_secret_nif", 5, srp_host_secret_nif},
{"ec_key_generate", 2, ec_key_generate},
- {"ecdsa_sign_nif", 4, ecdsa_sign_nif},
- {"ecdsa_verify_nif", 5, ecdsa_verify_nif},
{"ecdh_compute_key_nif", 3, ecdh_compute_key_nif},
{"rand_seed_nif", 1, rand_seed_nif},
@@ -589,6 +585,23 @@ static ERL_NIF_TERM atom_des_ecb;
static ERL_NIF_TERM atom_blowfish_ecb;
#endif
+static ERL_NIF_TERM atom_rsa;
+static ERL_NIF_TERM atom_dss;
+static ERL_NIF_TERM atom_ecdsa;
+static ERL_NIF_TERM atom_rsa_mgf1_md;
+static ERL_NIF_TERM atom_rsa_padding;
+static ERL_NIF_TERM atom_rsa_pkcs1_pss_padding;
+static ERL_NIF_TERM atom_rsa_x931_padding;
+static ERL_NIF_TERM atom_rsa_pss_saltlen;
+static ERL_NIF_TERM atom_sha224;
+static ERL_NIF_TERM atom_sha256;
+static ERL_NIF_TERM atom_sha384;
+static ERL_NIF_TERM atom_sha512;
+static ERL_NIF_TERM atom_md5;
+static ERL_NIF_TERM atom_ripemd160;
+
+
+
static ErlNifResourceType* hmac_context_rtype;
struct hmac_context
{
@@ -916,6 +929,20 @@ static int initialize(ErlNifEnv* env, ERL_NIF_TERM load_info)
#else
atom_not_supported = enif_make_atom(env,"not_supported");
#endif
+ atom_rsa = enif_make_atom(env,"rsa");
+ atom_dss = enif_make_atom(env,"dss");
+ atom_ecdsa = enif_make_atom(env,"ecdsa");
+ atom_rsa_mgf1_md = enif_make_atom(env,"rsa_mgf1_md");
+ atom_rsa_padding = enif_make_atom(env,"rsa_padding");
+ atom_rsa_pkcs1_pss_padding = enif_make_atom(env,"rsa_pkcs1_pss_padding");
+ atom_rsa_x931_padding = enif_make_atom(env,"rsa_x931_padding");
+ atom_rsa_pss_saltlen = enif_make_atom(env,"rsa_pss_saltlen");
+ atom_sha224 = enif_make_atom(env,"sha224");
+ atom_sha256 = enif_make_atom(env,"sha256");
+ atom_sha384 = enif_make_atom(env,"sha384");
+ atom_sha512 = enif_make_atom(env,"sha512");
+ atom_md5 = enif_make_atom(env,"md5");
+ atom_ripemd160 = enif_make_atom(env,"ripemd160");
init_digest_types(env);
init_cipher_types(env);
@@ -1010,6 +1037,8 @@ static int algo_pubkey_cnt, algo_pubkey_fips_cnt;
static ERL_NIF_TERM algo_pubkey[7]; /* increase when extending the list */
static int algo_cipher_cnt, algo_cipher_fips_cnt;
static ERL_NIF_TERM algo_cipher[24]; /* increase when extending the list */
+static int algo_mac_cnt, algo_mac_fips_cnt;
+static ERL_NIF_TERM algo_mac[2]; /* increase when extending the list */
static void init_algorithms_types(ErlNifEnv* env)
{
@@ -1093,9 +1122,19 @@ static void init_algorithms_types(ErlNifEnv* env)
algo_cipher[algo_cipher_cnt++] = enif_make_atom(env,"chacha20_poly1305");
#endif
+ // Validated algorithms first
+ algo_mac_cnt = 0;
+ algo_mac[algo_mac_cnt++] = enif_make_atom(env,"hmac");
+#ifdef HAVE_CMAC
+ algo_mac[algo_mac_cnt++] = enif_make_atom(env,"cmac");
+#endif
+ // Non-validated algorithms follow
+ algo_mac_fips_cnt = algo_mac_cnt;
+
ASSERT(algo_hash_cnt <= sizeof(algo_hash)/sizeof(ERL_NIF_TERM));
ASSERT(algo_pubkey_cnt <= sizeof(algo_pubkey)/sizeof(ERL_NIF_TERM));
ASSERT(algo_cipher_cnt <= sizeof(algo_cipher)/sizeof(ERL_NIF_TERM));
+ ASSERT(algo_mac_cnt <= sizeof(algo_mac)/sizeof(ERL_NIF_TERM));
}
static ERL_NIF_TERM algorithms(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
@@ -1105,15 +1144,19 @@ static ERL_NIF_TERM algorithms(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv
int hash_cnt = fips_mode ? algo_hash_fips_cnt : algo_hash_cnt;
int pubkey_cnt = fips_mode ? algo_pubkey_fips_cnt : algo_pubkey_cnt;
int cipher_cnt = fips_mode ? algo_cipher_fips_cnt : algo_cipher_cnt;
+ int mac_cnt = fips_mode ? algo_mac_fips_cnt : algo_mac_cnt;
#else
int hash_cnt = algo_hash_cnt;
int pubkey_cnt = algo_pubkey_cnt;
int cipher_cnt = algo_cipher_cnt;
+ int mac_cnt = algo_mac_cnt;
#endif
- return enif_make_tuple3(env,
+ return enif_make_tuple4(env,
enif_make_list_from_array(env, algo_hash, hash_cnt),
enif_make_list_from_array(env, algo_pubkey, pubkey_cnt),
- enif_make_list_from_array(env, algo_cipher, cipher_cnt));
+ enif_make_list_from_array(env, algo_cipher, cipher_cnt),
+ enif_make_list_from_array(env, algo_mac, mac_cnt)
+ );
}
static ERL_NIF_TERM info_lib(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
@@ -2448,44 +2491,6 @@ static ERL_NIF_TERM mod_exp_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM arg
return ret;
}
-static ERL_NIF_TERM dss_verify_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{/* (sha, Digest, Signature,Key=[P, Q, G, Y]) */
- ErlNifBinary digest_bin, sign_bin;
- BIGNUM *dsa_p = NULL, *dsa_q = NULL, *dsa_g = NULL, *dsa_y = NULL;
- ERL_NIF_TERM head, tail;
- DSA *dsa;
- int i;
-
- if (argv[0] != atom_sha
- || !enif_inspect_binary(env, argv[1], &digest_bin)
- || digest_bin.size != SHA_DIGEST_LENGTH
- || !enif_inspect_binary(env, argv[2], &sign_bin)
- || !enif_get_list_cell(env, argv[3], &head, &tail)
- || !get_bn_from_bin(env, head, &dsa_p)
- || !enif_get_list_cell(env, tail, &head, &tail)
- || !get_bn_from_bin(env, head, &dsa_q)
- || !enif_get_list_cell(env, tail, &head, &tail)
- || !get_bn_from_bin(env, head, &dsa_g)
- || !enif_get_list_cell(env, tail, &head, &tail)
- || !get_bn_from_bin(env, head, &dsa_y)
- || !enif_is_empty_list(env,tail)) {
-
- if (dsa_p) BN_free(dsa_p);
- if (dsa_q) BN_free(dsa_q);
- if (dsa_g) BN_free(dsa_g);
- if (dsa_y) BN_free(dsa_y);
- return enif_make_badarg(env);
- }
-
- dsa = DSA_new();
- DSA_set0_pqg(dsa, dsa_p, dsa_q, dsa_g);
- DSA_set0_key(dsa, dsa_y, NULL);
- i = DSA_verify(0, digest_bin.data, SHA_DIGEST_LENGTH,
- sign_bin.data, sign_bin.size, dsa);
- DSA_free(dsa);
- return(i > 0) ? atom_true : atom_false;
-}
-
static void init_digest_types(ErlNifEnv* env)
{
struct digest_type_t* p = digest_types;
@@ -2532,73 +2537,6 @@ static struct cipher_type_t* get_cipher_type(ERL_NIF_TERM type, size_t key_len)
return NULL;
}
-static ERL_NIF_TERM rsa_verify_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{/* (Type, Digest, Signature, Key=[E,N]) */
- ErlNifBinary digest_bin, sign_bin;
- ERL_NIF_TERM head, tail, ret;
- int i;
- RSA *rsa;
-#if OPENSSL_VERSION_NUMBER >= PACKED_OPENSSL_VERSION_PLAIN(1,0,0)
- EVP_PKEY *pkey;
- EVP_PKEY_CTX *ctx;
-#endif
- const EVP_MD *md;
- const ERL_NIF_TERM type = argv[0];
- struct digest_type_t *digp = NULL;
- BIGNUM *rsa_e;
- BIGNUM *rsa_n;
-
- digp = get_digest_type(type);
- if (!digp) {
- return enif_make_badarg(env);
- }
- md = digp->md.p;
- if (!md) {
- return atom_notsup;
- }
-
- rsa = RSA_new();
-
- if (!enif_inspect_binary(env, argv[1], &digest_bin)
- || digest_bin.size != EVP_MD_size(md)
- || !enif_inspect_binary(env, argv[2], &sign_bin)
- || !enif_get_list_cell(env, argv[3], &head, &tail)
- || !get_bn_from_bin(env, head, &rsa_e)
- || !enif_get_list_cell(env, tail, &head, &tail)
- || !get_bn_from_bin(env, head, &rsa_n)
- || !enif_is_empty_list(env, tail)) {
-
- ret = enif_make_badarg(env);
- goto done;
- }
-
- (void) RSA_set0_key(rsa, rsa_n, rsa_e, NULL);
-
-#if OPENSSL_VERSION_NUMBER >= PACKED_OPENSSL_VERSION_PLAIN(1,0,0)
- pkey = EVP_PKEY_new();
- EVP_PKEY_set1_RSA(pkey, rsa);
-
- ctx = EVP_PKEY_CTX_new(pkey, NULL);
- EVP_PKEY_verify_init(ctx);
- EVP_PKEY_CTX_set_rsa_padding(ctx, RSA_PKCS1_PADDING);
- EVP_PKEY_CTX_set_signature_md(ctx, md);
-
- i = EVP_PKEY_verify(ctx, sign_bin.data, sign_bin.size,
- digest_bin.data, digest_bin.size);
- EVP_PKEY_CTX_free(ctx);
- EVP_PKEY_free(pkey);
-#else
- i = RSA_verify(md->type, digest_bin.data, EVP_MD_size(md),
- sign_bin.data, sign_bin.size, rsa);
-#endif
-
- ret = (i==1 ? atom_true : atom_false);
-
-done:
- RSA_free(rsa);
- return ret;
-}
-
static ERL_NIF_TERM do_exor(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
{/* (Data1, Data2) */
ErlNifBinary d1, d2;
@@ -2702,100 +2640,33 @@ static int get_rsa_private_key(ErlNifEnv* env, ERL_NIF_TERM key, RSA *rsa)
return 1;
}
-static ERL_NIF_TERM rsa_sign_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{/* (Type, Digest, Key=[E,N,D]|[E,N,D,P1,P2,E1,E2,C]) */
- ErlNifBinary digest_bin, ret_bin;
-#if OPENSSL_VERSION_NUMBER >= PACKED_OPENSSL_VERSION_PLAIN(1,0,0)
- EVP_PKEY *pkey;
- EVP_PKEY_CTX *ctx;
- size_t rsa_s_len;
-#else
- unsigned rsa_s_len, len;
-#endif
- RSA *rsa;
- int i;
- struct digest_type_t *digp;
- const EVP_MD *md;
-
- digp = get_digest_type(argv[0]);
- if (!digp) {
- return enif_make_badarg(env);
- }
- md = digp->md.p;
- if (!md) {
- return atom_notsup;
- }
- if (!enif_inspect_binary(env,argv[1],&digest_bin)
- || digest_bin.size != EVP_MD_size(md)) {
- return enif_make_badarg(env);
- }
+static int get_rsa_public_key(ErlNifEnv* env, ERL_NIF_TERM key, RSA *rsa)
+{
+ /* key=[E,N] */
+ ERL_NIF_TERM head, tail;
+ BIGNUM *e, *n;
- rsa = RSA_new();
- if (!get_rsa_private_key(env, argv[2], rsa)) {
- RSA_free(rsa);
- return enif_make_badarg(env);
+ if (!enif_get_list_cell(env, key, &head, &tail)
+ || !get_bn_from_bin(env, head, &e)
+ || !enif_get_list_cell(env, tail, &head, &tail)
+ || !get_bn_from_bin(env, head, &n)
+ || !enif_is_empty_list(env, tail)) {
+ return 0;
}
-
-#if OPENSSL_VERSION_NUMBER >= PACKED_OPENSSL_VERSION_PLAIN(1,0,0)
- pkey = EVP_PKEY_new();
- EVP_PKEY_set1_RSA(pkey, rsa);
- rsa_s_len=(size_t)EVP_PKEY_size(pkey);
- enif_alloc_binary(rsa_s_len, &ret_bin);
-
- ctx = EVP_PKEY_CTX_new(pkey, NULL);
- EVP_PKEY_sign_init(ctx);
- EVP_PKEY_CTX_set_rsa_padding(ctx, RSA_PKCS1_PADDING);
- EVP_PKEY_CTX_set_signature_md(ctx, md);
-
- i = EVP_PKEY_sign(ctx, ret_bin.data, &rsa_s_len,
- digest_bin.data, digest_bin.size);
- ASSERT(i<=0 || rsa_s_len <= ret_bin.size);
- EVP_PKEY_CTX_free(ctx);
- EVP_PKEY_free(pkey);
-#else
- enif_alloc_binary(RSA_size(rsa), &ret_bin);
- len = EVP_MD_size(md);
-
- ERL_VALGRIND_ASSERT_MEM_DEFINED(digest_bin.data, len);
- i = RSA_sign(md->type, digest_bin.data, len,
- ret_bin.data, &rsa_s_len, rsa);
-#endif
-
- RSA_free(rsa);
- if (i > 0) {
- ERL_VALGRIND_MAKE_MEM_DEFINED(ret_bin.data, rsa_s_len);
- if (rsa_s_len != ret_bin.size) {
- enif_realloc_binary(&ret_bin, rsa_s_len);
- ERL_VALGRIND_ASSERT_MEM_DEFINED(ret_bin.data, rsa_s_len);
- }
- return enif_make_binary(env,&ret_bin);
- }
- else {
- enif_release_binary(&ret_bin);
- return atom_error;
- }
+ (void) RSA_set0_key(rsa, n, e, NULL);
+ return 1;
}
-
-static ERL_NIF_TERM dss_sign_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{/* (sha, Digest, Key=[P,Q,G,PrivKey]) */
- ErlNifBinary digest_bin, ret_bin;
+static int get_dss_private_key(ErlNifEnv* env, ERL_NIF_TERM key, DSA *dsa)
+{
+ /* key=[P,Q,G,KEY] */
ERL_NIF_TERM head, tail;
- unsigned int dsa_s_len;
- DSA* dsa;
BIGNUM *dsa_p = NULL, *dsa_q = NULL, *dsa_g = NULL;
BIGNUM *dummy_pub_key, *priv_key = NULL;
- int i;
-
- if (argv[0] != atom_sha
- || !enif_inspect_binary(env, argv[1], &digest_bin)
- || digest_bin.size != SHA_DIGEST_LENGTH) {
- return enif_make_badarg(env);
- }
- if (!enif_get_list_cell(env, argv[2], &head, &tail)
+ if (!enif_get_list_cell(env, key, &head, &tail)
|| !get_bn_from_bin(env, head, &dsa_p)
|| !enif_get_list_cell(env, tail, &head, &tail)
|| !get_bn_from_bin(env, head, &dsa_q)
@@ -2808,7 +2679,7 @@ static ERL_NIF_TERM dss_sign_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM ar
if (dsa_q) BN_free(dsa_q);
if (dsa_g) BN_free(dsa_g);
if (priv_key) BN_free(priv_key);
- return enif_make_badarg(env);
+ return 0;
}
/* Note: DSA_set0_key() does not allow setting only the
@@ -2818,27 +2689,39 @@ static ERL_NIF_TERM dss_sign_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM ar
*/
dummy_pub_key = BN_dup(priv_key);
- dsa = DSA_new();
DSA_set0_pqg(dsa, dsa_p, dsa_q, dsa_g);
DSA_set0_key(dsa, dummy_pub_key, priv_key);
- enif_alloc_binary(DSA_size(dsa), &ret_bin);
- i = DSA_sign(NID_sha1, digest_bin.data, SHA_DIGEST_LENGTH,
- ret_bin.data, &dsa_s_len, dsa);
- DSA_free(dsa);
-
- if (i) {
- if (dsa_s_len != ret_bin.size) {
- enif_realloc_binary(&ret_bin, dsa_s_len);
- }
- return enif_make_binary(env, &ret_bin);
- }
- else {
- enif_release_binary(&ret_bin);
- return atom_error;
- }
+ return 1;
}
+static int get_dss_public_key(ErlNifEnv* env, ERL_NIF_TERM key, DSA *dsa)
+{
+ /* key=[P, Q, G, Y] */
+ ERL_NIF_TERM head, tail;
+ BIGNUM *dsa_p = NULL, *dsa_q = NULL, *dsa_g = NULL, *dsa_y = NULL;
+
+ if (!enif_get_list_cell(env, key, &head, &tail)
+ || !get_bn_from_bin(env, head, &dsa_p)
+ || !enif_get_list_cell(env, tail, &head, &tail)
+ || !get_bn_from_bin(env, head, &dsa_q)
+ || !enif_get_list_cell(env, tail, &head, &tail)
+ || !get_bn_from_bin(env, head, &dsa_g)
+ || !enif_get_list_cell(env, tail, &head, &tail)
+ || !get_bn_from_bin(env, head, &dsa_y)
+ || !enif_is_empty_list(env,tail)) {
+ if (dsa_p) BN_free(dsa_p);
+ if (dsa_q) BN_free(dsa_q);
+ if (dsa_g) BN_free(dsa_g);
+ if (dsa_y) BN_free(dsa_y);
+ return 0;
+ }
+
+ DSA_set0_pqg(dsa, dsa_p, dsa_q, dsa_g);
+ DSA_set0_key(dsa, dsa_y, NULL);
+ return 1;
+}
+
static int rsa_pad(ERL_NIF_TERM term, int* padding)
{
if (term == atom_rsa_pkcs1_padding) {
@@ -3788,99 +3671,6 @@ badarg:
#endif
}
-static ERL_NIF_TERM ecdsa_sign_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{/* (Type, Digest, Curve, Key) */
-#if defined(HAVE_EC)
- ErlNifBinary digest_bin, ret_bin;
- unsigned int dsa_s_len;
- EC_KEY* key = NULL;
- int i, len;
- struct digest_type_t *digp;
- const EVP_MD *md;
-
- digp = get_digest_type(argv[0]);
- if (!digp) {
- return enif_make_badarg(env);
- }
- md = digp->md.p;
- if (!md) {
- return atom_notsup;
- }
- len = EVP_MD_size(md);
-
- if (!enif_inspect_binary(env,argv[1],&digest_bin)
- || digest_bin.size != len
- || !get_ec_key(env, argv[2], argv[3], atom_undefined, &key))
- goto badarg;
-
- enif_alloc_binary(ECDSA_size(key), &ret_bin);
-
- i = ECDSA_sign(EVP_MD_type(md), digest_bin.data, len,
- ret_bin.data, &dsa_s_len, key);
-
- EC_KEY_free(key);
- if (i) {
- if (dsa_s_len != ret_bin.size) {
- enif_realloc_binary(&ret_bin, dsa_s_len);
- }
- return enif_make_binary(env, &ret_bin);
- }
- else {
- enif_release_binary(&ret_bin);
- return atom_error;
- }
-
-badarg:
- if (key)
- EC_KEY_free(key);
- return make_badarg_maybe(env);
-#else
- return atom_notsup;
-#endif
-}
-
-static ERL_NIF_TERM ecdsa_verify_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
-{/* (Type, Digest, Signature, Curve, Key) */
-#if defined(HAVE_EC)
- ErlNifBinary digest_bin, sign_bin;
- int i, len;
- EC_KEY* key = NULL;
- const ERL_NIF_TERM type = argv[0];
- struct digest_type_t *digp = NULL;
- const EVP_MD *md;
-
- digp = get_digest_type(type);
- if (!digp) {
- return enif_make_badarg(env);
- }
- md = digp->md.p;
- if (!md) {
- return atom_notsup;
- }
- len = EVP_MD_size(md);
-
- if (!enif_inspect_binary(env, argv[1], &digest_bin)
- || digest_bin.size != len
- || !enif_inspect_binary(env, argv[2], &sign_bin)
- || !get_ec_key(env, argv[3], atom_undefined, argv[4], &key))
- goto badarg;
-
- i = ECDSA_verify(EVP_MD_type(md), digest_bin.data, len,
- sign_bin.data, sign_bin.size, key);
-
- EC_KEY_free(key);
-
- return (i==1 ? atom_true : atom_false);
-
-badarg:
- if (key)
- EC_KEY_free(key);
- return make_badarg_maybe(env);
-#else
- return atom_notsup;
-#endif
-}
-
/*
(_OthersPublicKey, _MyPrivateKey)
(_OthersPublicKey, _MyEC_Point)
@@ -3939,6 +3729,548 @@ out_err:
#endif
}
+/*================================================================*/
+#define PKEY_BADARG -1
+#define PKEY_NOTSUP 0
+#define PKEY_OK 1
+
+typedef struct PKeyCryptOptions {
+ const EVP_MD *rsa_mgf1_md;
+ ErlNifBinary rsa_oaep_label;
+ const EVP_MD *rsa_oaep_md;
+ int rsa_padding;
+ const EVP_MD *signature_md;
+} PKeyCryptOptions;
+
+typedef struct PKeySignOptions {
+ const EVP_MD *rsa_mgf1_md;
+ int rsa_padding;
+ int rsa_pss_saltlen;
+} PKeySignOptions;
+
+static int get_pkey_digest_type(ErlNifEnv *env, ERL_NIF_TERM algorithm, ERL_NIF_TERM type,
+ const EVP_MD **md)
+{
+ struct digest_type_t *digp = NULL;
+ *md = NULL;
+
+ if (type == atom_none && algorithm == atom_rsa) return PKEY_OK;
+
+ digp = get_digest_type(type);
+ if (!digp) return PKEY_BADARG;
+ if (!digp->md.p) return PKEY_NOTSUP;
+
+ *md = digp->md.p;
+ return PKEY_OK;
+}
+
+
+static int get_pkey_sign_digest(ErlNifEnv *env, ERL_NIF_TERM algorithm,
+ ERL_NIF_TERM type, ERL_NIF_TERM data,
+ unsigned char *md_value, const EVP_MD **mdp,
+ unsigned char **tbsp, size_t *tbslenp)
+{
+ int i;
+ const ERL_NIF_TERM *tpl_terms;
+ int tpl_arity;
+ ErlNifBinary tbs_bin;
+ EVP_MD_CTX *mdctx;
+ const EVP_MD *md = *mdp;
+ unsigned char *tbs = *tbsp;
+ size_t tbslen = *tbslenp;
+ unsigned int tbsleni;
+
+ if ((i = get_pkey_digest_type(env, algorithm, type, &md)) != PKEY_OK) {
+ return i;
+ }
+ if (enif_get_tuple(env, data, &tpl_arity, &tpl_terms)) {
+ if (tpl_arity != 2 || tpl_terms[0] != atom_digest
+ || !enif_inspect_binary(env, tpl_terms[1], &tbs_bin)
+ || (md != NULL && tbs_bin.size != EVP_MD_size(md))) {
+ return PKEY_BADARG;
+ }
+ /* We have a digest (= hashed text) in tbs_bin */
+ tbs = tbs_bin.data;
+ tbslen = tbs_bin.size;
+ } else if (md == NULL) {
+ if (!enif_inspect_binary(env, data, &tbs_bin)) {
+ return PKEY_BADARG;
+ }
+ /* md == NULL, that is no hashing because DigestType argument was atom_none */
+ tbs = tbs_bin.data;
+ tbslen = tbs_bin.size;
+ } else {
+ if (!enif_inspect_binary(env, data, &tbs_bin)) {
+ return PKEY_BADARG;
+ }
+ /* We have the cleartext in tbs_bin and the hash algo info in md */
+ tbs = md_value;
+ mdctx = EVP_MD_CTX_create();
+ if (!mdctx) {
+ return PKEY_BADARG;
+ }
+ /* Looks well, now hash the plain text into a digest according to md */
+ if (EVP_DigestInit_ex(mdctx, md, NULL) <= 0) {
+ EVP_MD_CTX_destroy(mdctx);
+ return PKEY_BADARG;
+ }
+ if (EVP_DigestUpdate(mdctx, tbs_bin.data, tbs_bin.size) <= 0) {
+ EVP_MD_CTX_destroy(mdctx);
+ return PKEY_BADARG;
+ }
+ if (EVP_DigestFinal_ex(mdctx, tbs, &tbsleni) <= 0) {
+ EVP_MD_CTX_destroy(mdctx);
+ return PKEY_BADARG;
+ }
+ tbslen = (size_t)(tbsleni);
+ EVP_MD_CTX_destroy(mdctx);
+ }
+
+ *mdp = md;
+ *tbsp = tbs;
+ *tbslenp = tbslen;
+
+ return PKEY_OK;
+}
+
+
+static int get_pkey_sign_options(ErlNifEnv *env, ERL_NIF_TERM algorithm, ERL_NIF_TERM options,
+ const EVP_MD *md, PKeySignOptions *opt)
+{
+ ERL_NIF_TERM head, tail;
+ const ERL_NIF_TERM *tpl_terms;
+ int tpl_arity;
+ const EVP_MD *opt_md;
+ int i;
+
+ if (!enif_is_list(env, options)) {
+ return PKEY_BADARG;
+ }
+
+ /* defaults */
+ if (algorithm == atom_rsa) {
+ opt->rsa_mgf1_md = NULL;
+ opt->rsa_padding = RSA_PKCS1_PADDING;
+ opt->rsa_pss_saltlen = -2;
+ }
+
+ if (enif_is_empty_list(env, options)) {
+ return PKEY_OK;
+ }
+
+ if (algorithm == atom_rsa) {
+ tail = options;
+ while (enif_get_list_cell(env, tail, &head, &tail)) {
+ if (enif_get_tuple(env, head, &tpl_arity, &tpl_terms) && tpl_arity == 2) {
+ if (tpl_terms[0] == atom_rsa_mgf1_md && enif_is_atom(env, tpl_terms[1])) {
+ i = get_pkey_digest_type(env, algorithm, tpl_terms[1], &opt_md);
+ if (i != PKEY_OK) {
+ return i;
+ }
+ opt->rsa_mgf1_md = opt_md;
+ } else if (tpl_terms[0] == atom_rsa_padding) {
+ if (tpl_terms[1] == atom_rsa_pkcs1_padding) {
+ opt->rsa_padding = RSA_PKCS1_PADDING;
+ } else if (tpl_terms[1] == atom_rsa_pkcs1_pss_padding) {
+#if OPENSSL_VERSION_NUMBER >= PACKED_OPENSSL_VERSION_PLAIN(1,0,0)
+ opt->rsa_padding = RSA_PKCS1_PSS_PADDING;
+ if (opt->rsa_mgf1_md == NULL) {
+ opt->rsa_mgf1_md = md;
+ }
+#else
+ return PKEY_NOTSUP;
+#endif
+ } else if (tpl_terms[1] == atom_rsa_x931_padding) {
+ opt->rsa_padding = RSA_X931_PADDING;
+ } else if (tpl_terms[1] == atom_rsa_no_padding) {
+ opt->rsa_padding = RSA_NO_PADDING;
+ } else {
+ return PKEY_BADARG;
+ }
+ } else if (tpl_terms[0] == atom_rsa_pss_saltlen) {
+ if (!enif_get_int(env, tpl_terms[1], &(opt->rsa_pss_saltlen))
+ || opt->rsa_pss_saltlen < -2) {
+ return PKEY_BADARG;
+ }
+ } else {
+ return PKEY_BADARG;
+ }
+ } else {
+ return PKEY_BADARG;
+ }
+ }
+ } else {
+ return PKEY_BADARG;
+ }
+
+ return PKEY_OK;
+}
+
+static int get_pkey_sign_key(ErlNifEnv *env, ERL_NIF_TERM algorithm, ERL_NIF_TERM key, EVP_PKEY **pkey)
+{
+ if (algorithm == atom_rsa) {
+ RSA *rsa = RSA_new();
+
+ if (!get_rsa_private_key(env, key, rsa)) {
+ RSA_free(rsa);
+ return PKEY_BADARG;
+ }
+
+ *pkey = EVP_PKEY_new();
+ if (!EVP_PKEY_assign_RSA(*pkey, rsa)) {
+ EVP_PKEY_free(*pkey);
+ RSA_free(rsa);
+ return PKEY_BADARG;
+ }
+ } else if (algorithm == atom_ecdsa) {
+#if defined(HAVE_EC)
+ EC_KEY *ec = NULL;
+ const ERL_NIF_TERM *tpl_terms;
+ int tpl_arity;
+
+ if (enif_get_tuple(env, key, &tpl_arity, &tpl_terms) && tpl_arity == 2
+ && enif_is_tuple(env, tpl_terms[0]) && enif_is_binary(env, tpl_terms[1])
+ && get_ec_key(env, tpl_terms[0], tpl_terms[1], atom_undefined, &ec)) {
+
+ *pkey = EVP_PKEY_new();
+ if (!EVP_PKEY_assign_EC_KEY(*pkey, ec)) {
+ EVP_PKEY_free(*pkey);
+ EC_KEY_free(ec);
+ return PKEY_BADARG;
+ }
+ } else {
+ return PKEY_BADARG;
+ }
+#else
+ return PKEY_NOTSUP;
+#endif
+ } else if (algorithm == atom_dss) {
+ DSA *dsa = DSA_new();
+
+ if (!get_dss_private_key(env, key, dsa)) {
+ DSA_free(dsa);
+ return PKEY_BADARG;
+ }
+
+ *pkey = EVP_PKEY_new();
+ if (!EVP_PKEY_assign_DSA(*pkey, dsa)) {
+ EVP_PKEY_free(*pkey);
+ DSA_free(dsa);
+ return PKEY_BADARG;
+ }
+ } else {
+ return PKEY_BADARG;
+ }
+
+ return PKEY_OK;
+}
+
+static ERL_NIF_TERM pkey_sign_nif(ErlNifEnv *env, int argc, const ERL_NIF_TERM argv[])
+{/* (Algorithm, Type, Data|{digest,Digest}, Key, Options) */
+ int i;
+ const EVP_MD *md = NULL;
+ unsigned char md_value[EVP_MAX_MD_SIZE];
+ EVP_PKEY *pkey;
+#ifdef HAS_EVP_PKEY_CTX
+ EVP_PKEY_CTX *ctx;
+ size_t siglen;
+#else
+ unsigned len, siglen;
+#endif
+ PKeySignOptions sig_opt;
+ ErlNifBinary sig_bin; /* signature */
+ unsigned char *tbs; /* data to be signed */
+ size_t tbslen;
+/*char buf[1024];
+enif_get_atom(env,argv[0],buf,1024,ERL_NIF_LATIN1); printf("algo=%s ",buf);
+enif_get_atom(env,argv[1],buf,1024,ERL_NIF_LATIN1); printf("hash=%s ",buf);
+printf("\r\n");
+*/
+ i = get_pkey_sign_digest(env, argv[0], argv[1], argv[2], md_value, &md, &tbs, &tbslen);
+ if (i != PKEY_OK) {
+ if (i == PKEY_NOTSUP)
+ return atom_notsup;
+ else
+ return enif_make_badarg(env);
+ }
+
+ i = get_pkey_sign_options(env, argv[0], argv[4], md, &sig_opt);
+ if (i != PKEY_OK) {
+ if (i == PKEY_NOTSUP)
+ return atom_notsup;
+ else
+ return enif_make_badarg(env);
+ }
+
+ if (get_pkey_sign_key(env, argv[0], argv[3], &pkey) != PKEY_OK) {
+ return enif_make_badarg(env);
+ }
+
+#ifdef HAS_EVP_PKEY_CTX
+/* printf("EVP interface\r\n");
+ */
+ ctx = EVP_PKEY_CTX_new(pkey, NULL);
+ if (!ctx) goto badarg;
+ if (EVP_PKEY_sign_init(ctx) <= 0) goto badarg;
+ if (md != NULL && EVP_PKEY_CTX_set_signature_md(ctx, md) <= 0) goto badarg;
+
+ if (argv[0] == atom_rsa) {
+ if (EVP_PKEY_CTX_set_rsa_padding(ctx, sig_opt.rsa_padding) <= 0) goto badarg;
+ if (sig_opt.rsa_padding == RSA_PKCS1_PSS_PADDING) {
+ if (sig_opt.rsa_mgf1_md != NULL) {
+#if OPENSSL_VERSION_NUMBER >= PACKED_OPENSSL_VERSION_PLAIN(1,0,1)
+ if (EVP_PKEY_CTX_set_rsa_mgf1_md(ctx, sig_opt.rsa_mgf1_md) <= 0) goto badarg;
+#else
+ EVP_PKEY_CTX_free(ctx);
+ EVP_PKEY_free(pkey);
+ return atom_notsup;
+#endif
+ }
+ if (sig_opt.rsa_pss_saltlen > -2
+ && EVP_PKEY_CTX_set_rsa_pss_saltlen(ctx, sig_opt.rsa_pss_saltlen) <= 0)
+ goto badarg;
+ }
+ }
+
+ if (EVP_PKEY_sign(ctx, NULL, &siglen, tbs, tbslen) <= 0) goto badarg;
+ enif_alloc_binary(siglen, &sig_bin);
+
+ if (md != NULL) {
+ ERL_VALGRIND_ASSERT_MEM_DEFINED(tbs, EVP_MD_size(md));
+ }
+ i = EVP_PKEY_sign(ctx, sig_bin.data, &siglen, tbs, tbslen);
+
+ EVP_PKEY_CTX_free(ctx);
+#else
+/*printf("Old interface\r\n");
+ */
+ if (argv[0] == atom_rsa) {
+ RSA *rsa = EVP_PKEY_get1_RSA(pkey);
+ enif_alloc_binary(RSA_size(rsa), &sig_bin);
+ len = EVP_MD_size(md);
+ ERL_VALGRIND_ASSERT_MEM_DEFINED(tbs, len);
+ i = RSA_sign(md->type, tbs, len, sig_bin.data, &siglen, rsa);
+ RSA_free(rsa);
+ } else if (argv[0] == atom_dss) {
+ DSA *dsa = EVP_PKEY_get1_DSA(pkey);
+ enif_alloc_binary(DSA_size(dsa), &sig_bin);
+ len = EVP_MD_size(md);
+ ERL_VALGRIND_ASSERT_MEM_DEFINED(tbs, len);
+ i = DSA_sign(md->type, tbs, len, sig_bin.data, &siglen, dsa);
+ DSA_free(dsa);
+ } else if (argv[0] == atom_ecdsa) {
+#if defined(HAVE_EC)
+ EC_KEY *ec = EVP_PKEY_get1_EC_KEY(pkey);
+ enif_alloc_binary(ECDSA_size(ec), &sig_bin);
+ len = EVP_MD_size(md);
+ ERL_VALGRIND_ASSERT_MEM_DEFINED(tbs, len);
+ i = ECDSA_sign(md->type, tbs, len, sig_bin.data, &siglen, ec);
+ EC_KEY_free(ec);
+#else
+ EVP_PKEY_free(pkey);
+ return atom_notsup;
+#endif
+ } else {
+ goto badarg;
+ }
+#endif
+
+ EVP_PKEY_free(pkey);
+ if (i == 1) {
+ ERL_VALGRIND_MAKE_MEM_DEFINED(sig_bin.data, siglen);
+ if (siglen != sig_bin.size) {
+ enif_realloc_binary(&sig_bin, siglen);
+ ERL_VALGRIND_ASSERT_MEM_DEFINED(sig_bin.data, siglen);
+ }
+ return enif_make_binary(env, &sig_bin);
+ } else {
+ enif_release_binary(&sig_bin);
+ return atom_error;
+ }
+
+ badarg:
+#ifdef HAS_EVP_PKEY_CTX
+ EVP_PKEY_CTX_free(ctx);
+#endif
+ EVP_PKEY_free(pkey);
+ return enif_make_badarg(env);
+}
+
+
+static int get_pkey_verify_key(ErlNifEnv *env, ERL_NIF_TERM algorithm, ERL_NIF_TERM key,
+ EVP_PKEY **pkey)
+{
+ if (algorithm == atom_rsa) {
+ RSA *rsa = RSA_new();
+
+ if (!get_rsa_public_key(env, key, rsa)) {
+ RSA_free(rsa);
+ return PKEY_BADARG;
+ }
+
+ *pkey = EVP_PKEY_new();
+ if (!EVP_PKEY_assign_RSA(*pkey, rsa)) {
+ EVP_PKEY_free(*pkey);
+ RSA_free(rsa);
+ return PKEY_BADARG;
+ }
+ } else if (algorithm == atom_ecdsa) {
+#if defined(HAVE_EC)
+ EC_KEY *ec = NULL;
+ const ERL_NIF_TERM *tpl_terms;
+ int tpl_arity;
+
+ if (enif_get_tuple(env, key, &tpl_arity, &tpl_terms) && tpl_arity == 2
+ && enif_is_tuple(env, tpl_terms[0]) && enif_is_binary(env, tpl_terms[1])
+ && get_ec_key(env, tpl_terms[0], atom_undefined, tpl_terms[1], &ec)) {
+
+ *pkey = EVP_PKEY_new();
+ if (!EVP_PKEY_assign_EC_KEY(*pkey, ec)) {
+ EVP_PKEY_free(*pkey);
+ EC_KEY_free(ec);
+ return PKEY_BADARG;
+ }
+ } else {
+ return PKEY_BADARG;
+ }
+#else
+ return PKEY_NOTSUP;
+#endif
+ } else if (algorithm == atom_dss) {
+ DSA *dsa = DSA_new();
+
+ if (!get_dss_public_key(env, key, dsa)) {
+ DSA_free(dsa);
+ return PKEY_BADARG;
+ }
+
+ *pkey = EVP_PKEY_new();
+ if (!EVP_PKEY_assign_DSA(*pkey, dsa)) {
+ EVP_PKEY_free(*pkey);
+ DSA_free(dsa);
+ return PKEY_BADARG;
+ }
+ } else {
+ return PKEY_BADARG;
+ }
+
+ return PKEY_OK;
+}
+
+static ERL_NIF_TERM pkey_verify_nif(ErlNifEnv *env, int argc, const ERL_NIF_TERM argv[])
+{/* (Algorithm, Type, Data|{digest,Digest}, Signature, Key, Options) */
+ int i;
+ const EVP_MD *md = NULL;
+ unsigned char md_value[EVP_MAX_MD_SIZE];
+ EVP_PKEY *pkey;
+#ifdef HAS_EVP_PKEY_CTX
+ EVP_PKEY_CTX *ctx;
+#else
+#endif
+ PKeySignOptions sig_opt;
+ ErlNifBinary sig_bin; /* signature */
+ unsigned char *tbs; /* data to be signed */
+ size_t tbslen;
+
+ if (!enif_inspect_binary(env, argv[3], &sig_bin)) {
+ return enif_make_badarg(env);
+ }
+
+ i = get_pkey_sign_digest(env, argv[0], argv[1], argv[2], md_value, &md, &tbs, &tbslen);
+ if (i != PKEY_OK) {
+ if (i == PKEY_NOTSUP)
+ return atom_notsup;
+ else
+ return enif_make_badarg(env);
+ }
+
+ i = get_pkey_sign_options(env, argv[0], argv[5], md, &sig_opt);
+ if (i != PKEY_OK) {
+ if (i == PKEY_NOTSUP)
+ return atom_notsup;
+ else
+ return enif_make_badarg(env);
+ }
+
+ if (get_pkey_verify_key(env, argv[0], argv[4], &pkey) != PKEY_OK) {
+ return enif_make_badarg(env);
+ }
+
+#ifdef HAS_EVP_PKEY_CTX
+/* printf("EVP interface\r\n");
+ */
+ ctx = EVP_PKEY_CTX_new(pkey, NULL);
+ if (!ctx) goto badarg;
+ if (EVP_PKEY_verify_init(ctx) <= 0) goto badarg;
+ if (md != NULL && EVP_PKEY_CTX_set_signature_md(ctx, md) <= 0) goto badarg;
+
+ if (argv[0] == atom_rsa) {
+ if (EVP_PKEY_CTX_set_rsa_padding(ctx, sig_opt.rsa_padding) <= 0) goto badarg;
+ if (sig_opt.rsa_padding == RSA_PKCS1_PSS_PADDING) {
+ if (sig_opt.rsa_mgf1_md != NULL) {
+#if OPENSSL_VERSION_NUMBER >= PACKED_OPENSSL_VERSION_PLAIN(1,0,1)
+ if (EVP_PKEY_CTX_set_rsa_mgf1_md(ctx, sig_opt.rsa_mgf1_md) <= 0) goto badarg;
+#else
+ EVP_PKEY_CTX_free(ctx);
+ EVP_PKEY_free(pkey);
+ return atom_notsup;
+#endif
+ }
+ if (sig_opt.rsa_pss_saltlen > -2
+ && EVP_PKEY_CTX_set_rsa_pss_saltlen(ctx, sig_opt.rsa_pss_saltlen) <= 0)
+ goto badarg;
+ }
+ }
+
+ if (md != NULL) {
+ ERL_VALGRIND_ASSERT_MEM_DEFINED(tbs, EVP_MD_size(md));
+ }
+ i = EVP_PKEY_verify(ctx, sig_bin.data, sig_bin.size, tbs, tbslen);
+
+ EVP_PKEY_CTX_free(ctx);
+#else
+/*printf("Old interface\r\n");
+*/
+ if (argv[0] == atom_rsa) {
+ RSA *rsa = EVP_PKEY_get1_RSA(pkey);
+ i = RSA_verify(md->type, tbs, tbslen, sig_bin.data, sig_bin.size, rsa);
+ RSA_free(rsa);
+ } else if (argv[0] == atom_dss) {
+ DSA *dsa = EVP_PKEY_get1_DSA(pkey);
+ i = DSA_verify(0, tbs, tbslen, sig_bin.data, sig_bin.size, dsa);
+ DSA_free(dsa);
+ } else if (argv[0] == atom_ecdsa) {
+#if defined(HAVE_EC)
+ EC_KEY *ec = EVP_PKEY_get1_EC_KEY(pkey);
+ i = ECDSA_verify(EVP_MD_type(md), tbs, tbslen, sig_bin.data, sig_bin.size, ec);
+ EC_KEY_free(ec);
+#else
+ EVP_PKEY_free(pkey);
+ return atom_notsup;
+#endif
+ } else {
+ goto badarg;
+ }
+#endif
+
+ EVP_PKEY_free(pkey);
+ if (i == 1) {
+ return atom_true;
+ } else {
+ return atom_false;
+ }
+
+ badarg:
+#ifdef HAS_EVP_PKEY_CTX
+ EVP_PKEY_CTX_free(ctx);
+#endif
+ EVP_PKEY_free(pkey);
+ return enif_make_badarg(env);
+}
+
+
+/*================================================================*/
+
static ERL_NIF_TERM rand_seed_nif(ErlNifEnv* env, int argc, const ERL_NIF_TERM argv[])
{
ErlNifBinary seed_bin;
diff --git a/lib/crypto/doc/src/crypto.xml b/lib/crypto/doc/src/crypto.xml
index 89ef529c5d..5b2c46a004 100644
--- a/lib/crypto/doc/src/crypto.xml
+++ b/lib/crypto/doc/src/crypto.xml
@@ -159,12 +159,24 @@
<code>digest_type() = md5 | sha | sha224 | sha256 | sha384 | sha512</code>
+ <code>rsa_digest_type() = md5 | ripemd160 | sha | sha224 | sha256 | sha384 | sha512</code>
+
+    <code>dss_digest_type() = sha | sha224 | sha256 | sha384 | sha512</code> <p>Note that the set of supported
+    dss_digest_type values depends on the underlying crypto library. With OpenSSL >= 1.0.1 all of the listed digests are supported, with 1.0.0 only sha, sha224 and sha256 are supported, and with 0.9.8 only sha is supported.</p>
+
+ <code>ecdsa_digest_type() = sha | sha224 | sha256 | sha384 | sha512</code>
+
+    <code>sign_options() = [{rsa_padding, rsa_sign_padding()} | {rsa_pss_saltlen, integer()}]</code>
+
+ <code>rsa_sign_padding() = rsa_pkcs1_padding | rsa_pkcs1_pss_padding</code>
+
<code> hash_algorithms() = md5 | ripemd160 | sha | sha224 | sha256 | sha384 | sha512 </code> <p>md4 is also supported for hash_init/1 and hash/2.
Note that both md4 and md5 are recommended only for compatibility with existing applications.
</p>
<code> cipher_algorithms() = aes_cbc | aes_cfb8 | aes_cfb128 | aes_ctr | aes_gcm |
aes_ige256 | blowfish_cbc | blowfish_cfb64 | chacha20_poly1305 | des_cbc | des_cfb |
des3_cbc | des3_cfb | des_ede3 | rc2_cbc | rc4 </code>
+ <code> mac_algorithms() = hmac | cmac</code>
    <code> public_key_algorithms() = rsa | dss | ecdsa | dh | ecdh | ec_gf2m</code>
<p>Note that ec_gf2m is not strictly a public key algorithm, but a restriction on what curves are supported
with ecdsa and ecdh.
@@ -681,6 +693,7 @@
<func>
<name>sign(Algorithm, DigestType, Msg, Key) -> binary()</name>
+ <name>sign(Algorithm, DigestType, Msg, Key, Options) -> binary()</name>
<fsummary> Create digital signature.</fsummary>
<type>
<v>Algorithm = rsa | dss | ecdsa </v>
@@ -688,8 +701,9 @@
<d>The msg is either the binary "cleartext" data to be
signed or it is the hashed value of "cleartext" i.e. the
digest (plaintext).</d>
- <v>DigestType = digest_type()</v>
+ <v>DigestType = rsa_digest_type() | dss_digest_type() | ecdsa_digest_type()</v>
<v>Key = rsa_private() | dss_private() | [ecdh_private(),ecdh_params()]</v>
+ <v>Options = sign_options()</v>
</type>
<desc>
<p>Creates a digital signature.</p>
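
      A minimal usage sketch of sign/5 with the new RSA-PSS options (illustrative
      only; RSAPrivKey and Msg are placeholders for an rsa_private() key and a
      binary message, they are not taken from this patch):

        Sig = crypto:sign(rsa, sha256, Msg, RSAPrivKey,
                          [{rsa_padding, rsa_pkcs1_pss_padding},
                           {rsa_pss_saltlen, 0}]).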
@@ -835,7 +849,8 @@ _FloatValue = rand:uniform(). % [0.0; 1.0[</pre>
<type>
<v> AlgorithmList = [{hashs, [hash_algorithms()]},
{ciphers, [cipher_algorithms()]},
- {public_keys, [public_key_algorithms()]}
+ {public_keys, [public_key_algorithms()]},
+ {macs, [mac_algorithms()]}]
</v>
</type>
<desc>
@@ -869,15 +884,17 @@ _FloatValue = rand:uniform(). % [0.0; 1.0[</pre>
<func>
<name>verify(Algorithm, DigestType, Msg, Signature, Key) -> boolean()</name>
+ <name>verify(Algorithm, DigestType, Msg, Signature, Key, Options) -> boolean()</name>
<fsummary>Verifies a digital signature.</fsummary>
<type>
<v> Algorithm = rsa | dss | ecdsa </v>
<v>Msg = binary() | {digest,binary()}</v>
<d>The msg is either the binary "cleartext" data
or it is the hashed value of "cleartext" i.e. the digest (plaintext).</d>
- <v>DigestType = digest_type()</v>
+ <v>DigestType = rsa_digest_type() | dss_digest_type() | ecdsa_digest_type()</v>
<v>Signature = binary()</v>
<v>Key = rsa_public() | dss_public() | [ecdh_public(),ecdh_params()]</v>
+ <v>Options = sign_options()</v>
</type>
<desc>
<p>Verifies a digital signature</p>
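
      A matching verification sketch, assuming Sig and Msg are the values from the
      sign/5 sketch above and RSAPubKey is the corresponding rsa_public() key; the
      same option list must be given to verify/6:

        true = crypto:verify(rsa, sha256, Msg, Sig, RSAPubKey,
                             [{rsa_padding, rsa_pkcs1_pss_padding},
                              {rsa_pss_saltlen, 0}]).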
diff --git a/lib/crypto/src/crypto.erl b/lib/crypto/src/crypto.erl
index d111525214..1df05462c9 100644
--- a/lib/crypto/src/crypto.erl
+++ b/lib/crypto/src/crypto.erl
@@ -25,7 +25,7 @@
-export([start/0, stop/0, info_lib/0, info_fips/0, supports/0, enable_fips_mode/1,
version/0, bytes_to_integer/1]).
-export([hash/2, hash_init/1, hash_update/2, hash_final/1]).
--export([sign/4, verify/5]).
+-export([sign/4, sign/5, verify/5, verify/6]).
-export([generate_key/2, generate_key/3, compute_key/4]).
-export([hmac/3, hmac/4, hmac_init/2, hmac_update/2, hmac_final/1, hmac_final_n/2]).
-export([cmac/3, cmac/4]).
@@ -45,6 +45,10 @@
-export([ec_curve/1, ec_curves/0]).
-export([rand_seed/1]).
+%% Private. For tests.
+-export([packed_openssl_version/4]).
+
+
-deprecated({rand_uniform, 2, next_major_release}).
%% This should correspond to the similar macro in crypto.c
@@ -87,11 +91,12 @@ stop() ->
application:stop(crypto).
supports()->
- {Hashs, PubKeys, Ciphers} = algorithms(),
+ {Hashs, PubKeys, Ciphers, Macs} = algorithms(),
[{hashs, Hashs},
{ciphers, Ciphers},
- {public_keys, PubKeys}
+ {public_keys, PubKeys},
+ {macs, Macs}
].
info_lib() -> ?nif_stub.
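
An illustrative shell session for the extended supports/0 (the exact lists depend
on how the underlying OpenSSL library was built; hmac is always reported, cmac
only when the library provides CMAC):

    1> proplists:get_value(macs, crypto:supports()).
    [hmac,cmac]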
@@ -388,36 +393,31 @@ mod_pow(Base, Exponent, Prime) ->
<<0>> -> error;
R -> R
end.
-verify(dss, none, Data, Signature, Key) when is_binary(Data) ->
- verify(dss, sha, {digest, Data}, Signature, Key);
-verify(Alg, Type, Data, Signature, Key) when is_binary(Data) ->
- verify(Alg, Type, {digest, hash(Type, Data)}, Signature, Key);
-verify(dss, Type, {digest, Digest}, Signature, Key) ->
- dss_verify_nif(Type, Digest, Signature, map_ensure_int_as_bin(Key));
-verify(rsa, Type, {digest, Digest}, Signature, Key) ->
- notsup_to_error(
- rsa_verify_nif(Type, Digest, Signature, map_ensure_int_as_bin(Key)));
-verify(ecdsa, Type, {digest, Digest}, Signature, [Key, Curve]) ->
- notsup_to_error(
- ecdsa_verify_nif(Type, Digest, Signature, nif_curve_params(Curve), ensure_int_as_bin(Key))).
-sign(dss, none, Data, Key) when is_binary(Data) ->
- sign(dss, sha, {digest, Data}, Key);
-sign(Alg, Type, Data, Key) when is_binary(Data) ->
- sign(Alg, Type, {digest, hash(Type, Data)}, Key);
-sign(rsa, Type, {digest, Digest}, Key) ->
- case rsa_sign_nif(Type, Digest, map_ensure_int_as_bin(Key)) of
- error -> erlang:error(badkey, [rsa, Type, {digest, Digest}, Key]);
- Sign -> Sign
- end;
-sign(dss, Type, {digest, Digest}, Key) ->
- case dss_sign_nif(Type, Digest, map_ensure_int_as_bin(Key)) of
- error -> erlang:error(badkey, [dss, Type, {digest, Digest}, Key]);
- Sign -> Sign
- end;
-sign(ecdsa, Type, {digest, Digest}, [Key, Curve]) ->
- case ecdsa_sign_nif(Type, Digest, nif_curve_params(Curve), ensure_int_as_bin(Key)) of
- error -> erlang:error(badkey, [ecdsa, Type, {digest, Digest}, [Key, Curve]]);
- Sign -> Sign
+
+verify(Algorithm, Type, Data, Signature, Key) ->
+ verify(Algorithm, Type, Data, Signature, Key, []).
+
+%% Backwards compatible
+verify(Algorithm = dss, none, Digest, Signature, Key, Options) ->
+ verify(Algorithm, sha, {digest, Digest}, Signature, Key, Options);
+verify(Algorithm, Type, Data, Signature, Key, Options) ->
+ case pkey_verify_nif(Algorithm, Type, Data, Signature, format_pkey(Algorithm, Key), Options) of
+ notsup -> erlang:error(notsup);
+ Boolean -> Boolean
+ end.
+
+
+sign(Algorithm, Type, Data, Key) ->
+ sign(Algorithm, Type, Data, Key, []).
+
+%% Backwards compatible
+sign(Algorithm = dss, none, Digest, Key, Options) ->
+ sign(Algorithm, sha, {digest, Digest}, Key, Options);
+sign(Algorithm, Type, Data, Key, Options) ->
+ case pkey_sign_nif(Algorithm, Type, Data, format_pkey(Algorithm, Key), Options) of
+ error -> erlang:error(badkey, [Algorithm, Type, Data, Key, Options]);
+ notsup -> erlang:error(notsup);
+ Signature -> Signature
end.
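
The dss/none clauses above keep the old calling convention working: a precomputed
SHA-1 digest passed with DigestType none is rewritten to the explicit
{digest, Digest} form. A sketch, where Digest is a 20-byte SHA-1 digest and
DSSPrivKey/DSSPubKey are placeholder DSS keys:

    Sig = crypto:sign(dss, none, Digest, DSSPrivKey),
    true = crypto:verify(dss, sha, {digest, Digest}, Sig, DSSPubKey).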
-spec public_encrypt(rsa, binary(), [binary()], rsa_padding()) ->
@@ -838,13 +838,9 @@ srp_value_B_nif(_Multiplier, _Verifier, _Generator, _Exponent, _Prime) -> ?nif_s
%% Digital signatures --------------------------------------------------------------------
-rsa_sign_nif(_Type,_Digest,_Key) -> ?nif_stub.
-dss_sign_nif(_Type,_Digest,_Key) -> ?nif_stub.
-ecdsa_sign_nif(_Type, _Digest, _Curve, _Key) -> ?nif_stub.
-dss_verify_nif(_Type, _Digest, _Signature, _Key) -> ?nif_stub.
-rsa_verify_nif(_Type, _Digest, _Signature, _Key) -> ?nif_stub.
-ecdsa_verify_nif(_Type, _Digest, _Signature, _Curve, _Key) -> ?nif_stub.
+pkey_sign_nif(_Algorithm, _Type, _Digest, _Key, _Options) -> ?nif_stub.
+pkey_verify_nif(_Algorithm, _Type, _Data, _Signature, _Key, _Options) -> ?nif_stub.
%% Public Keys --------------------------------------------------------------------
%% RSA Rivest-Shamir-Adleman functions
@@ -961,6 +957,15 @@ ensure_int_as_bin(Int) when is_integer(Int) ->
ensure_int_as_bin(Bin) ->
Bin.
+format_pkey(rsa, Key) ->
+ map_ensure_int_as_bin(Key);
+format_pkey(ecdsa, [Key, Curve]) ->
+ {nif_curve_params(Curve), ensure_int_as_bin(Key)};
+format_pkey(dss, Key) ->
+ map_ensure_int_as_bin(Key);
+format_pkey(_, Key) ->
+ Key.
+
%%--------------------------------------------------------------------
%%
-type rsa_padding() :: 'rsa_pkcs1_padding' | 'rsa_pkcs1_oaep_padding' | 'rsa_no_padding'.
@@ -1003,3 +1008,14 @@ erlint(<<MPIntSize:32/integer,MPIntValue/binary>>) ->
%%
mod_exp_nif(_Base,_Exp,_Mod,_bin_hdr) -> ?nif_stub.
+
+%%%----------------------------------------------------------------
+%% 9470495 == V(0,9,8,zh).
+%% 268435615 == V(1,0,0,i).
+%% 268439663 == V(1,0,1,f).
+
+packed_openssl_version(MAJ, MIN, FIX, P0) ->
+ %% crypto.c
+ P1 = atom_to_list(P0),
+ P = lists:sum([C-$a||C<-P1]),
+ ((((((((MAJ bsl 8) bor MIN) bsl 8 ) bor FIX) bsl 8) bor (P+1)) bsl 4) bor 16#f).
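
A quick shell check of the helper against the constants noted in the comments above:

    1> crypto:packed_openssl_version(1,0,1,f).
    268439663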
diff --git a/lib/crypto/test/blowfish_SUITE.erl b/lib/crypto/test/blowfish_SUITE.erl
index c2d0d2621b..c9033ac4f8 100644
--- a/lib/crypto/test/blowfish_SUITE.erl
+++ b/lib/crypto/test/blowfish_SUITE.erl
@@ -47,6 +47,11 @@
init_per_suite(Config) ->
case catch crypto:start() of
ok ->
+ catch ct:comment("~s",[element(3,hd(crypto:info_lib()))]),
+ catch ct:log("crypto:info_lib() -> ~p~n"
+ "crypto:supports() -> ~p~n"
+ "crypto:version() -> ~p~n"
+ ,[crypto:info_lib(), crypto:supports(), crypto:version()]),
Config;
_Else ->
{skip,"Could not start crypto!"}
diff --git a/lib/crypto/test/crypto_SUITE.erl b/lib/crypto/test/crypto_SUITE.erl
index 164f43dcb0..88f13d766c 100644
--- a/lib/crypto/test/crypto_SUITE.erl
+++ b/lib/crypto/test/crypto_SUITE.erl
@@ -177,6 +177,12 @@ init_per_suite(Config) ->
try crypto:start() of
ok ->
+ catch ct:comment("~s",[element(3,hd(crypto:info_lib()))]),
+ catch ct:log("crypto:info_lib() -> ~p~n"
+ "crypto:supports() -> ~p~n"
+ "crypto:version() -> ~p~n"
+ ,[crypto:info_lib(), crypto:supports(), crypto:version()]),
+
try crypto:strong_rand_bytes(1) of
_ ->
Config
@@ -745,10 +751,44 @@ do_sign_verify({Type, Hash, Public, Private, Msg}) ->
Signature = crypto:sign(Type, Hash, Msg, Private),
case crypto:verify(Type, Hash, Msg, Signature, Public) of
true ->
+ ct:log("OK crypto:sign(~p, ~p, ..., ..., ...)", [Type,Hash]),
negative_verify(Type, Hash, Msg, <<10,20>>, Public);
false ->
+ ct:log("ERROR crypto:sign(~p, ~p, ..., ..., ...)", [Type,Hash]),
ct:fail({{crypto, verify, [Type, Hash, Msg, Signature, Public]}})
- end.
+ end;
+do_sign_verify({Type, Hash, Public, Private, Msg, Options}) ->
+ LibVer =
+ case crypto:info_lib() of
+ [{<<"OpenSSL">>,Ver,<<"OpenSSL",_/binary>>}] -> Ver;
+ _ -> infinity
+ end,
+ Pad = proplists:get_value(rsa_padding, Options),
+ NotSupLow = lists:member(Pad, [rsa_pkcs1_pss_padding]),
+ try
+ crypto:sign(Type, Hash, Msg, Private, Options)
+ of
+ Signature ->
+ case crypto:verify(Type, Hash, Msg, Signature, Public, Options) of
+ true ->
+ ct:log("OK crypto:sign(~p, ~p, ..., ..., ..., ~p)", [Type,Hash,Options]),
+ negative_verify(Type, Hash, Msg, <<10,20>>, Public, Options);
+ false ->
+ ct:log("ERROR crypto:sign(~p, ~p, ..., ..., ..., ~p)", [Type,Hash,Options]),
+ ct:fail({{crypto, verify, [Type, Hash, Msg, Signature, Public, Options]}})
+ end
+ catch
+ error:notsup when NotSupLow == true,
+ is_integer(LibVer),
+ LibVer < 16#10001000 ->
+            %% Those options were introduced in OpenSSL 1.0.1
+ ct:log("notsup but OK in old cryptolib crypto:sign(~p, ~p, ..., ..., ..., ~p)",
+ [Type,Hash,Options]),
+ true;
+ C:E ->
+ ct:log("~p:~p crypto:sign(~p, ~p, ..., ..., ..., ~p)", [C,E,Type,Hash,Options]),
+ ct:fail({{crypto, sign_verify, [LibVer, Type, Hash, Msg, Public, Options]}})
+ end.
negative_verify(Type, Hash, Msg, Signature, Public) ->
case crypto:verify(Type, Hash, Msg, Signature, Public) of
@@ -758,6 +798,14 @@ negative_verify(Type, Hash, Msg, Signature, Public) ->
ok
end.
+negative_verify(Type, Hash, Msg, Signature, Public, Options) ->
+ case crypto:verify(Type, Hash, Msg, Signature, Public, Options) of
+ true ->
+ ct:fail({{crypto, verify, [Type, Hash, Msg, Signature, Public, Options]}, should_fail});
+ false ->
+ ok
+ end.
+
do_public_encrypt({Type, Public, Private, Msg, Padding}) ->
PublicEcn = (catch crypto:public_encrypt(Type, Msg, Public, Padding)),
case crypto:private_decrypt(Type, PublicEcn, Private, Padding) of
@@ -1172,13 +1220,29 @@ group_config(dss = Type, Config) ->
Msg = dss_plain(),
Public = dss_params() ++ [dss_public()],
Private = dss_params() ++ [dss_private()],
- SignVerify = [{Type, sha, Public, Private, Msg}],
+ SupportedHashs = proplists:get_value(hashs, crypto:supports(), []),
+ DssHashs =
+ case crypto:info_lib() of
+ [{<<"OpenSSL">>,LibVer,_}] when is_integer(LibVer), LibVer > 16#10001000 ->
+ [sha, sha224, sha256, sha384, sha512];
+ [{<<"OpenSSL">>,LibVer,_}] when is_integer(LibVer), LibVer > 16#10000000 ->
+ [sha, sha224, sha256];
+ _Else ->
+ [sha]
+ end,
+ SignVerify = [{Type, Hash, Public, Private, Msg}
+ || Hash <- DssHashs,
+ lists:member(Hash, SupportedHashs)],
[{sign_verify, SignVerify} | Config];
group_config(ecdsa = Type, Config) ->
{Private, Public} = ec_key_named(),
Msg = ec_msg(),
- SignVerify = [{Type, sha, Public, Private, Msg}],
+ SupportedHashs = proplists:get_value(hashs, crypto:supports(), []),
+ DssHashs = [sha, sha224, sha256, sha384, sha512],
+ SignVerify = [{Type, Hash, Public, Private, Msg}
+ || Hash <- DssHashs,
+ lists:member(Hash, SupportedHashs)],
[{sign_verify, SignVerify} | Config];
group_config(srp, Config) ->
GenerateCompute = [srp3(), srp6(), srp6a(), srp6a_smaller_prime()],
@@ -1262,18 +1326,38 @@ group_config(_, Config) ->
Config.
sign_verify_tests(Type, Msg, Public, Private, PublicS, PrivateS) ->
- sign_verify_tests(Type, [md5, sha, sha224, sha256], Msg, Public, Private) ++
- sign_verify_tests(Type, [sha384, sha512], Msg, PublicS, PrivateS).
-
-sign_verify_tests(Type, Hashs, Msg, Public, Private) ->
- lists:foldl(fun(Hash, Acc) ->
- case is_supported(Hash) of
- true ->
- [{Type, Hash, Public, Private, Msg}|Acc];
- false ->
- Acc
- end
- end, [], Hashs).
+ gen_sign_verify_tests(Type, [md5, ripemd160, sha, sha224, sha256], Msg, Public, Private,
+ [undefined,
+ [{rsa_padding, rsa_pkcs1_pss_padding}],
+ [{rsa_padding, rsa_pkcs1_pss_padding}, {rsa_pss_saltlen, 0}],
+ [{rsa_padding, rsa_x931_padding}]
+ ]) ++
+ gen_sign_verify_tests(Type, [sha384, sha512], Msg, PublicS, PrivateS,
+ [undefined,
+ [{rsa_padding, rsa_pkcs1_pss_padding}],
+ [{rsa_padding, rsa_pkcs1_pss_padding}, {rsa_pss_saltlen, 0}],
+ [{rsa_padding, rsa_x931_padding}]
+ ]).
+
+gen_sign_verify_tests(Type, Hashs, Msg, Public, Private, Opts) ->
+ lists:foldr(fun(Hash, Acc0) ->
+ case is_supported(Hash) of
+ true ->
+ lists:foldr(fun
+ (undefined, Acc1) ->
+ [{Type, Hash, Public, Private, Msg} | Acc1];
+ ([{rsa_padding, rsa_x931_padding} | _], Acc1)
+ when Hash =:= md5
+ orelse Hash =:= ripemd160
+ orelse Hash =:= sha224 ->
+ Acc1;
+ (Opt, Acc1) ->
+ [{Type, Hash, Public, Private, Msg, Opt} | Acc1]
+ end, Acc0, Opts);
+ false ->
+ Acc0
+ end
+ end, [], Hashs).
rfc_1321_msgs() ->
[<<"">>,
@@ -2294,7 +2378,7 @@ fmt_words(Words) ->
log_rsp_size(Label, Term) ->
S = erts_debug:size(Term),
- ct:pal("~s: ~w test(s), Memory used: ~s",
+ ct:log("~s: ~w test(s), Memory used: ~s",
[Label, length(Term), fmt_words(S)]).
read_rsp(Config, Type, Files) ->
diff --git a/lib/debugger/src/dbg_ieval.erl b/lib/debugger/src/dbg_ieval.erl
index 88c7caacb0..8009d62629 100644
--- a/lib/debugger/src/dbg_ieval.erl
+++ b/lib/debugger/src/dbg_ieval.erl
@@ -353,15 +353,15 @@ format_trace(What, Args, P) ->
{Called, {Le,Li,M,F,As}} = Args,
case Called of
extern ->
- io_lib:format("++ (~w) <~w> ~w:~w~ts~n",
+ io_lib:format("++ (~w) <~w> ~w:~tw~ts~n",
[Le,Li,M,F,format_args(As, P)]);
local ->
- io_lib:format("++ (~w) <~w> ~w~ts~n",
+ io_lib:format("++ (~w) <~w> ~tw~ts~n",
[Le,Li,F,format_args(As, P)])
end;
call_fun ->
{Le,Li,F,As} = Args,
- io_lib:format("++ (~w) <~w> ~w~ts~n",
+ io_lib:format("++ (~w) <~w> ~tw~ts~n",
[Le, Li, F, format_args(As, P)]);
return ->
{Le,Val} = Args,
@@ -370,7 +370,7 @@ format_trace(What, Args, P) ->
bif ->
{Le,Li,M,F,As} = Args,
- io_lib:format("++ (~w) <~w> ~w:~w~ts~n",
+ io_lib:format("++ (~w) <~w> ~w:~tw~ts~n",
[Le, Li, M, F, format_args(As, P)])
end.
diff --git a/lib/debugger/src/dbg_wx_break_win.erl b/lib/debugger/src/dbg_wx_break_win.erl
index 770681510d..10e9272254 100644
--- a/lib/debugger/src/dbg_wx_break_win.erl
+++ b/lib/debugger/src/dbg_wx_break_win.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -159,7 +159,7 @@ create_win(Parent, Pos, Type, Mod, Line) ->
%%--------------------------------------------------------------------
update_functions(WinInfo, Funcs) ->
Items = lists:map(fun([N, A]) ->
- lists:flatten(io_lib:format("~p/~p", [N,A]))
+ lists:flatten(io_lib:format("~tw/~w", [N,A]))
end,
Funcs),
wxListBox:set(WinInfo#winInfo.listbox, Items),
diff --git a/lib/debugger/src/dbg_wx_mon_win.erl b/lib/debugger/src/dbg_wx_mon_win.erl
index 9737c9e67f..fcd954454b 100644
--- a/lib/debugger/src/dbg_wx_mon_win.erl
+++ b/lib/debugger/src/dbg_wx_mon_win.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -342,7 +342,7 @@ add_process(WinInfo, Pid, Name, {Mod,Func,Args}, Status, Info) ->
Row = (WinInfo#winInfo.row),
Name2 = case Name of undefined -> ""; _ -> to_string(Name) end,
- FuncS = to_string("~w:~w/~w", [Mod, Func, length(Args)]),
+ FuncS = to_string("~w:~tw/~w", [Mod, Func, length(Args)]),
Info2 = case Info of {} -> ""; _ -> to_string(Info) end,
Pid2 = to_string("~p",[Pid]),
diff --git a/lib/debugger/src/dbg_wx_trace.erl b/lib/debugger/src/dbg_wx_trace.erl
index f4ee30618c..b1e0e03b4c 100644
--- a/lib/debugger/src/dbg_wx_trace.erl
+++ b/lib/debugger/src/dbg_wx_trace.erl
@@ -345,11 +345,12 @@ gui_cmd('Back Trace', State) ->
P = p(State),
lists:foreach(
fun({Le, {Mod,Func,Args}}) ->
- Str = io_lib:format("~p > ~p:~p"++P++"~n",
- [Le, Mod, Func, Args]),
+ Str = io_lib:format("~p > ~w:~tw~ts\n",
+ [Le, Mod, Func, format_args(Args, P)]),
dbg_wx_trace_win:trace_output(State#state.win,Str);
({Le, {Fun,Args}}) ->
- Str = io_lib:format("~p > ~p"++P++"~n", [Le, Fun, Args]),
+ Str = io_lib:format("~p > ~p~ts~n",
+ [Le, Fun, format_args(Args, P)]),
dbg_wx_trace_win:trace_output(State#state.win,Str);
(_) -> ignore
end,
@@ -539,6 +540,18 @@ add_break(WI, Coords, Type, Mod, Line) ->
Win = dbg_wx_trace_win:get_window(WI),
dbg_wx_break:start(Win, Coords, Type, Mod, Line).
+format_args(As, P) when is_list(As) ->
+ [$(,format_args1(As, P),$)];
+format_args(A, P) ->
+ [$/,io_lib:format(P, [A])].
+
+format_args1([A], P) ->
+ [io_lib:format(P, [A])];
+format_args1([A|As], P) ->
+ [io_lib:format(P, [A]),$,|format_args1(As, P)];
+format_args1([], _) ->
+ [].
+
%%--Commands from the interpreter-------------------------------------
int_cmd({interpret, Mod}, State) ->
diff --git a/lib/debugger/src/dbg_wx_win.erl b/lib/debugger/src/dbg_wx_win.erl
index f1298154ab..9f59915476 100644
--- a/lib/debugger/src/dbg_wx_win.erl
+++ b/lib/debugger/src/dbg_wx_win.erl
@@ -299,7 +299,7 @@ open_help(_Parent, HelpHtmlFile) ->
%%--------------------------------------------------------------------
to_string(Atom) when is_atom(Atom) ->
- atom_to_list(Atom);
+ io_lib:format("~tw", [Atom]);
to_string(Integer) when is_integer(Integer) ->
integer_to_list(Integer);
to_string([]) -> "";
diff --git a/lib/debugger/src/i.erl b/lib/debugger/src/i.erl
index 2da3e77618..62ce8d0e20 100644
--- a/lib/debugger/src/i.erl
+++ b/lib/debugger/src/i.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1998-2016. All Rights Reserved.
+%% Copyright Ericsson AB 1998-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -307,13 +307,13 @@ ip() ->
ip([{Pid,{M,F,A},Status,{}}|Stats]) ->
hformat(io_lib:format("~w",[Pid]),
- io_lib:format("~p:~p/~p",[M,F,length(A)]),
+ io_lib:format("~w:~tw/~w",[M,F,length(A)]),
io_lib:format("~w",[Status]),
""),
ip(Stats);
ip([{Pid,{M,F,A},Status,Info}|Stats]) ->
hformat(io_lib:format("~w",[Pid]),
- io_lib:format("~p:~p/~p",[M,F,length(A)]),
+ io_lib:format("~w:~tw/~w",[M,F,length(A)]),
io_lib:format("~w",[Status]),
io_lib:format("~w",[Info])),
ip(Stats);
@@ -321,7 +321,7 @@ ip([]) ->
ok.
hformat(A1, A2, A3, A4) ->
- format("~-12s ~-21s ~-9s ~-21s~n", [A1,A2,A3,A4]).
+ format("~-12s ~-21ts ~-9s ~-21s~n", [A1,A2,A3,A4]).
%% -------------------------------------------
diff --git a/lib/dialyzer/doc/src/notes.xml b/lib/dialyzer/doc/src/notes.xml
index 0d2cb6c4df..c26b7aab5e 100644
--- a/lib/dialyzer/doc/src/notes.xml
+++ b/lib/dialyzer/doc/src/notes.xml
@@ -32,6 +32,21 @@
<p>This document describes the changes made to the Dialyzer
application.</p>
+<section><title>Dialyzer 3.2.1</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+	    <p> Fix a bug where merging PLTs could lose information. The
+	    bug was introduced in Erlang/OTP 20.0. </p>
+ <p>
+ Own Id: OTP-14558 Aux Id: ERIERL-53 </p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>Dialyzer 3.2</title>
<section><title>Fixed Bugs and Malfunctions</title>
diff --git a/lib/dialyzer/src/dialyzer.erl b/lib/dialyzer/src/dialyzer.erl
index c319acb2fb..1538174d4a 100644
--- a/lib/dialyzer/src/dialyzer.erl
+++ b/lib/dialyzer/src/dialyzer.erl
@@ -498,24 +498,24 @@ call_or_apply_to_string(ArgNs, FailReason, SigArgs, SigRet,
true ->
%% We do not know which argument(s) caused the failure
io_lib:format("will never return since the success typing arguments"
- " are ~s\n", [SigArgs]);
+ " are ~ts\n", [SigArgs]);
false ->
io_lib:format("will never return since it differs in the ~s argument"
- " from the success typing arguments: ~s\n",
+ " from the success typing arguments: ~ts\n",
[PositionString, SigArgs])
end;
only_contract ->
case (ArgNs =:= []) orelse IsOverloaded of
true ->
%% We do not know which arguments caused the failure
- io_lib:format("breaks the contract ~s\n", [Contract]);
+ io_lib:format("breaks the contract ~ts\n", [Contract]);
false ->
- io_lib:format("breaks the contract ~s in the ~s argument\n",
+ io_lib:format("breaks the contract ~ts in the ~s argument\n",
[Contract, PositionString])
end;
both ->
- io_lib:format("will never return since the success typing is ~s -> ~s"
- " and the contract is ~s\n", [SigArgs, SigRet, Contract])
+ io_lib:format("will never return since the success typing is ~ts -> ~ts"
+ " and the contract is ~ts\n", [SigArgs, SigRet, Contract])
end.
form_positions(ArgNs) ->
@@ -533,9 +533,9 @@ form_positions(ArgNs) ->
form_expected_without_opaque([{N, T, TStr}]) ->
case erl_types:t_is_opaque(T) of
true ->
- io_lib:format("an opaque term of type ~s as ", [TStr]);
+ io_lib:format("an opaque term of type ~ts as ", [TStr]);
false ->
- io_lib:format("a term of type ~s (with opaque subterms) as ", [TStr])
+ io_lib:format("a term of type ~ts (with opaque subterms) as ", [TStr])
end ++ form_position_string([N]) ++ " argument";
form_expected_without_opaque(ExpectedTriples) -> %% TODO: can do much better here
{ArgNs, _Ts, _TStrs} = lists:unzip3(ExpectedTriples),
@@ -546,8 +546,8 @@ form_expected(ExpectedArgs) ->
[T] ->
TS = erl_types:t_to_string(T),
case erl_types:t_is_opaque(T) of
- true -> io_lib:format("an opaque term of type ~s is expected", [TS]);
- false -> io_lib:format("a structured term of type ~s is expected", [TS])
+ true -> io_lib:format("an opaque term of type ~ts is expected", [TS]);
+ false -> io_lib:format("a structured term of type ~ts is expected", [TS])
end;
[_,_|_] -> "terms of different types are expected in these positions"
end.
diff --git a/lib/dialyzer/src/dialyzer_cl_parse.erl b/lib/dialyzer/src/dialyzer_cl_parse.erl
index a456d38e64..80c10183cf 100644
--- a/lib/dialyzer/src/dialyzer_cl_parse.erl
+++ b/lib/dialyzer/src/dialyzer_cl_parse.erl
@@ -82,7 +82,7 @@ cl(["--get_warnings"|T]) ->
cl(["-D"|_]) ->
cl_error("No defines specified after -D");
cl(["-D"++Define|T]) ->
- Def = re:split(Define, "=", [{return, list}]),
+ Def = re:split(Define, "=", [{return, list}, unicode]),
append_defines(Def),
cl(T);
cl(["-h"|_]) ->
diff --git a/lib/dialyzer/src/dialyzer_contracts.erl b/lib/dialyzer/src/dialyzer_contracts.erl
index b554ebc2cc..e72c1aecfc 100644
--- a/lib/dialyzer/src/dialyzer_contracts.erl
+++ b/lib/dialyzer/src/dialyzer_contracts.erl
@@ -555,6 +555,9 @@ from_form_with_check(Form, ExpTypes, MFA, RecordTable, VarTable, Cache) ->
Site = {spec, MFA},
C1 = erl_types:t_check_record_fields(Form, ExpTypes, Site, RecordTable,
VarTable, Cache),
+  %% The check costs some time and, since contracts are assumed not to be
+  %% very deep, it adds little value.
+ %% erl_types:t_from_form_check_remote(Form, ExpTypes, MFA, RecordTable),
erl_types:t_from_form(Form, ExpTypes, Site, RecordTable, VarTable, C1).
constraints_to_dict(Constrs, MFA, RecDict, ExpTypes, RecordTable,
@@ -840,7 +843,7 @@ is_remote_types_related(Contract, CSig, Sig, MFA, RecDict) ->
t_from_forms_without_remote([{FType, []}], MFA, RecDict) ->
Site = {spec, MFA},
- {Type1, _} = erl_types:t_from_form_without_remote(FType, Site, RecDict),
+ Type1 = erl_types:t_from_form_without_remote(FType, Site, RecDict),
{ok, erl_types:subst_all_vars_to_any(Type1)};
t_from_forms_without_remote([{_FType, _Constrs}], _MFA, _RecDict) ->
%% 'When' constraints
diff --git a/lib/dialyzer/src/dialyzer_gui_wx.erl b/lib/dialyzer/src/dialyzer_gui_wx.erl
index 538327d4d1..b4b1872c12 100644
--- a/lib/dialyzer/src/dialyzer_gui_wx.erl
+++ b/lib/dialyzer/src/dialyzer_gui_wx.erl
@@ -1093,7 +1093,7 @@ macro_loop(Options, Win, Box, MacroText, TermText, Frame) ->
Fun =
fun(X) ->
Val = wxControlWithItems:getString(Box,X),
- [MacroName|_] = re:split(Val, " ", [{return, list}]),
+ [MacroName|_] = re:split(Val, " ", [{return, list}, unicode]),
list_to_atom(MacroName)
end,
Delete = [Fun(X) || X <- List],
diff --git a/lib/dialyzer/src/dialyzer_plt.erl b/lib/dialyzer/src/dialyzer_plt.erl
index 47994fc35b..95c8b5ebce 100644
--- a/lib/dialyzer/src/dialyzer_plt.erl
+++ b/lib/dialyzer/src/dialyzer_plt.erl
@@ -775,8 +775,13 @@ merge_tables(T1, T2) ->
tab_merge(ets:first(T1), T1, T2).
tab_merge('$end_of_table', T1, T2) ->
- true = ets:delete(T1),
- T2;
+ case ets:first(T1) of % no safe_fixtable()...
+ '$end_of_table' ->
+ true = ets:delete(T1),
+ T2;
+ Key ->
+ tab_merge(Key, T1, T2)
+ end;
tab_merge(K1, T1, T2) ->
Vs = ets:lookup(T1, K1),
NextK1 = ets:next(T1, K1),
diff --git a/lib/dialyzer/src/dialyzer_utils.erl b/lib/dialyzer/src/dialyzer_utils.erl
index 511a6d66bf..9b8fbc67eb 100644
--- a/lib/dialyzer/src/dialyzer_utils.erl
+++ b/lib/dialyzer/src/dialyzer_utils.erl
@@ -326,9 +326,12 @@ process_record_remote_types(CServer) ->
{record, Name} ->
FieldFun =
fun({Arity, Fields}, C4) ->
- Site = {record, {Module, Name, Arity}},
+ MRA = {Module, Name, Arity},
+ Site = {record, MRA},
{Fields1, C7} =
lists:mapfoldl(fun({FieldName, Field, _}, C5) ->
+ check_remote(Field, ExpTypes,
+ MRA, RecordTable),
{FieldT, C6} =
erl_types:t_from_form
(Field, ExpTypes, Site,
@@ -342,18 +345,12 @@ process_record_remote_types(CServer) ->
{FieldsList, C3} =
lists:mapfoldl(FieldFun, C2, orddict:to_list(Fields)),
{{Key, {FileLine, orddict:from_list(FieldsList)}}, C3};
- {type, Name, NArgs} ->
+ {_TypeOrOpaque, Name, NArgs} ->
%% Make sure warnings about unknown types are output
%% also for types unused by specs.
- Site = {type, {Module, Name, NArgs}},
- L = erl_anno:new(0),
- Args = lists:duplicate(NArgs, {var, L, '_'}),
- UserType = {user_type, L, Name, Args},
- {_NewType, C3} =
- erl_types:t_from_form(UserType, ExpTypes, Site,
- RecordTable, VarTable, C2),
- {{Key, Value}, C3};
- {opaque, _Name, _NArgs} ->
+ MTA = {Module, Name, NArgs},
+ {{_Module, _FileLine, Form, _ArgNames}, _Type} = Value,
+ check_remote(Form, ExpTypes, MTA, RecordTable),
{{Key, Value}, C2}
end
end,
@@ -454,6 +451,9 @@ msg_with_position(Fun, FileLine) ->
throw({error, NewMsg})
end.
+check_remote(Form, ExpTypes, What, RecordTable) ->
+ erl_types:t_from_form_check_remote(Form, ExpTypes, What, RecordTable).
+
-spec merge_types(codeserver(), dialyzer_plt:plt()) -> codeserver().
merge_types(CServer, Plt) ->
diff --git a/lib/dialyzer/src/typer.erl b/lib/dialyzer/src/typer.erl
index bf5484e5f6..16b9c8a94a 100644
--- a/lib/dialyzer/src/typer.erl
+++ b/lib/dialyzer/src/typer.erl
@@ -74,7 +74,8 @@
-spec start() -> no_return().
start() ->
-_ = io:setopts(standard_error, [{encoding,unicode}]),
+ _ = io:setopts(standard_error, [{encoding,unicode}]),
+ _ = io:setopts([{encoding,unicode}]),
{Args, Analysis} = process_cl_args(),
%% io:format("Args: ~p\n", [Args]),
%% io:format("Analysis: ~p\n", [Analysis]),
@@ -484,12 +485,12 @@ write_typed_file(File, Info) ->
write_typed_file(File, Info, NewFileName) ->
{ok, Binary} = file:read_file(File),
- Chars = binary_to_list(Binary),
+ Chars = unicode:characters_to_list(Binary),
write_typed_file(Chars, NewFileName, Info, 1, []),
io:format(" Saved as: ~tp\n", [NewFileName]).
write_typed_file(Chars, File, #info{functions = []}, _LNo, _Acc) ->
- ok = file:write_file(File, list_to_binary(Chars), [append]);
+ ok = file:write_file(File, unicode:characters_to_binary(Chars), [append]);
write_typed_file([Ch|Chs] = Chars, File, Info, LineNo, Acc) ->
[{Line,F,A}|RestFuncs] = Info#info.functions,
case Line of
@@ -519,7 +520,7 @@ write_typed_file([Ch|Chs] = Chars, File, Info, LineNo, Acc) ->
raw_write(F, A, Info, File, Content) ->
TypeInfo = get_type_string(F, A, Info, file),
ContentList = lists:reverse(Content) ++ TypeInfo ++ "\n",
- ContentBin = list_to_binary(ContentList),
+ ContentBin = unicode:characters_to_binary(ContentList),
file:write_file(File, ContentBin, [append]).
get_type_string(F, A, Info, Mode) ->
@@ -608,7 +609,7 @@ cl(["-D"++Def|Opts]) ->
case Def of
"" -> fatal_error("no variable name specified after -D");
_ ->
- DefPair = process_def_list(re:split(Def, "=", [{return, list}])),
+ DefPair = process_def_list(re:split(Def, "=", [{return, list}, unicode])),
{{def, DefPair}, Opts}
end;
cl(["-I",Dir|Opts]) -> {{inc, Dir}, Opts};
@@ -697,7 +698,7 @@ get_all_files(#args{files = Fs, files_r = Ds}) ->
test_erl_file_exclude_ann(File) ->
case is_erl_file(File) of
true -> %% Exclude files ending with ".ann.erl"
- case re:run(File, "[\.]ann[\.]erl$") of
+ case re:run(File, "[\.]ann[\.]erl$", [unicode]) of
{match, _} -> false;
nomatch -> true
end;
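
The typer changes above replace binary_to_list/list_to_binary with the unicode module and add the unicode option to the re calls, so that source files and command-line arguments containing code points above 255 are handled. A small illustrative sketch, not part of the patch (the function names are invented):

    %% list_to_binary/1 raises badarg for code points above 255;
    %% unicode:characters_to_binary/1 encodes them as UTF-8.
    to_utf8(Chars) ->
        case unicode:characters_to_binary(Chars) of
            Bin when is_binary(Bin) ->
                Bin;
            _Error ->
                erlang:error(badarg, [Chars])
        end.

    %% The unicode option is needed for re to accept subjects that
    %% contain code points above 255, e.g. "Key=Value" definitions
    %% typed on a non-Latin-1 command line.
    split_def(Def) ->
        re:split(Def, "=", [{return, list}, unicode]).
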
diff --git a/lib/dialyzer/test/map_SUITE_data/results/loop b/lib/dialyzer/test/map_SUITE_data/results/loop
new file mode 100644
index 0000000000..2e956a5709
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/results/loop
@@ -0,0 +1,4 @@
+
+loop.erl:63: The call loop:start_timer(#loop{state::'idle' | 'waiting',queues::#{'category1'=>#queue{limit::non_neg_integer(),buffer::[any()]}, 'category2'=>#queue{limit::non_neg_integer(),buffer::[any()]}},counters::#{'counter1':=10, 2:=10}}) does not have a term of type #loop{state::'idle' | 'waiting',timer::timer:tref(),queues::#{'category1'=>#queue{limit::non_neg_integer(),buffer::[any()]}, 'category2'=>#queue{limit::non_neg_integer(),buffer::[any()]}},counters::#{'counter1'=>non_neg_integer(), 2=>non_neg_integer()}} (with opaque subterms) as 1st argument
+loop.erl:67: Function wait/1 has no local return
+loop.erl:85: Record construction #loop{state::'idle' | 'waiting',timer::{'error',_} | {'ok',timer:tref()},queues::#{'category1'=>#queue{limit::non_neg_integer(),buffer::[any()]}, 'category2'=>#queue{limit::non_neg_integer(),buffer::[any()]}},counters::#{'counter1'=>non_neg_integer(), 2=>non_neg_integer()}} violates the declared type of field timer::'undefined' | timer:tref()
diff --git a/lib/dialyzer/test/map_SUITE_data/results/map_galore b/lib/dialyzer/test/map_SUITE_data/results/map_galore
index c34ba5cf30..9a140de255 100644
--- a/lib/dialyzer/test/map_SUITE_data/results/map_galore
+++ b/lib/dialyzer/test/map_SUITE_data/results/map_galore
@@ -1,11 +1,11 @@
map_galore.erl:1000: A key of type 42 cannot exist in a map of type #{1:='a', 2:='b', 4:='d', 5:='e', float()=>'c' | 'v'}
-map_galore.erl:1080: A key of type 'nonexisting' cannot exist in a map of type #{#{'map':='key', 'one':='small'}:=[32 | 49 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'second':='small'}:=[32 | 50 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'third':='small'}:=[32 | 51 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], 10:='a0', 11:='a1', 12:='a2', 13:='a3', 14:='a4', 15:='a5', 16:='a6', 17:='a7', 18:='a8', 19:='a9', 20:='b0', 21:='b1', 22:='b2', 23:='b3', 24:='b4', 25:='b5', 26:='b6', 27:='b7', 28:='b8', 29:='b9', 30:=[48 | 99,...], 31:=[49 | 99,...], 32:=[50 | 99,...], 33:=[51 | 99,...], 34:=[52 | 99,...], 35:=[53 | 99,...], 36:=[54 | 99,...], 37:=[55 | 99,...], 38:=[56 | 99,...], 39:=[57 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | float() | {[[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...],...]} | #{10:='a0', 11:='a1', 12:='a2', 13:='a3', 14:='a4', 15:='a5', 17:='a7', 18:='a8', 19:='a9', 20:='b0', 21:='b1', 22:='b2', 23:='b3', 24:='b4', 25:='b5', 27:='b7', 28:='b8', 29:='b9', 30:=[48 | 99,...], 31:=[49 | 99,...], 32:=[50 | 99,...], 33:=[51 | 99,...], 34:=[52 | 99,...], 35:=[53 | 99,...], 37:=[55 | 99,...], 38:=[56 | 99,...], 39:=[57 | 99,...], 'k16'=>'a6', 'k26'=>'b6', 'k36'=>[54 | 99,...], 16=>'a6', 26=>'b6', 36=>[54 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | {[[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...],...]}=>[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 100 | 101,...]}=>atom() | [1..255,...]}
-map_galore.erl:1082: A key of type 42 cannot exist in a map of type #{#{'map':='key', 'one':='small'}:=[32 | 49 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'second':='small'}:=[32 | 50 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'third':='small'}:=[32 | 51 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], 10:='a0', 11:='a1', 12:='a2', 13:='a3', 14:='a4', 15:='a5', 16:='a6', 17:='a7', 18:='a8', 19:='a9', 20:='b0', 21:='b1', 22:='b2', 23:='b3', 24:='b4', 25:='b5', 26:='b6', 27:='b7', 28:='b8', 29:='b9', 30:=[48 | 99,...], 31:=[49 | 99,...], 32:=[50 | 99,...], 33:=[51 | 99,...], 34:=[52 | 99,...], 35:=[53 | 99,...], 36:=[54 | 99,...], 37:=[55 | 99,...], 38:=[56 | 99,...], 39:=[57 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | float() | {[[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...],...]} | #{10:='a0', 11:='a1', 12:='a2', 13:='a3', 14:='a4', 15:='a5', 17:='a7', 18:='a8', 19:='a9', 20:='b0', 21:='b1', 22:='b2', 23:='b3', 24:='b4', 25:='b5', 27:='b7', 28:='b8', 29:='b9', 30:=[48 | 99,...], 31:=[49 | 99,...], 32:=[50 | 99,...], 33:=[51 | 99,...], 34:=[52 | 99,...], 35:=[53 | 99,...], 37:=[55 | 99,...], 38:=[56 | 99,...], 39:=[57 | 99,...], 'k16'=>'a6', 'k26'=>'b6', 'k36'=>[54 | 99,...], 16=>'a6', 26=>'b6', 36=>[54 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | {[[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...],...]}=>[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 100 | 101,...]}=>atom() | [1..255,...]}
-map_galore.erl:1140: The call map_galore:map_guard_sequence_1(#{'seq':=6, 'val':=[101,...]}) will never return since it differs in the 1st argument from the success typing arguments: (#{'seq':=1 | 2 | 3 | 4 | 5, 'val':=[97 | 98 | 99 | 100 | 101,...], #{'map':='key', 'one':='small'}=>[32 | 49 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'second':='small'}=>[32 | 50 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'third':='small'}=>[32 | 51 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], 10=>'a0', 11=>'a1', 12=>'a2', 13=>'a3', 14=>'a4', 15=>'a5', 16=>'a6', 17=>'a7', 18=>'a8', 19=>'a9', 20=>'b0', 21=>'b1', 22=>'b2', 23=>'b3', 24=>'b4', 25=>'b5', 26=>'b6', 27=>'b7', 28=>'b8', 29=>'b9', 30=>[48 | 99,...], 31=>[49 | 99,...], 32=>[50 | 99,...], 33=>[51 | 99,...], 34=>[52 | 99,...], 35=>[53 | 99,...], 36=>[54 | 99,...], 37=>[55 | 99,...], 38=>[56 | 99,...], 39=>[57 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | float() | {[any(),...]} | #{10:='a0', 11:='a1', 12:='a2', 13:='a3', 14:='a4', 15:='a5', 17:='a7', 18:='a8', 19:='a9', 20:='b0', 21:='b1', 22:='b2', 23:='b3', 24:='b4', 25:='b5', 27:='b7', 28:='b8', 29:='b9', 30:=[any(),...], 31:=[any(),...], 32:=[any(),...], 33:=[any(),...], 34:=[any(),...], 35:=[any(),...], 37:=[any(),...], 38:=[any(),...], 39:=[any(),...], 'k16'=>'a6', 'k26'=>'b6', 'k36'=>[any(),...], 16=>'a6', 26=>'b6', 36=>[any(),...], <<_:16>> | [any(),...] | {_}=>[any(),...]}=>atom() | [1..255,...]})
-map_galore.erl:1141: The call map_galore:map_guard_sequence_2(#{'b':=5}) will never return since it differs in the 1st argument from the success typing arguments: (#{'a':='gg' | 'kk' | 'sc' | 3 | 4, 'b'=>'other' | 3 | 4 | 5, 'c'=>'sc2', #{'map':='key', 'one':='small'}=>[32 | 49 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'second':='small'}=>[32 | 50 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'third':='small'}=>[32 | 51 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], 10=>'a0', 11=>'a1', 12=>'a2', 13=>'a3', 14=>'a4', 15=>'a5', 16=>'a6', 17=>'a7', 18=>'a8', 19=>'a9', 20=>'b0', 21=>'b1', 22=>'b2', 23=>'b3', 24=>'b4', 25=>'b5', 26=>'b6', 27=>'b7', 28=>'b8', 29=>'b9', 30=>[48 | 99,...], 31=>[49 | 99,...], 32=>[50 | 99,...], 33=>[51 | 99,...], 34=>[52 | 99,...], 35=>[53 | 99,...], 36=>[54 | 99,...], 37=>[55 | 99,...], 38=>[56 | 99,...], 39=>[57 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | float() | {[any(),...]} | #{10:='a0', 11:='a1', 12:='a2', 13:='a3', 14:='a4', 15:='a5', 17:='a7', 18:='a8', 19:='a9', 20:='b0', 21:='b1', 22:='b2', 23:='b3', 24:='b4', 25:='b5', 27:='b7', 28:='b8', 29:='b9', 30:=[any(),...], 31:=[any(),...], 32:=[any(),...], 33:=[any(),...], 34:=[any(),...], 35:=[any(),...], 37:=[any(),...], 38:=[any(),...], 39:=[any(),...], 'k16'=>'a6', 'k26'=>'b6', 'k36'=>[any(),...], 16=>'a6', 26=>'b6', 36=>[any(),...], <<_:16>> | [any(),...] | {_}=>[any(),...]}=>atom() | [1..255,...]})
-map_galore.erl:1209: The call map_galore:map_guard_sequence_1(#{'seq':=6, 'val':=[101,...], #{'map':='key', 'one':='small'}:=[32 | 49 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'second':='small'}:=[32 | 50 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'third':='small'}:=[32 | 51 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], 10:='a0', 11:='a1', 12:='a2', 13:='a3', 14:='a4', 15:='a5', 16:='a6', 17:='a7', 18:='a8', 19:='a9', 20:='b0', 21:='b1', 22:='b2', 23:='b3', 24:='b4', 25:='b5', 26:='b6', 27:='b7', 28:='b8', 29:='b9', 30:=[48 | 99,...], 31:=[49 | 99,...], 32:=[50 | 99,...], 33:=[51 | 99,...], 34:=[52 | 99,...], 35:=[53 | 99,...], 36:=[54 | 99,...], 37:=[55 | 99,...], 38:=[56 | 99,...], 39:=[57 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | float() | {[[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | 3,...]} | #{10:='a0', 11:='a1', 12:='a2', 13:='a3', 14:='a4', 15:='a5', 17:='a7', 18:='a8', 19:='a9', 20:='b0', 21:='b1', 22:='b2', 23:='b3', 24:='b4', 25:='b5', 27:='b7', 28:='b8', 29:='b9', 30:=[48 | 99,...], 31:=[49 | 99,...], 32:=[50 | 99,...], 33:=[51 | 99,...], 34:=[52 | 99,...], 35:=[53 | 99,...], 37:=[55 | 99,...], 38:=[56 | 99,...], 39:=[57 | 99,...], 'k16'=>'a6', 'k26'=>'b6', 'k36'=>[54 | 99,...], 16=>'a6', 26=>'b6', 36=>[54 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | {[[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...],...]}=>[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 100 | 101,...]}=>atom() | [1..255,...]}) will never return since it differs in the 1st argument from the success typing arguments: (#{'seq':=1 | 2 | 3 | 4 | 5, 'val':=[97 | 98 | 99 | 100 | 101,...], #{'map':='key', 'one':='small'}=>[32 | 49 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'second':='small'}=>[32 | 50 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'third':='small'}=>[32 | 51 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], 10=>'a0', 11=>'a1', 12=>'a2', 13=>'a3', 14=>'a4', 15=>'a5', 16=>'a6', 17=>'a7', 18=>'a8', 19=>'a9', 20=>'b0', 21=>'b1', 22=>'b2', 23=>'b3', 24=>'b4', 25=>'b5', 26=>'b6', 27=>'b7', 28=>'b8', 29=>'b9', 30=>[48 | 99,...], 31=>[49 | 99,...], 32=>[50 | 99,...], 33=>[51 | 99,...], 34=>[52 | 99,...], 35=>[53 | 99,...], 36=>[54 | 99,...], 37=>[55 | 99,...], 38=>[56 | 99,...], 39=>[57 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | float() | {[any(),...]} | #{10:='a0', 11:='a1', 12:='a2', 13:='a3', 14:='a4', 15:='a5', 17:='a7', 18:='a8', 19:='a9', 20:='b0', 21:='b1', 22:='b2', 23:='b3', 24:='b4', 25:='b5', 27:='b7', 28:='b8', 29:='b9', 30:=[any(),...], 31:=[any(),...], 32:=[any(),...], 33:=[any(),...], 34:=[any(),...], 35:=[any(),...], 37:=[any(),...], 38:=[any(),...], 39:=[any(),...], 'k16'=>'a6', 'k26'=>'b6', 'k36'=>[any(),...], 16=>'a6', 26=>'b6', 36=>[any(),...], <<_:16>> | [any(),...] | {_}=>[any(),...]}=>atom() | [1..255,...]})
-map_galore.erl:1210: The call map_galore:map_guard_sequence_2(#{'b':=5, #{'map':='key', 'one':='small'}:=[32 | 49 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'second':='small'}:=[32 | 50 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'third':='small'}:=[32 | 51 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], 10:='a0', 11:='a1', 12:='a2', 13:='a3', 14:='a4', 15:='a5', 16:='a6', 17:='a7', 18:='a8', 19:='a9', 20:='b0', 21:='b1', 22:='b2', 23:='b3', 24:='b4', 25:='b5', 26:='b6', 27:='b7', 28:='b8', 29:='b9', 30:=[48 | 99,...], 31:=[49 | 99,...], 32:=[50 | 99,...], 33:=[51 | 99,...], 34:=[52 | 99,...], 35:=[53 | 99,...], 36:=[54 | 99,...], 37:=[55 | 99,...], 38:=[56 | 99,...], 39:=[57 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | float() | {[[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | 3,...]} | #{10:='a0', 11:='a1', 12:='a2', 13:='a3', 14:='a4', 15:='a5', 17:='a7', 18:='a8', 19:='a9', 20:='b0', 21:='b1', 22:='b2', 23:='b3', 24:='b4', 25:='b5', 27:='b7', 28:='b8', 29:='b9', 30:=[48 | 99,...], 31:=[49 | 99,...], 32:=[50 | 99,...], 33:=[51 | 99,...], 34:=[52 | 99,...], 35:=[53 | 99,...], 37:=[55 | 99,...], 38:=[56 | 99,...], 39:=[57 | 99,...], 'k16'=>'a6', 'k26'=>'b6', 'k36'=>[54 | 99,...], 16=>'a6', 26=>'b6', 36=>[54 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | {[[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...],...]}=>[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 100 | 101,...]}=>atom() | [1..255,...]}) will never return since it differs in the 1st argument from the success typing arguments: (#{'a':='gg' | 'kk' | 'sc' | 3 | 4, 'b'=>'other' | 3 | 4 | 5, 'c'=>'sc2', #{'map':='key', 'one':='small'}=>[32 | 49 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'second':='small'}=>[32 | 50 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], #{'map':='key', 'third':='small'}=>[32 | 51 | 97 | 101 | 107 | 108 | 109 | 112 | 115 | 121,...], 10=>'a0', 11=>'a1', 12=>'a2', 13=>'a3', 14=>'a4', 15=>'a5', 16=>'a6', 17=>'a7', 18=>'a8', 19=>'a9', 20=>'b0', 21=>'b1', 22=>'b2', 23=>'b3', 24=>'b4', 25=>'b5', 26=>'b6', 27=>'b7', 28=>'b8', 29=>'b9', 30=>[48 | 99,...], 31=>[49 | 99,...], 32=>[50 | 99,...], 33=>[51 | 99,...], 34=>[52 | 99,...], 35=>[53 | 99,...], 36=>[54 | 99,...], 37=>[55 | 99,...], 38=>[56 | 99,...], 39=>[57 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | float() | {[any(),...]} | #{10:='a0', 11:='a1', 12:='a2', 13:='a3', 14:='a4', 15:='a5', 17:='a7', 18:='a8', 19:='a9', 20:='b0', 21:='b1', 22:='b2', 23:='b3', 24:='b4', 25:='b5', 27:='b7', 28:='b8', 29:='b9', 30:=[any(),...], 31:=[any(),...], 32:=[any(),...], 33:=[any(),...], 34:=[any(),...], 35:=[any(),...], 37:=[any(),...], 38:=[any(),...], 39:=[any(),...], 'k16'=>'a6', 'k26'=>'b6', 'k36'=>[any(),...], 16=>'a6', 26=>'b6', 36=>[any(),...], <<_:16>> | [any(),...] | {_}=>[any(),...]}=>atom() | [1..255,...]})
+map_galore.erl:1080: A key of type 'nonexisting' cannot exist in a map of type #{10:='a0', 11:='a1', 12:='a2', 13:='a3', 14:='a4', 15:='a5', 16:='a6', 17:='a7', 18:='a8', 19:='a9', 20:='b0', 21:='b1', 22:='b2', 23:='b3', 24:='b4', 25:='b5', 26:='b6', 27:='b7', 28:='b8', 29:='b9', 30:=[48 | 99,...], 31:=[49 | 99,...], 32:=[50 | 99,...], 33:=[51 | 99,...], 34:=[52 | 99,...], 35:=[53 | 99,...], 36:=[54 | 99,...], 37:=[55 | 99,...], 38:=[56 | 99,...], 39:=[57 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | float() | {[[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...],...]} | #{'k16'=>'a6', 'k26'=>'b6', 'k36'=>[54 | 99,...], 'map'=>'key', 'one'=>'small', 'second'=>'small', 'third'=>'small', 10=>'a0', 11=>'a1', 12=>'a2', 13=>'a3', 14=>'a4', 15=>'a5', 16=>'a6', 17=>'a7', 18=>'a8', 19=>'a9', 20=>'b0', 21=>'b1', 22=>'b2', 23=>'b3', 24=>'b4', 25=>'b5', 26=>'b6', 27=>'b7', 28=>'b8', 29=>'b9', 30=>[48 | 99,...], 31=>[49 | 99,...], 32=>[50 | 99,...], 33=>[51 | 99,...], 34=>[52 | 99,...], 35=>[53 | 99,...], 36=>[54 | 99,...], 37=>[55 | 99,...], 38=>[56 | 99,...], 39=>[57 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | {[[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...],...]}=>[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 100 | 101,...]}=>atom() | [1..255,...]}
+map_galore.erl:1082: A key of type 42 cannot exist in a map of type #{10:='a0', 11:='a1', 12:='a2', 13:='a3', 14:='a4', 15:='a5', 16:='a6', 17:='a7', 18:='a8', 19:='a9', 20:='b0', 21:='b1', 22:='b2', 23:='b3', 24:='b4', 25:='b5', 26:='b6', 27:='b7', 28:='b8', 29:='b9', 30:=[48 | 99,...], 31:=[49 | 99,...], 32:=[50 | 99,...], 33:=[51 | 99,...], 34:=[52 | 99,...], 35:=[53 | 99,...], 36:=[54 | 99,...], 37:=[55 | 99,...], 38:=[56 | 99,...], 39:=[57 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | float() | {[[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...],...]} | #{'k16'=>'a6', 'k26'=>'b6', 'k36'=>[54 | 99,...], 'map'=>'key', 'one'=>'small', 'second'=>'small', 'third'=>'small', 10=>'a0', 11=>'a1', 12=>'a2', 13=>'a3', 14=>'a4', 15=>'a5', 16=>'a6', 17=>'a7', 18=>'a8', 19=>'a9', 20=>'b0', 21=>'b1', 22=>'b2', 23=>'b3', 24=>'b4', 25=>'b5', 26=>'b6', 27=>'b7', 28=>'b8', 29=>'b9', 30=>[48 | 99,...], 31=>[49 | 99,...], 32=>[50 | 99,...], 33=>[51 | 99,...], 34=>[52 | 99,...], 35=>[53 | 99,...], 36=>[54 | 99,...], 37=>[55 | 99,...], 38=>[56 | 99,...], 39=>[57 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | {[[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...],...]}=>[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 100 | 101,...]}=>atom() | [1..255,...]}
+map_galore.erl:1140: The call map_galore:map_guard_sequence_1(#{'seq':=6, 'val':=[101,...]}) will never return since it differs in the 1st argument from the success typing arguments: (#{'seq':=1 | 2 | 3 | 4 | 5, 'val':=[97 | 98 | 99 | 100 | 101,...], 10=>'a0', 11=>'a1', 12=>'a2', 13=>'a3', 14=>'a4', 15=>'a5', 16=>'a6', 17=>'a7', 18=>'a8', 19=>'a9', 20=>'b0', 21=>'b1', 22=>'b2', 23=>'b3', 24=>'b4', 25=>'b5', 26=>'b6', 27=>'b7', 28=>'b8', 29=>'b9', 30=>[48 | 99,...], 31=>[49 | 99,...], 32=>[50 | 99,...], 33=>[51 | 99,...], 34=>[52 | 99,...], 35=>[53 | 99,...], 36=>[54 | 99,...], 37=>[55 | 99,...], 38=>[56 | 99,...], 39=>[57 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | float() | {[any(),...]} | #{'k16'=>'a6', 'k26'=>'b6', 'k36'=>[any(),...], 'map'=>'key', 'one'=>'small', 'second'=>'small', 'third'=>'small', 10=>'a0', 11=>'a1', 12=>'a2', 13=>'a3', 14=>'a4', 15=>'a5', 16=>'a6', 17=>'a7', 18=>'a8', 19=>'a9', 20=>'b0', 21=>'b1', 22=>'b2', 23=>'b3', 24=>'b4', 25=>'b5', 26=>'b6', 27=>'b7', 28=>'b8', 29=>'b9', 30=>[any(),...], 31=>[any(),...], 32=>[any(),...], 33=>[any(),...], 34=>[any(),...], 35=>[any(),...], 36=>[any(),...], 37=>[any(),...], 38=>[any(),...], 39=>[any(),...], <<_:16>> | [any(),...] | {_}=>[any(),...]}=>atom() | [1..255,...]})
+map_galore.erl:1141: The call map_galore:map_guard_sequence_2(#{'b':=5}) will never return since it differs in the 1st argument from the success typing arguments: (#{'a':='gg' | 'kk' | 'sc' | 3 | 4, 'b'=>'other' | 3 | 4 | 5, 'c'=>'sc2', 10=>'a0', 11=>'a1', 12=>'a2', 13=>'a3', 14=>'a4', 15=>'a5', 16=>'a6', 17=>'a7', 18=>'a8', 19=>'a9', 20=>'b0', 21=>'b1', 22=>'b2', 23=>'b3', 24=>'b4', 25=>'b5', 26=>'b6', 27=>'b7', 28=>'b8', 29=>'b9', 30=>[48 | 99,...], 31=>[49 | 99,...], 32=>[50 | 99,...], 33=>[51 | 99,...], 34=>[52 | 99,...], 35=>[53 | 99,...], 36=>[54 | 99,...], 37=>[55 | 99,...], 38=>[56 | 99,...], 39=>[57 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | float() | {[any(),...]} | #{'k16'=>'a6', 'k26'=>'b6', 'k36'=>[any(),...], 'map'=>'key', 'one'=>'small', 'second'=>'small', 'third'=>'small', 10=>'a0', 11=>'a1', 12=>'a2', 13=>'a3', 14=>'a4', 15=>'a5', 16=>'a6', 17=>'a7', 18=>'a8', 19=>'a9', 20=>'b0', 21=>'b1', 22=>'b2', 23=>'b3', 24=>'b4', 25=>'b5', 26=>'b6', 27=>'b7', 28=>'b8', 29=>'b9', 30=>[any(),...], 31=>[any(),...], 32=>[any(),...], 33=>[any(),...], 34=>[any(),...], 35=>[any(),...], 36=>[any(),...], 37=>[any(),...], 38=>[any(),...], 39=>[any(),...], <<_:16>> | [any(),...] | {_}=>[any(),...]}=>atom() | [1..255,...]})
+map_galore.erl:1209: The call map_galore:map_guard_sequence_1(#{'seq':=6, 'val':=[101,...], 10:='a0', 11:='a1', 12:='a2', 13:='a3', 14:='a4', 15:='a5', 16:='a6', 17:='a7', 18:='a8', 19:='a9', 20:='b0', 21:='b1', 22:='b2', 23:='b3', 24:='b4', 25:='b5', 26:='b6', 27:='b7', 28:='b8', 29:='b9', 30:=[48 | 99,...], 31:=[49 | 99,...], 32:=[50 | 99,...], 33:=[51 | 99,...], 34:=[52 | 99,...], 35:=[53 | 99,...], 36:=[54 | 99,...], 37:=[55 | 99,...], 38:=[56 | 99,...], 39:=[57 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | float() | {[[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | 3,...]} | #{'k16'=>'a6', 'k26'=>'b6', 'k36'=>[54 | 99,...], 'map'=>'key', 'one'=>'small', 'second'=>'small', 'third'=>'small', 10=>'a0', 11=>'a1', 12=>'a2', 13=>'a3', 14=>'a4', 15=>'a5', 16=>'a6', 17=>'a7', 18=>'a8', 19=>'a9', 20=>'b0', 21=>'b1', 22=>'b2', 23=>'b3', 24=>'b4', 25=>'b5', 26=>'b6', 27=>'b7', 28=>'b8', 29=>'b9', 30=>[48 | 99,...], 31=>[49 | 99,...], 32=>[50 | 99,...], 33=>[51 | 99,...], 34=>[52 | 99,...], 35=>[53 | 99,...], 36=>[54 | 99,...], 37=>[55 | 99,...], 38=>[56 | 99,...], 39=>[57 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | {[[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...],...]}=>[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 100 | 101,...]}=>atom() | [1..255,...]}) will never return since it differs in the 1st argument from the success typing arguments: (#{'seq':=1 | 2 | 3 | 4 | 5, 'val':=[97 | 98 | 99 | 100 | 101,...], 10=>'a0', 11=>'a1', 12=>'a2', 13=>'a3', 14=>'a4', 15=>'a5', 16=>'a6', 17=>'a7', 18=>'a8', 19=>'a9', 20=>'b0', 21=>'b1', 22=>'b2', 23=>'b3', 24=>'b4', 25=>'b5', 26=>'b6', 27=>'b7', 28=>'b8', 29=>'b9', 30=>[48 | 99,...], 31=>[49 | 99,...], 32=>[50 | 99,...], 33=>[51 | 99,...], 34=>[52 | 99,...], 35=>[53 | 99,...], 36=>[54 | 99,...], 37=>[55 | 99,...], 38=>[56 | 99,...], 39=>[57 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | float() | {[any(),...]} | #{'k16'=>'a6', 'k26'=>'b6', 'k36'=>[any(),...], 'map'=>'key', 'one'=>'small', 'second'=>'small', 'third'=>'small', 10=>'a0', 11=>'a1', 12=>'a2', 13=>'a3', 14=>'a4', 15=>'a5', 16=>'a6', 17=>'a7', 18=>'a8', 19=>'a9', 20=>'b0', 21=>'b1', 22=>'b2', 23=>'b3', 24=>'b4', 25=>'b5', 26=>'b6', 27=>'b7', 28=>'b8', 29=>'b9', 30=>[any(),...], 31=>[any(),...], 32=>[any(),...], 33=>[any(),...], 34=>[any(),...], 35=>[any(),...], 36=>[any(),...], 37=>[any(),...], 38=>[any(),...], 39=>[any(),...], <<_:16>> | [any(),...] | {_}=>[any(),...]}=>atom() | [1..255,...]})
+map_galore.erl:1210: The call map_galore:map_guard_sequence_2(#{'b':=5, 10:='a0', 11:='a1', 12:='a2', 13:='a3', 14:='a4', 15:='a5', 16:='a6', 17:='a7', 18:='a8', 19:='a9', 20:='b0', 21:='b1', 22:='b2', 23:='b3', 24:='b4', 25:='b5', 26:='b6', 27:='b7', 28:='b8', 29:='b9', 30:=[48 | 99,...], 31:=[49 | 99,...], 32:=[50 | 99,...], 33:=[51 | 99,...], 34:=[52 | 99,...], 35:=[53 | 99,...], 36:=[54 | 99,...], 37:=[55 | 99,...], 38:=[56 | 99,...], 39:=[57 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | float() | {[[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | 3,...]} | #{'k16'=>'a6', 'k26'=>'b6', 'k36'=>[54 | 99,...], 'map'=>'key', 'one'=>'small', 'second'=>'small', 'third'=>'small', 10=>'a0', 11=>'a1', 12=>'a2', 13=>'a3', 14=>'a4', 15=>'a5', 16=>'a6', 17=>'a7', 18=>'a8', 19=>'a9', 20=>'b0', 21=>'b1', 22=>'b2', 23=>'b3', 24=>'b4', 25=>'b5', 26=>'b6', 27=>'b7', 28=>'b8', 29=>'b9', 30=>[48 | 99,...], 31=>[49 | 99,...], 32=>[50 | 99,...], 33=>[51 | 99,...], 34=>[52 | 99,...], 35=>[53 | 99,...], 36=>[54 | 99,...], 37=>[55 | 99,...], 38=>[56 | 99,...], 39=>[57 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | {[[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...],...]}=>[48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 100 | 101,...]}=>atom() | [1..255,...]}) will never return since it differs in the 1st argument from the success typing arguments: (#{'a':='gg' | 'kk' | 'sc' | 3 | 4, 'b'=>'other' | 3 | 4 | 5, 'c'=>'sc2', 10=>'a0', 11=>'a1', 12=>'a2', 13=>'a3', 14=>'a4', 15=>'a5', 16=>'a6', 17=>'a7', 18=>'a8', 19=>'a9', 20=>'b0', 21=>'b1', 22=>'b2', 23=>'b3', 24=>'b4', 25=>'b5', 26=>'b6', 27=>'b7', 28=>'b8', 29=>'b9', 30=>[48 | 99,...], 31=>[49 | 99,...], 32=>[50 | 99,...], 33=>[51 | 99,...], 34=>[52 | 99,...], 35=>[53 | 99,...], 36=>[54 | 99,...], 37=>[55 | 99,...], 38=>[56 | 99,...], 39=>[57 | 99,...], <<_:16>> | [48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57,...] | float() | {[any(),...]} | #{'k16'=>'a6', 'k26'=>'b6', 'k36'=>[any(),...], 'map'=>'key', 'one'=>'small', 'second'=>'small', 'third'=>'small', 10=>'a0', 11=>'a1', 12=>'a2', 13=>'a3', 14=>'a4', 15=>'a5', 16=>'a6', 17=>'a7', 18=>'a8', 19=>'a9', 20=>'b0', 21=>'b1', 22=>'b2', 23=>'b3', 24=>'b4', 25=>'b5', 26=>'b6', 27=>'b7', 28=>'b8', 29=>'b9', 30=>[any(),...], 31=>[any(),...], 32=>[any(),...], 33=>[any(),...], 34=>[any(),...], 35=>[any(),...], 36=>[any(),...], 37=>[any(),...], 38=>[any(),...], 39=>[any(),...], <<_:16>> | [any(),...] | {_}=>[any(),...]}=>atom() | [1..255,...]})
map_galore.erl:1418: Fun application with arguments (#{'s':='none', 'v':='none'}) will never return since it differs in the 1st argument from the success typing arguments: (#{'s':='l' | 't' | 'v', 'v':='none' | <<_:16>> | [<<_:16>>,...] | {<<_:16>>,<<_:16>>}})
map_galore.erl:1491: The test #{} =:= #{'a':=1} can never evaluate to 'true'
map_galore.erl:1492: The test #{'a':=1} =:= #{} can never evaluate to 'true'
diff --git a/lib/dialyzer/test/map_SUITE_data/results/opaque_key b/lib/dialyzer/test/map_SUITE_data/results/opaque_key
index 2ae0e0c5c6..8d6379b5e0 100644
--- a/lib/dialyzer/test/map_SUITE_data/results/opaque_key
+++ b/lib/dialyzer/test/map_SUITE_data/results/opaque_key
@@ -1,4 +1,5 @@
+opaque_key_adt.erl:35: Invalid type specification for function opaque_key_adt:s2/0. The success typing is () -> #{3:='a'}
opaque_key_adt.erl:41: Invalid type specification for function opaque_key_adt:s4/0. The success typing is () -> #{1:='a'}
opaque_key_adt.erl:44: Invalid type specification for function opaque_key_adt:s5/0. The success typing is () -> #{2:=3}
opaque_key_adt.erl:56: Invalid type specification for function opaque_key_adt:smt1/0. The success typing is () -> #{3:='a'}
diff --git a/lib/dialyzer/test/map_SUITE_data/src/loop.erl b/lib/dialyzer/test/map_SUITE_data/src/loop.erl
new file mode 100644
index 0000000000..c861052d9f
--- /dev/null
+++ b/lib/dialyzer/test/map_SUITE_data/src/loop.erl
@@ -0,0 +1,92 @@
+-module(loop).
+
+-export([timeout/2]).
+
+-export([idle/2, waiting/2]).
+
+-type request_category() :: category1 | category2.
+
+-type counter() :: counter1 | 2.
+
+-type counters() :: #{counter() => non_neg_integer()}.
+
+-record(queue, {limit = 0 :: non_neg_integer(),
+ buffer = [] :: list()}).
+
+-type request_queues() :: #{request_category() => #queue{}}.
+
+-record(?MODULE,
+ {state = idle :: idle | waiting,
+ timer = undefined :: undefined | timer:tref(),
+ queues = #{category1 => #queue{},
+ category2 => #queue{}} :: request_queues(),
+ counters = new_counters() :: counters()}).
+-spec timeout(Ref, Timer :: timer:tref()) -> {noreply, Ref}.
+timeout(Ref, Timer) ->
+ handle_message(Ref, {timeout, Timer}).
+
+-type message() :: {reset, request_category()}
+ | {timeout, timer:tref()}.
+
+-spec handle_message(Ref, Message :: message()) ->
+ {reply, boolean(), Ref} | {noreply, Ref}.
+handle_message(Ref, Msg) ->
+ MV = #?MODULE{state = State} = get(mv),
+ case apply(?MODULE, State, [Msg, MV]) of
+ {reply, Result, NewMV} ->
+ put(mv, NewMV),
+ {reply, Result, Ref};
+ {noreply, NewMV} ->
+ put(mv, NewMV),
+ {noreply, Ref}
+ end.
+
+-spec idle(Message :: message(), #?MODULE{}) ->
+ {reply, boolean(), #?MODULE{}} | {noreply, #?MODULE{}}.
+idle({reset, Category}, MV = #?MODULE{queues = Queues}) ->
+ case Queues of
+ #{Category := #queue{limit = 0}} ->
+ {reply, false, MV};
+ _ ->
+ wait(MV)
+ end;
+idle(_, MV) ->
+ {noreply, MV}.
+
+-spec waiting(Message :: message(), #?MODULE{}) ->
+ {reply, boolean(), #?MODULE{}} | {noreply, #?MODULE{}}.
+waiting({reset, _Category}, MV = #?MODULE{}) ->
+ NewMV = stop_timer(MV),
+ {noreply, NewMV#?MODULE{state = idle}};
+waiting({timeout, Timer}, #?MODULE{timer = Timer} = MV) ->
+ %% The opaque warning is an effect of the call to timer:send_after().
+ {noreply, start_timer(MV#?MODULE{timer = undefined,
+ counters = new_counters()})}.
+
+-spec wait(#?MODULE{}) -> {noreply, #?MODULE{}}.
+wait(MV) ->
+ {noreply, start_timer(MV#?MODULE{state = waiting})}.
+
+-spec stop_timer(#?MODULE{}) -> #?MODULE{}.
+stop_timer(MV) ->
+ case MV#?MODULE.timer of
+ undefined ->
+ MV;
+ Timer ->
+ timer:cancel(Timer),
+ MV#?MODULE{timer = undefined}
+ end.
+
+-spec start_timer(MV :: #?MODULE{}) -> #?MODULE{}.
+start_timer(MV) ->
+ case MV#?MODULE.timer of
+ undefined ->
+ %% Note: timer:send_after() returns {ok, TRef} | {error, _}.
+ MV#?MODULE{timer = timer:send_after(1000, ?MODULE)};
+ _Timer ->
+ start_timer(stop_timer(MV))
+ end.
+
+-spec new_counters() -> counters().
+new_counters() ->
+ #{counter1 => 10, 2 => 10}.
diff --git a/lib/dialyzer/test/map_SUITE_data/src/opaque_key/opaque_key_adt.erl b/lib/dialyzer/test/map_SUITE_data/src/opaque_key/opaque_key_adt.erl
index b98c713c6b..9228cfa413 100644
--- a/lib/dialyzer/test/map_SUITE_data/src/opaque_key/opaque_key_adt.erl
+++ b/lib/dialyzer/test/map_SUITE_data/src/opaque_key/opaque_key_adt.erl
@@ -33,7 +33,7 @@ s0() -> #{}.
s1() -> #{3 => a}.
-spec s2() -> s(atom() | 3).
-s2() -> #{3 => a}. %% Contract breakage (not found)
+s2() -> #{3 => a}. %% Contract breakage
-spec s3() -> s(atom() | 3).
s3() -> #{3 => 5, a => 6, 7 => 8}.
diff --git a/lib/dialyzer/test/options2_SUITE_data/results/unused_unknown_type b/lib/dialyzer/test/options2_SUITE_data/results/unused_unknown_type
index 110d896c76..74d2ac33ad 100644
--- a/lib/dialyzer/test/options2_SUITE_data/results/unused_unknown_type
+++ b/lib/dialyzer/test/options2_SUITE_data/results/unused_unknown_type
@@ -1,2 +1,2 @@
-:0: Unknown type unknown:type1/0:0: Unknown type unknown:type2/0:0: Unknown type unknown:type3/0 \ No newline at end of file
+:0: Unknown type foo:bar/0:0: Unknown type ofoo:obar/0:0: Unknown type owww:y/0:0: Unknown type rfoo:rbar/0:0: Unknown type unknown:type1/0:0: Unknown type unknown:type2/0:0: Unknown type unknown:type3/0:0: Unknown type xxx:y/0:0: Unknown type yyy:x/0:0: Unknown type zzz:arg/1:0: Unknown type zzz:x/0 \ No newline at end of file
diff --git a/lib/dialyzer/test/options2_SUITE_data/src/unused_unknown_type.erl b/lib/dialyzer/test/options2_SUITE_data/src/unused_unknown_type.erl
index 90df7d528a..e6f9d2392c 100644
--- a/lib/dialyzer/test/options2_SUITE_data/src/unused_unknown_type.erl
+++ b/lib/dialyzer/test/options2_SUITE_data/src/unused_unknown_type.erl
@@ -1,10 +1,40 @@
-module(unused_unknown_type).
+-export([t/0]).
+
-export_type([unused/0]).
+-export_type([wide/0, deep/0]).
+-export_type([owide/0, odeep/0]).
+-export_type([arg/0, rargs1/0, rargs2/0]).
+
-type unused() :: unknown:type1().
--record(unused_rec, {a :: unknown:type2()}).
+-record(unused_rec,
+ {a :: unknown:type2(),
+ b :: {{{{{{{{{{{{{{{{{{{{rfoo:rbar()}}}}}}}}}}}}}}}}}}}}}).
-record(rec, {a}).
-type unused_rec() :: #rec{a :: unknown:type3()}.
+
+-type wide() :: {a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,xxx:y()}.
+-type owide() :: {a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,owww:y()}.
+
+%% Deeper than the hardcoded limit in erl_types.erl of 16.
+-type deep() :: {{{{{{{{{{{{{{{{{{{{foo:bar()}}}}}}}}}}}}}}}}}}}}.
+-type odeep() :: {{{{{{{{{{{{{{{{{{{{ofoo:obar()}}}}}}}}}}}}}}}}}}}}.
+
+-type arg1(A) :: [A].
+-type arg() :: arg1({a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,yyy:x()}).
+
+%% No warning about www:x/0 because parameters are currently not
+%% handled if the parameterized type cannot be found.
+-type rargs1() :: zzz:arg({a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,www:x()}).
+
+-type rargs2() :: dict:dict({a,b,c,d,e,f,g,h,i,j,k,l,m,n,o,p,q,r,s,zzz:x()},
+ any()).
+
+%% No warning. The check is commented out as it takes too long.
+-spec t() -> 'a' | {{{{{{{{{{{{{{{{{{{{sfoo:sbar()}}}}}}}}}}}}}}}}}}}}.
+t() ->
+ a.
diff --git a/lib/dialyzer/vsn.mk b/lib/dialyzer/vsn.mk
index 4a1a7c25a0..866a82ee3e 100644
--- a/lib/dialyzer/vsn.mk
+++ b/lib/dialyzer/vsn.mk
@@ -1 +1 @@
-DIALYZER_VSN = 3.2
+DIALYZER_VSN = 3.2.1
diff --git a/lib/diameter/doc/src/diameter.xml b/lib/diameter/doc/src/diameter.xml
index 2cbe48ecce..6b84b22eb5 100644
--- a/lib/diameter/doc/src/diameter.xml
+++ b/lib/diameter/doc/src/diameter.xml
@@ -397,10 +397,10 @@ from the peer offers it.</p>
Note that each tuple communicates one or more AVP values.
It is an error to specify duplicate tuples.</p>
-<marker id="evaluable"/>
+<marker id="eval"/>
</item>
-<tag><c>evaluable() = {M,F,A} | fun() | [evaluable() | A]</c></tag>
+<tag><c>eval() = {M,F,A} | fun() | [eval() | A]</c></tag>
<item>
<p>
An expression that can be evaluated as a function in the following
@@ -418,7 +418,7 @@ eval(F) ->
</pre>
<p>
-Applying an <c>&evaluable;</c>
+Applying an <c>&eval;</c>
<c>E</c> to an argument list <c>A</c>
is meant in the sense of <c>eval([E|A])</c>.</p>
@@ -484,11 +484,11 @@ Matches only those peers whose Origin-Realm has the
specified value, or all peers if the atom <c>any</c>.</p>
</item>
-<tag><c>{eval, &evaluable;}</c></tag>
+<tag><c>{eval, &eval;}</c></tag>
<item>
<p>
Matches only those peers for which the specified
-<c>&evaluable;</c> returns
+<c>&eval;</c> returns
<c>true</c> when applied to the connection's <c>diameter_caps</c>
record.
Any other return value or exception is equivalent to <c>false</c>.</p>
@@ -650,7 +650,7 @@ Result = ResultCode | {capabilities_cb, CB, ResultCode|discard}
Caps = #diameter_caps{}
Pkt = #diameter_packet{}
ResultCode = integer()
-CB = &evaluable;
+CB = &eval;
</pre>
<p>
@@ -798,15 +798,31 @@ be matched by corresponding &capability; configuration, of
</item>
<tag>
-<marker id="incoming_maxlen"/><c>{incoming_maxlen, 0..16777215}</c></tag>
+<marker id="decode_format"/>
+<c>{decode_format, record | list | map | none}</c></tag>
<item>
<p>
-Bound on the expected size of incoming Diameter messages.
-Messages larger than the specified number of bytes are discarded.</p>
+The format of decoded messages and grouped AVPs in the <c>msg</c> field
+of diameter_packet records and <c>value</c> field of diameter_avp
+records respectively.
+If <c>record</c> then a record whose definition is generated from the
+dictionary file in question.
+If <c>list</c> or <c>map</c> then a <c>[Name | Avps]</c> pair where
+<c>Avps</c> is a list of AVP name/value pairs or a map keyed on
+AVP names respectively.
+If <c>none</c> then the atom-valued message name, or <c>undefined</c>
+for a Grouped AVP.
+See also &codec_message;.</p>
<p>
-Defaults to <c>16777215</c>, the maximum value of the 24-bit Message
-Length field in a Diameter Header.</p>
+Defaults to <c>record</c>.</p>
+
+<note>
+<p>
+AVPs are decoded into a list of diameter_avp records in the <c>avps</c>
+field of diameter_packet records independently of
+<c>decode_format</c>.</p>
+</note>
</item>
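
As an illustration of the decode_format option introduced above (not part of the patch; the service name, callback module and capability values are placeholders, and the sketch assumes the diameter application is already started), a service could select the map format roughly like this:

    ok = diameter:start_service(my_service,
                                [{decode_format, map},
                                 {'Origin-Host', "client.example.com"},
                                 {'Origin-Realm', "example.com"},
                                 {'Vendor-Id', 0},
                                 {'Product-Name', "Example"},
                                 {'Auth-Application-Id', [0]},
                                 {application, [{alias, common},
                                                {dictionary, diameter_gen_base_rfc6733},
                                                {module, my_callback_module}]}]).
    %% Decoded messages then arrive as [MsgName | map()] pairs in the
    %% msg field of the diameter_packet record, for example
    %% ['ACA' | #{'Result-Code' := 2001}], instead of as dictionary
    %% records such as #diameter_base_ACA{}.
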
@@ -814,7 +830,7 @@ Length field in a Diameter Header.</p>
| node
| nodes
| [node()]
- | evaluable()}</c></tag>
+ | eval()}</c></tag>
<item>
<p>
The degree to which the service allows multiple transport
@@ -825,7 +841,7 @@ at capabilities exchange.</p>
If <c>[node()]</c> then a connection is rejected if another already
exists on any of the specified nodes.
Types <c>false</c>, <c>node</c>, <c>nodes</c> and
-&evaluable; are equivalent to
+&eval; are equivalent to
<c>[]</c>, <c>[node()]</c>, <c>[node()|nodes()]</c> and the
evaluated value respectively, evaluation of each expression taking
place whenever a new connection is to be established.
@@ -840,7 +856,7 @@ by their own peer and watchdog state machines.</p>
Defaults to <c>nodes</c>.</p>
</item>
-<tag><c>{sequence, {H,N} | &evaluable;}</c></tag>
+<tag><c>{sequence, {H,N} | &eval;}</c></tag>
<item>
<p>
A constant value <c>H</c> for the topmost <c>32-N</c> bits of
@@ -875,7 +891,7 @@ outgoing requests.</p>
</warning>
</item>
-<tag><c>{share_peers, boolean() | [node()] | evaluable()}</c></tag>
+<tag><c>{share_peers, boolean() | [node()] | eval()}</c></tag>
<item>
<p>
Nodes to which peer connections established on the local
@@ -888,7 +904,7 @@ configured to use them: see <c>use_shared_peers</c> below.</p>
If <c>false</c> then peers are not shared.
If <c>[node()]</c> then peers are shared with the specified list of
nodes.
-If <c>evaluable()</c> then peers are shared with the nodes returned
+If <c>eval()</c> then peers are shared with the nodes returned
by the specified function, evaluated whenever a peer connection
becomes available or a remote service requests information about local
connections.
@@ -914,59 +930,36 @@ of a single Diameter node across multiple Erlang nodes.</p>
</note>
</item>
-<tag><c>{spawn_opt, [term()]}</c></tag>
-<item>
-<p>
-Options list passed to &spawn_opt; when spawning a process for an
-incoming Diameter request, unless the transport in question
-specifies another value.
-Options <c>monitor</c> and <c>link</c> are ignored.</p>
-
-<p>
-Defaults to the empty list.</p>
-</item>
-
<tag>
-<marker id="strict_mbit"/><c>{strict_mbit, boolean()}</c></tag>
+<marker id="strict_arities"/><c>{strict_arities, boolean()
+ | encode
+ | decode}</c></tag>
<item>
<p>
-Whether or not to regard an AVP setting the M-bit as erroneous when
-the command grammar in question does not explicitly allow the AVP.
-If <c>true</c> then such AVPs are regarded as 5001 errors,
-DIAMETER_AVP_UNSUPPORTED.
-If <c>false</c> then the M-bit is ignored and policing
-it becomes the receiver's responsibility.</p>
+Whether or not to require that the number of AVPs in a message or
+grouped AVP agree with those specified in the dictionary in question
+when passing messages to &man_app; callbacks.
+If <c>true</c> then mismatches in an outgoing message cause message
+encoding to fail, while mismatches in an incoming message are reported
+as 5005/5009 errors in the errors field of the diameter_packet record
+passed to &app_handle_request; or &app_handle_answer; callbacks.
+If <c>false</c> then neither error is enforced/detected.
+If <c>encode</c> or <c>decode</c> then errors are only
+enforced/detected on outgoing or incoming messages respectively.</p>
<p>
Defaults to <c>true</c>.</p>
-<warning>
-<p>
-RFC 6733 is unclear about the semantics of the M-bit.
-One the one hand, the CCF specification in section 3.2 documents AVP
-in a command grammar as meaning <em>any</em> arbitrary AVP; on the
-other hand, 1.3.4 states that AVPs setting the M-bit cannot be added
-to an existing command: the modified command must instead be
-placed in a new Diameter application.</p>
-<p>
-The reason for the latter is presumably interoperability:
-allowing arbitrary AVPs setting the M-bit in a command makes its
-interpretation implementation-dependent, since there's no
-guarantee that all implementations will understand the same set of
-arbitrary AVPs in the context of a given command.
-However, interpreting <c>AVP</c> in a command grammar as any
-AVP, regardless of M-bit, renders 1.3.4 meaningless, since the receiver
-can simply ignore any AVP it thinks isn't relevant, regardless of the
-sender's intent.</p>
+<note>
<p>
-Beware of confusing mandatory in the sense of the M-bit with mandatory
-in the sense of the command grammar.
-The former is a semantic requirement: that the receiver understand the
-semantics of the AVP in the context in question.
-The latter is a syntactic requirement: whether or not the AVP must
-occur in the message in question.</p>
-</warning>
-
+Disabling arity checks affects the form of messages at encode/decode.
+In particular, decoded AVPs are represented as lists of values,
+regardless of the AVP's arity (ie. expected number in the message/AVP
+grammar in question), and values are expected to be supplied as lists
+at encode.
+This differs from the historic decode behaviour of representing AVPs
+of arity 1 as bare values, not wrapped in a list.</p>
+</note>
</item>
<tag>
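
To illustrate the encode-side difference described in the note above (not part of the patch; the AVP values are placeholders and the messages are abbreviated), arity-1 AVPs take bare values with the default strict_arities, but are supplied as lists when arity checks are disabled:

    %% Default, {strict_arities, true}: bare values for arity-1 AVPs.
    ACR = ['ACR', {'Session-Id', "client.example.com;1;1"},
                  {'Accounting-Record-Type', 1},
                  {'Accounting-Record-Number', 0}].

    %% {strict_arities, false}: every value is supplied as a list,
    %% also for AVPs whose grammar arity is 1.
    UncheckedACR = ['ACR', {'Session-Id', ["client.example.com;1;1"]},
                           {'Accounting-Record-Type', [1]},
                           {'Accounting-Record-Number', [0]}].
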
@@ -993,7 +986,27 @@ The default value is for backwards compatibility.</p>
</item>
-<tag><c>{use_shared_peers, boolean() | [node()] | evaluable()}</c></tag>
+<tag>
+<marker id="traffic_counters"/><c>{traffic_counters, boolean()}</c></tag>
+<item>
+<p>
+Whether or not to count application-specific messages; those for which
+&man_app; callbacks take place.
+If <c>false</c> then only messages handled by diameter itself are counted:
+CER/CEA, DWR/DWA, DPR/DPA.</p>
+
+<p>
+Defaults to <c>true</c>.</p>
+
+<note>
+<p>
+Disabling counters is a performance improvement, but means that the
+omitted counters are not returned by &service_info;.</p>
+</note>
+
+</item>
+
+<tag><c>{use_shared_peers, boolean() | [node()] | eval()}</c></tag>
<item>
<p>
Nodes from which communicated peers are made available in
@@ -1003,7 +1016,7 @@ the remote candidates list of &app_pick_peer; callbacks.</p>
If <c>false</c> then remote peers are not used.
If <c>[node()]</c> then only peers from the specified list of nodes
are used.
-If <c>evaluable()</c> then only peers returned by the specified
+If <c>eval()</c> then only peers returned by the specified
function are used, evaluated whenever a remote service communicates
information about an available peer connection.
The value <c>true</c> is equivalent to <c>fun &nodes;</c>.
@@ -1028,6 +1041,15 @@ each node from which requests are sent.</p>
</warning>
</item>
+<tag><c>&transport_opt;</c></tag>
+<item>
+<p>
+Any transport option except <c>applications</c> or
+<c>capabilities</c>.
+Used as defaults for transport configuration, values passed to
+&add_transport; overriding values configured on the service.</p>
+</item>
+
</taglist>
<marker id="transport_opt"/>
@@ -1061,6 +1083,37 @@ implies having to set matching *-Application-Id AVPs in a
</item>
<tag>
+<marker id="avp_dictionaries"/><c>{avp_dictionaries, [module()]}</c></tag>
+<item>
+<p>
+A list of alternate dictionary modules with which to encode/decode
+AVPs that are not defined by the dictionary of the application in
+question.
+At decode, such AVPs are represented as diameter_avp records in the
+<c>'AVP'</c> field of a decoded message or Grouped AVP, the first
+alternate that succeeds in decoding the AVP setting the record's value
+field.
+At encode, values in an <c>'AVP'</c> list can be passed as AVP
+name/value 2-tuples, and it is an encode error for no alternate to
+define the AVP of such a tuple.</p>
+
+<p>
+Defaults to the empty list.</p>
+
+<note>
+<p>
+The motivation for alternate dictionaries is RFC 7683, Diameter
+Overload Indication Conveyance (DOIC), which defines AVPs to
+be piggybacked onto existing application messages rather than defining
+an application of its own.
+The DOIC dictionary is provided by the diameter application, as module
+<c>diameter_gen_doic_rfc7683</c>, but alternate dictionaries can be
+used to encode/decode any set of AVPs not known to an application
+dictionary.</p>
+</note>
+</item>
+
+<tag>
<marker id="capabilities"/><c>{capabilities, [&capability;]}</c></tag>
<item>
<p>
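
For illustration (not part of the patch; the service name and addresses are placeholders), the DOIC dictionary mentioned above could be configured as an alternate AVP dictionary on a transport as follows:

    {ok, _Ref} =
        diameter:add_transport(my_service,
                               {connect, [{transport_module, diameter_tcp},
                                          {transport_config, [{raddr, {192,0,2,1}},
                                                              {rport, 3868}]},
                                          {avp_dictionaries, [diameter_gen_doic_rfc7683]}]}).
    %% OC-Supported-Features and OC-OLR AVPs piggybacked on application
    %% messages are then decoded into diameter_avp records in the 'AVP'
    %% field, instead of being left with an undecoded binary value.
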
@@ -1075,7 +1128,7 @@ TLS is desired over TCP as implemented by &man_tcp;.</p>
</item>
<tag>
-<marker id="capabilities_cb"/><c>{capabilities_cb, &evaluable;}</c></tag>
+<marker id="capabilities_cb"/><c>{capabilities_cb, &eval;}</c></tag>
<item>
<p>
Callback invoked upon reception of CER/CEA during capabilities
@@ -1169,7 +1222,7 @@ transport.</p>
</item>
<tag>
-<marker id="disconnect_cb"/><c>{disconnect_cb, &evaluable;}</c></tag>
+<marker id="disconnect_cb"/><c>{disconnect_cb, &eval;}</c></tag>
<item>
<p>
Callback invoked prior to terminating the transport process of a
@@ -1269,6 +1322,19 @@ Defaults to 5000.</p>
</item>
<tag>
+<marker id="incoming_maxlen"/><c>{incoming_maxlen, 0..16777215}</c></tag>
+<item>
+<p>
+Bound on the expected size of incoming Diameter messages.
+Messages larger than the specified number of bytes are discarded.</p>
+
+<p>
+Defaults to <c>16777215</c>, the maximum value of the 24-bit Message
+Length field in a Diameter Header.</p>
+
+</item>
+
+<tag>
<marker id="length_errors"/><c>{length_errors, exit|handle|discard}</c></tag>
<item>
<p>
@@ -1326,7 +1392,64 @@ incoming Diameter request.
Options <c>monitor</c> and <c>link</c> are ignored.</p>
<p>
-Defaults to the list configured on the service if not specified.</p>
+Defaults to the empty list.</p>
+</item>
+
+<tag>
+<marker id="strict_capx"/><c>{strict_capx, boolean()]}</c></tag>
+<item>
+<p>
+Whether or not to enforce the RFC 6733 requirement that any message
+before capabilities exchange should close the peer connection.
+If <c>false</c> then unexpected messages are discarded.</p>
+
+<p>
+Defaults to <c>true</c>.
+Changing this results in non-standard behaviour, but can be useful in
+case peers are known to behave badly.</p>
+</item>
+
+<tag>
+<marker id="strict_mbit"/><c>{strict_mbit, boolean()}</c></tag>
+<item>
+<p>
+Whether or not to regard an AVP setting the M-bit as erroneous when
+the command grammar in question does not explicitly allow the AVP.
+If <c>true</c> then such AVPs are regarded as 5001 errors,
+DIAMETER_AVP_UNSUPPORTED.
+If <c>false</c> then the M-bit is ignored and policing
+it becomes the receiver's responsibility.</p>
+
+<p>
+Defaults to <c>true</c>.</p>
+
+<warning>
+<p>
+RFC 6733 is unclear about the semantics of the M-bit.
+On the one hand, the CCF specification in section 3.2 documents AVP
+in a command grammar as meaning <em>any</em> arbitrary AVP; on the
+other hand, 1.3.4 states that AVPs setting the M-bit cannot be added
+to an existing command: the modified command must instead be
+placed in a new Diameter application.</p>
+<p>
+The reason for the latter is presumably interoperability:
+allowing arbitrary AVPs setting the M-bit in a command makes its
+interpretation implementation-dependent, since there's no
+guarantee that all implementations will understand the same set of
+arbitrary AVPs in the context of a given command.
+However, interpreting <c>AVP</c> in a command grammar as any
+AVP, regardless of M-bit, renders 1.3.4 meaningless, since the receiver
+can simply ignore any AVP it thinks isn't relevant, regardless of the
+sender's intent.</p>
+<p>
+Beware of confusing mandatory in the sense of the M-bit with mandatory
+in the sense of the command grammar.
+The former is a semantic requirement: that the receiver understand the
+semantics of the AVP in the context in question.
+The latter is a syntactic requirement: whether or not the AVP must
+occur in the message in question.</p>
+</warning>
+
</item>
<tag>
diff --git a/lib/diameter/doc/src/diameter_app.xml b/lib/diameter/doc/src/diameter_app.xml
index dfcd00975b..aa334beb21 100644
--- a/lib/diameter/doc/src/diameter_app.xml
+++ b/lib/diameter/doc/src/diameter_app.xml
@@ -13,7 +13,8 @@
<header>
<copyright>
-<year>2011</year><year>2016</year>
+<year>2011</year>
+<year>2017</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -319,7 +320,7 @@ or &peer_down; callback.</p>
<v>Action = Send | Discard | {eval_packet, Action, PostF}</v>
<v>Send = {send, &packet; | &message;}</v>
<v>Discard = {discard, Reason} | discard</v>
-<v>PostF = &mod_evaluable;}</v>
+<v>PostF = &mod_eval;}</v>
</type>
<desc>
<p>
@@ -371,7 +372,7 @@ discarded}</c>.</p>
<v>Action = Send | Discard | {eval_packet, Action, PostF}</v>
<v>Send = {send, &packet; | &message;}</v>
<v>Discard = {discard, Reason} | discard</v>
-<v>PostF = &mod_evaluable;}</v>
+<v>PostF = &mod_eval;}</v>
</type>
<desc>
<p>
@@ -478,7 +479,7 @@ not selected.</p>
| {answer_message, 3000..3999|5000..5999}
| {protocol_error, 3000..3999}</v>
<v>Opt = &mod_call_opt;</v>
-<v>PostF = &mod_evaluable;</v>
+<v>PostF = &mod_eval;</v>
</type>
<desc>
<p>
diff --git a/lib/diameter/doc/src/diameter_codec.xml b/lib/diameter/doc/src/diameter_codec.xml
index 0117c1c88a..5124b49484 100644
--- a/lib/diameter/doc/src/diameter_codec.xml
+++ b/lib/diameter/doc/src/diameter_codec.xml
@@ -4,7 +4,10 @@
'<seealso marker="diameter_dict#MESSAGE_RECORDS">diameter_dict(4)</seealso>'>
<!ENTITY types
'<seealso marker="diameter_dict#DATA_TYPES">diameter_dict(4)</seealso>'>
- <!ENTITY % also SYSTEM "seealso.ent" >
+ <!ENTITY decode_format
+ '<seealso marker="diameter#decode_format">decode format</seealso>'>
+
+<!ENTITY % also SYSTEM "seealso.ent" >
<!ENTITY % here SYSTEM "seehere.ent" >
%also;
%here;
@@ -145,7 +148,8 @@ question.</p>
<p>
The decoded value of an AVP.
Will be <c>undefined</c> on decode if the data bytes could
-not be decoded or the AVP is unknown.
+not be decoded, the AVP is unknown, or if the &decode_format; is
+<c>none</c>.
The type of a decoded value is as documented in &types;.</p>
</item>
@@ -230,7 +234,8 @@ header.</p>
</item>
<tag>
-<marker id="message"/><c>message() = record() | list()</c></tag>
+<marker id="message"/><c>message() = record()
+ | maybe_improper_list()</c></tag>
<item>
<p>
The representation of a Diameter message as passed to
@@ -240,7 +245,10 @@ a message as defined in a dictionary file is encoded as a record with
one field for each component AVP.
Equivalently, a message can also be encoded as a list whose head is
the atom-valued message name (as specified in the relevant dictionary
-file) and whose tail is a list of <c>{AvpName, AvpValue}</c> pairs.</p>
+file) and whose tail is either a list of AVP name/value
+pairs or a map with values keyed on AVP names.
+The format at decode is determined by &mod_decode_format;.
+Any of the formats is accepted at encode.</p>
<p>
Another list-valued representation allows a message to be specified
@@ -283,15 +291,16 @@ value other than <c>undefined</c>.</p>
<item>
<p>
The incoming/outgoing message.
-For an incoming message, a record if the message can be
-decoded in a non-relay application, <c>undefined</c> otherwise.
+For an incoming message, a term corresponding to the configured
+&decode_format; if the message can be decoded in a non-relay
+application, <c>undefined</c> otherwise.
For an outgoing message, setting a <c>[&header; | &avp;]</c> list is
equivalent to setting the <c>header</c> and <c>avps</c> fields to the
corresponding values.</p>
<warning>
<p>
-A record-valued <c>msg</c> field does <em>not</em> imply an absence of
+A value in the <c>msg</c> field does <em>not</em> imply an absence of
decode errors.
The <c>errors</c> field should also be examined.</p>
</warning>
diff --git a/lib/diameter/doc/src/diameter_sctp.xml b/lib/diameter/doc/src/diameter_sctp.xml
index 9b6d629f79..c9b74a9ec5 100644
--- a/lib/diameter/doc/src/diameter_sctp.xml
+++ b/lib/diameter/doc/src/diameter_sctp.xml
@@ -16,7 +16,7 @@
<header>
<copyright>
<year>2011</year>
-<year>2016</year>
+<year>2017</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -116,7 +116,6 @@ and port respectively.</p>
Multiple <c>ip</c> options can be specified for a multihomed peer.
If none are specified then the values of <c>Host-IP-Address</c>
in the <c>diameter_service</c> record are used.
-(In particular, one of these must be specified.)
Option <c>port</c> defaults to 3868 for a listening transport and 0 for a
connecting transport.</p>
diff --git a/lib/diameter/doc/src/diameter_tcp.xml b/lib/diameter/doc/src/diameter_tcp.xml
index 6ca280c52b..1d65d14257 100644
--- a/lib/diameter/doc/src/diameter_tcp.xml
+++ b/lib/diameter/doc/src/diameter_tcp.xml
@@ -170,14 +170,11 @@ that will not be forthcoming, which will eventually cause the RFC 3539
watchdog to take down the connection.</p>
<p>
-If an <c>ip</c> option is not specified then the first element of a
-non-empty <c>Host-IP-Address</c> list in <c>Svc</c> provides the local
-IP address.
-If neither is specified then the default address selected by &gen_tcp;
-is used.
-In all cases, the selected address is either returned from
-&start; or passed in a <c>connected</c> message over the transport
-interface.</p>
+The first element of a non-empty <c>Host-IP-Address</c> list in
+<c>Svc</c> provides the local IP address if an <c>ip</c> option is not
+specified.
+The local address is either returned from &start; or passed in a
+<c>connected</c> message over the transport interface.</p>
</desc>
</func>
diff --git a/lib/diameter/doc/src/seealso.ent b/lib/diameter/doc/src/seealso.ent
index e5c284c6e8..c5a53670d0 100644
--- a/lib/diameter/doc/src/seealso.ent
+++ b/lib/diameter/doc/src/seealso.ent
@@ -4,7 +4,7 @@
%CopyrightBegin%
-Copyright Ericsson AB 2012-2015. All Rights Reserved.
+Copyright Ericsson AB 2012-2017. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -53,7 +53,7 @@ significant.
<!ENTITY mod_application_opt '<seealso marker="diameter#application_opt">diameter:application_opt()</seealso>'>
<!ENTITY mod_call_opt '<seealso marker="diameter#call_opt">diameter:call_opt()</seealso>'>
<!ENTITY mod_capability '<seealso marker="diameter#capability">diameter:capability()</seealso>'>
-<!ENTITY mod_evaluable '<seealso marker="diameter#evaluable">diameter:evaluable()</seealso>'>
+<!ENTITY mod_eval '<seealso marker="diameter#eval">diameter:eval()</seealso>'>
<!ENTITY mod_peer_filter '<seealso marker="diameter#peer_filter">diameter:peer_filter()</seealso>'>
<!ENTITY mod_service_event '<seealso marker="diameter#service_event">diameter:service_event()</seealso>'>
<!ENTITY mod_service_event_info '<seealso marker="diameter#service_event_info">diameter:service_event_info()</seealso>'>
@@ -72,6 +72,7 @@ significant.
<!ENTITY watchdog_timer '<seealso marker="#watchdog_timer">watchdog_timer</seealso>'>
<!ENTITY mod_string_decode '<seealso marker="diameter#service_opt">diameter:service_opt()</seealso> <seealso marker="diameter#string_decode">string_decode</seealso>'>
+<!ENTITY mod_decode_format '<seealso marker="diameter#service_opt">diameter:service_opt()</seealso> <seealso marker="diameter#decode_format">decode_format</seealso>'>
<!-- diameter_app -->
diff --git a/lib/diameter/doc/standard/rfc7683.txt b/lib/diameter/doc/standard/rfc7683.txt
new file mode 100644
index 0000000000..ab2392c6c0
--- /dev/null
+++ b/lib/diameter/doc/standard/rfc7683.txt
@@ -0,0 +1,2355 @@
+
+
+
+
+
+
+Internet Engineering Task Force (IETF) J. Korhonen, Ed.
+Request for Comments: 7683 Broadcom Corporation
+Category: Standards Track S. Donovan, Ed.
+ISSN: 2070-1721 B. Campbell
+ Oracle
+ L. Morand
+ Orange Labs
+ October 2015
+
+
+ Diameter Overload Indication Conveyance
+
+Abstract
+
+ This specification defines a base solution for Diameter overload
+ control, referred to as Diameter Overload Indication Conveyance
+ (DOIC).
+
+Status of This Memo
+
+ This is an Internet Standards Track document.
+
+ This document is a product of the Internet Engineering Task Force
+ (IETF). It represents the consensus of the IETF community. It has
+ received public review and has been approved for publication by the
+ Internet Engineering Steering Group (IESG). Further information on
+ Internet Standards is available in Section 2 of RFC 5741.
+
+ Information about the current status of this document, any errata,
+ and how to provide feedback on it may be obtained at
+ http://www.rfc-editor.org/info/rfc7683.
+
+Copyright Notice
+
+ Copyright (c) 2015 IETF Trust and the persons identified as the
+ document authors. All rights reserved.
+
+ This document is subject to BCP 78 and the IETF Trust's Legal
+ Provisions Relating to IETF Documents
+ (http://trustee.ietf.org/license-info) in effect on the date of
+ publication of this document. Please review these documents
+ carefully, as they describe your rights and restrictions with respect
+ to this document. Code Components extracted from this document must
+ include Simplified BSD License text as described in Section 4.e of
+ the Trust Legal Provisions and are provided without warranty as
+ described in the Simplified BSD License.
+
+Table of Contents
+
+ 1. Introduction . . . . . . . . . . . . . . . . . . . . . . . . 3
+ 2. Terminology and Abbreviations . . . . . . . . . . . . . . . . 3
+ 3. Conventions Used in This Document . . . . . . . . . . . . . . 5
+ 4. Solution Overview . . . . . . . . . . . . . . . . . . . . . . 5
+ 4.1. Piggybacking . . . . . . . . . . . . . . . . . . . . . . 6
+ 4.2. DOIC Capability Announcement . . . . . . . . . . . . . . 7
+ 4.3. DOIC Overload Condition Reporting . . . . . . . . . . . . 9
+ 4.4. DOIC Extensibility . . . . . . . . . . . . . . . . . . . 11
+ 4.5. Simplified Example Architecture . . . . . . . . . . . . . 12
+ 5. Solution Procedures . . . . . . . . . . . . . . . . . . . . . 12
+ 5.1. Capability Announcement . . . . . . . . . . . . . . . . . 12
+ 5.1.1. Reacting Node Behavior . . . . . . . . . . . . . . . 13
+ 5.1.2. Reporting Node Behavior . . . . . . . . . . . . . . . 13
+ 5.1.3. Agent Behavior . . . . . . . . . . . . . . . . . . . 14
+ 5.2. Overload Report Processing . . . . . . . . . . . . . . . 15
+ 5.2.1. Overload Control State . . . . . . . . . . . . . . . 15
+ 5.2.2. Reacting Node Behavior . . . . . . . . . . . . . . . 19
+ 5.2.3. Reporting Node Behavior . . . . . . . . . . . . . . . 20
+ 5.3. Protocol Extensibility . . . . . . . . . . . . . . . . . 22
+ 6. Loss Algorithm . . . . . . . . . . . . . . . . . . . . . . . 23
+ 6.1. Overview . . . . . . . . . . . . . . . . . . . . . . . . 23
+ 6.2. Reporting Node Behavior . . . . . . . . . . . . . . . . . 24
+ 6.3. Reacting Node Behavior . . . . . . . . . . . . . . . . . 24
+ 7. Attribute Value Pairs . . . . . . . . . . . . . . . . . . . . 25
+ 7.1. OC-Supported-Features AVP . . . . . . . . . . . . . . . . 25
+ 7.2. OC-Feature-Vector AVP . . . . . . . . . . . . . . . . . . 25
+ 7.3. OC-OLR AVP . . . . . . . . . . . . . . . . . . . . . . . 26
+ 7.4. OC-Sequence-Number AVP . . . . . . . . . . . . . . . . . 26
+ 7.5. OC-Validity-Duration AVP . . . . . . . . . . . . . . . . 26
+ 7.6. OC-Report-Type AVP . . . . . . . . . . . . . . . . . . . 27
+ 7.7. OC-Reduction-Percentage AVP . . . . . . . . . . . . . . . 27
+ 7.8. AVP Flag Rules . . . . . . . . . . . . . . . . . . . . . 28
+ 8. Error Response Codes . . . . . . . . . . . . . . . . . . . . 28
+ 9. IANA Considerations . . . . . . . . . . . . . . . . . . . . . 29
+ 9.1. AVP Codes . . . . . . . . . . . . . . . . . . . . . . . . 29
+ 9.2. New Registries . . . . . . . . . . . . . . . . . . . . . 29
+ 10. Security Considerations . . . . . . . . . . . . . . . . . . . 30
+ 10.1. Potential Threat Modes . . . . . . . . . . . . . . . . . 30
+ 10.2. Denial-of-Service Attacks . . . . . . . . . . . . . . . 31
+ 10.3. Noncompliant Nodes . . . . . . . . . . . . . . . . . . . 32
+ 10.4. End-to-End Security Issues . . . . . . . . . . . . . . . 32
+ 11. References . . . . . . . . . . . . . . . . . . . . . . . . . 34
+ 11.1. Normative References . . . . . . . . . . . . . . . . . . 34
+ 11.2. Informative References . . . . . . . . . . . . . . . . . 34
+ Appendix A. Issues Left for Future Specifications . . . . . . . 35
+ A.1. Additional Traffic Abatement Algorithms . . . . . . . . . 35
+ A.2. Agent Overload . . . . . . . . . . . . . . . . . . . . . 35
+ A.3. New Error Diagnostic AVP . . . . . . . . . . . . . . . . 35
+ Appendix B. Deployment Considerations . . . . . . . . . . . . . 35
+ Appendix C. Considerations for Applications Integrating the DOIC
+ Solution . . . . . . . . . . . . . . . . . . . . . . 36
+ C.1. Application Classification . . . . . . . . . . . . . . . 36
+ C.2. Implications of Application Type Overload . . . . . . . . 37
+ C.3. Request Transaction Classification . . . . . . . . . . . 38
+ C.4. Request Type Overload Implications . . . . . . . . . . . 39
+ Contributors . . . . . . . . . . . . . . . . . . . . . . . . . . 41
+ Authors' Addresses . . . . . . . . . . . . . . . . . . . . . . . 42
+
+1. Introduction
+
+ This specification defines a base solution for Diameter overload
+ control, referred to as Diameter Overload Indication Conveyance
+ (DOIC), based on the requirements identified in [RFC7068].
+
+ This specification addresses Diameter overload control between
+ Diameter nodes that support the DOIC solution. The solution, which
+ is designed to apply to existing and future Diameter applications,
+ requires no changes to the Diameter base protocol [RFC6733] and is
+ deployable in environments where some Diameter nodes do not implement
+ the Diameter overload control solution defined in this specification.
+
+ A new application specification can incorporate the overload control
+ mechanism specified in this document by making it mandatory to
+ implement for the application and referencing this specification
+ normatively. It is the responsibility of the Diameter application
+ designers to define how overload control mechanisms work on that
+ application.
+
+ Note that the overload control solution defined in this specification
+ does not address all the requirements listed in [RFC7068]. A number
+ of features related to overload control are left for future
+ specifications. See Appendix A for a list of extensions that are
+ currently being considered.
+
+2. Terminology and Abbreviations
+
+ Abatement
+
+ Reaction to receipt of an overload report resulting in a reduction
+ in traffic sent to the reporting node. Abatement actions include
+ diversion and throttling.
+
+ Abatement Algorithm
+
+ An extensible method requested by reporting nodes and used by
+ reacting nodes to reduce the amount of traffic sent during an
+ occurrence of overload control.
+
+ Diversion
+
+ An overload abatement treatment where the reacting node selects
+ alternate destinations or paths for requests.
+
+ Host-Routed Requests
+
+ Requests that a reacting node knows will be served by a particular
+ host, either due to the presence of a Destination-Host Attribute
+ Value Pair (AVP) or by some other local knowledge on the part of
+ the reacting node.
+
+ Overload Control State (OCS)
+
+ Internal state maintained by a reporting or reacting node
+ describing occurrences of overload control.
+
+ Overload Report (OLR)
+
+ Overload control information for a particular overload occurrence
+ sent by a reporting node.
+
+ Reacting Node
+
+ A Diameter node that acts upon an overload report.
+
+ Realm-Routed Requests
+
+ Requests sent by a reacting node where the reacting node does not
+ know to which host the request will be routed.
+
+ Reporting Node
+
+ A Diameter node that generates an overload report. (This may or
+ may not be the overloaded node.)
+
+ Throttling
+
+ An abatement treatment that limits the number of requests sent by
+ the reacting node. Throttling can include a Diameter Client
+ choosing to not send requests, or a Diameter Agent or Server
+ rejecting requests with appropriate error responses. In both
+ cases, the result of the throttling is a permanent rejection of
+ the transaction.
+
+3. Conventions Used in This Document
+
+ The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL NOT",
+ "SHOULD", "SHOULD NOT", "RECOMMENDED", "MAY", and "OPTIONAL" in this
+ document are to be interpreted as described in RFC 2119 [RFC2119].
+
+ The interpretation from RFC 2119 [RFC2119] does not apply for the
+ above listed words when they are not used in all caps.
+
+4. Solution Overview
+
+ The Diameter Overload Information Conveyance (DOIC) solution allows
+ Diameter nodes to request that other Diameter nodes perform overload
+ abatement actions, that is, actions to reduce the load offered to the
+ overloaded node or realm.
+
+ A Diameter node that supports DOIC is known as a "DOIC node". Any
+ Diameter node can act as a DOIC node, including Diameter Clients,
+ Diameter Servers, and Diameter Agents. DOIC nodes are further
+ divided into "Reporting Nodes" and "Reacting Nodes." A reporting
+ node requests overload abatement by sending Overload Reports (OLRs).
+
+ A reacting node acts upon OLRs and performs whatever actions are
+ needed to fulfill the abatement requests included in the OLRs. A
+ reporting node may report overload on its own behalf or on behalf of
+ other nodes. Likewise, a reacting node may perform overload
+ abatement on its own behalf or on behalf of other nodes.
+
+ A Diameter node's role as a DOIC node is independent of its Diameter
+ role. For example, Diameter Agents may act as DOIC nodes, even
+ though they are not endpoints in the Diameter sense. Since Diameter
+ enables bidirectional applications, where Diameter Servers can send
+ requests towards Diameter Clients, a given Diameter node can
+ simultaneously act as both a reporting node and a reacting node.
+
+ Likewise, a Diameter Agent may act as a reacting node from the
+ perspective of upstream nodes, and a reporting node from the
+ perspective of downstream nodes.
+
+ DOIC nodes do not generate new messages to carry DOIC-related
+ information. Rather, they "piggyback" DOIC information over existing
+ Diameter messages by inserting new AVPs into existing Diameter
+ requests and responses. Nodes indicate support for DOIC, and any
+ needed DOIC parameters, by inserting an OC-Supported-Features AVP
+ (Section 7.1) into existing requests and responses. Reporting nodes
+ send OLRs by inserting OC-OLR AVPs (Section 7.3).
+
+ A given OLR applies to the Diameter realm and application of the
+ Diameter message that carries it. If a reporting node supports more
+ than one realm and/or application, it reports independently for each
+ combination of realm and application. Similarly, the OC-Supported-
+ Features AVP applies to the realm and application of the enclosing
+ message. This implies that a node may support DOIC for one
+ application and/or realm, but not another, and may indicate different
+ DOIC parameters for each application and realm for which it supports
+ DOIC.
+
+ Reacting nodes perform overload abatement according to an agreed-upon
+ abatement algorithm. An abatement algorithm defines the meaning of
+ some of the parameters of an OLR and the procedures required for
+ overload abatement. An overload abatement algorithm separates
+ Diameter requests into two sets. The first set contains the requests
+ that are to undergo overload abatement treatment of either throttling
+ or diversion. The second set contains the requests that are to be
+ given normal routing treatment. This document specifies a single
+ "must-support" algorithm, namely, the "loss" algorithm (Section 6).
+ Future specifications may introduce new algorithms.
+
+ Overload conditions may vary in scope. For example, a single
+ Diameter node may be overloaded, in which case, reacting nodes may
+ attempt to send requests to other destinations. On the other hand,
+ an entire Diameter realm may be overloaded, in which case, such
+ attempts would do harm. DOIC OLRs have a concept of "report type"
+ (Section 7.6), where the type defines such behaviors. Report types
+ are extensible. This document defines report types for overload of a
+ specific host and for overload of an entire realm.
+
+ DOIC works through non-supporting Diameter Agents that properly pass
+ unknown AVPs unchanged.
+
+4.1. Piggybacking
+
+ There is no new Diameter application defined to carry overload-
+ related AVPs. The overload control AVPs defined in this
+ specification have been designed to be piggybacked on top of existing
+ application messages. This is made possible by adding the optional
+ overload control AVPs OC-OLR and OC-Supported-Features into existing
+ commands.
+
+ Reacting nodes indicate support for DOIC by including the
+ OC-Supported-Features AVP in all request messages originated or
+ relayed by the reacting node.
+
+ Reporting nodes indicate support for DOIC by including the
+ OC-Supported-Features AVP in all answer messages that are originated
+ or relayed by the reporting node and that are in response to a
+ request that contained the OC-Supported-Features AVP. Reporting
+ nodes may include overload reports using the OC-OLR AVP in answer
+ messages.
+
+ Note that the overload control solution does not have fixed server
+ and client roles. The DOIC node role is determined based on the
+ message type: whether the message is a request (i.e., sent by a
+ "reacting node") or an answer (i.e., sent by a "reporting node").
+ Therefore, in a typical client-server deployment, the Diameter Client
+ may report its overload condition to the Diameter Server for any
+ Diameter-Server-initiated message exchange. An example of such is
+ the Diameter Server requesting a re-authentication from a Diameter
+ Client.
+
+4.2. DOIC Capability Announcement
+
+ The DOIC solution supports the ability for Diameter nodes to
+ determine if other nodes in the path of a request support the
+ solution. This capability is referred to as DOIC Capability
+ Announcement (DCA) and is separate from the Diameter Capability
+ Exchange.
+
+ The DCA mechanism uses the OC-Supported-Features AVPs to indicate the
+ Diameter overload features supported.
+
+ The first node in the path of a Diameter request that supports the
+ DOIC solution inserts the OC-Supported-Features AVP in the request
+ message.
+
+ The individual features supported by the DOIC nodes are indicated in
+ the OC-Feature-Vector AVP. Any semantics associated with the
+ features will be defined in extension specifications that introduce
+ the features.
+
+ Note: As discussed elsewhere in the document, agents in the path
+ of the request can modify the OC-Supported-Features AVP.
+
+
+ Note: The DOIC solution must support deployments where Diameter
+ Clients and/or Diameter Servers do not support the DOIC solution.
+ In this scenario, Diameter Agents that support the DOIC solution
+ may handle overload abatement for the non-supporting Diameter
+ nodes. In this case, the DOIC agent will insert the OC-Supported-
+ Features AVP in requests that do not already contain one, telling
+ the reporting node that there is a DOIC node that will handle
+ overload abatement. For transactions where there was an
+      OC-Supported-Features AVP in the request, the agent will insert
+ the OC-Supported-Features AVP in answers, telling the reacting
+ node that there is a reporting node.
+
+ The OC-Feature-Vector AVP will always contain an indication of
+ support for the loss overload abatement algorithm defined in this
+ specification (see Section 6). This ensures that a reporting node
+ always supports at least one of the advertised abatement algorithms
+   received in a request message.
+
+ The reporting node inserts the OC-Supported-Features AVP in all
+ answer messages to requests that contained the OC-Supported-Features
+ AVP. The contents of the reporting node's OC-Supported-Features AVP
+ indicate the set of Diameter overload features supported by the
+ reporting node. This specification defines one exception -- the
+ reporting node only includes an indication of support for one
+ overload abatement algorithm, independent of the number of overload
+ abatement algorithms actually supported by the reacting node. The
+ overload abatement algorithm indicated is the algorithm that the
+ reporting node intends to use should it enter an overload condition.
+ Reacting nodes can use the indicated overload abatement algorithm to
+ prepare for possible overload reports and must use the indicated
+ overload abatement algorithm if traffic reduction is actually
+ requested.
+
+ Note that the loss algorithm defined in this document is a
+ stateless abatement algorithm. As a result, it does not require
+ any actions by reacting nodes prior to the receipt of an overload
+ report. Stateful abatement algorithms that base the abatement
+ logic on a history of request messages sent might require reacting
+ nodes to maintain state in advance of receiving an overload report
+ to ensure that the overload reports can be properly handled.
+
+ While it should only be done in exceptional circumstances and not
+ during an active occurrence of overload, a reacting node that wishes
+ to transition to a different abatement algorithm can stop advertising
+ support for the algorithm indicated by the reporting node, as long as
+ support for the loss algorithm is always advertised.
+
+ The DCA mechanism must also allow the scenario where the set of
+ features supported by the sender of a request and by agents in the
+ path of a request differ. In this case, the agent can update the
+ OC-Supported-Features AVP to reflect the mixture of the two sets of
+ supported features.
+
+ Note: The logic to determine if the content of the OC-Supported-
+ Features AVP should be changed is out of scope for this document,
+ as is the logic to determine the content of a modified
+ OC-Supported-Features AVP. These are left to implementation
+ decisions. Care must be taken not to introduce interoperability
+ issues for downstream or upstream DOIC nodes. As such, the agent
+ must act as a fully compliant reporting node to the downstream
+ reacting node and as a fully compliant reacting node to the
+ upstream reporting node.
+
+4.3. DOIC Overload Condition Reporting
+
+ As with DOIC capability announcement, overload condition reporting
+ uses new AVPs (Section 7.3) to indicate an overload condition.
+
+ The OC-OLR AVP is referred to as an overload report. The OC-OLR AVP
+ includes the type of report, a sequence number, the length of time
+ that the report is valid, and AVPs specific to the abatement
+ algorithm.
+
+ Two types of overload reports are defined in this document: host
+ reports and realm reports.
+
+ A report of type "HOST_REPORT" is sent to indicate the overload of a
+ specific host, identified by the Origin-Host AVP of the message
+ containing the OLR, for the Application-ID indicated in the
+ transaction. When receiving an OLR of type "HOST_REPORT", a reacting
+ node applies overload abatement treatment to the host-routed requests
+ identified by the overload abatement algorithm (as defined in
+ Section 2) sent for this application to the overloaded host.
+
+ A report of type "REALM_REPORT" is sent to indicate the overload of a
+ realm for the Application-ID indicated in the transaction. The
+ overloaded realm is identified by the Destination-Realm AVP of the
+ message containing the OLR. When receiving an OLR of type
+ "REALM_REPORT", a reacting node applies overload abatement treatment
+ to realm-routed requests identified by the overload abatement
+ algorithm (as defined in Section 2) sent for this application to the
+ overloaded realm.
+
+ This document assumes that there is a single source for realm reports
+ for a given realm, or that if multiple nodes can send realm reports,
+ that each such node has full knowledge of the overload state of the
+ entire realm. A reacting node cannot distinguish between receiving
+ realm reports from a single node or from multiple nodes.
+
+ Note: Known issues exist if there are multiple sources for
+ overload reports that apply to the same Diameter entity. Reacting
+ nodes have no way of determining the source and, as such, will
+ treat them as coming from a single source. Variance in sequence
+ numbers between the two sources can then cause incorrect overload
+ abatement treatment to be applied for indeterminate periods of
+ time.
+
+ Reporting nodes are responsible for determining the need for a
+ reduction of traffic. The method for making this determination is
+ implementation specific and depends on the type of overload report
+ being generated. A host report might be generated by tracking use of
+ resources required by the host to handle transactions for the
+ Diameter application. A realm report generally impacts the traffic
+ sent to multiple hosts and, as such, requires tracking the capacity
+ of all servers able to handle realm-routed requests for the
+ application and realm.
+
+ Once a reporting node determines the need for a reduction in traffic,
+ it uses the DOIC-defined AVPs to report on the condition. These AVPs
+ are included in answer messages sent or relayed by the reporting
+ node. The reporting node indicates the overload abatement algorithm
+ that is to be used to handle the traffic reduction in the
+ OC-Supported-Features AVP. The OC-OLR AVP is used to communicate
+ information about the requested reduction.
+
+ Reacting nodes, upon receipt of an overload report, apply the
+ overload abatement algorithm to traffic impacted by the overload
+ report. The method used to determine the requests that are to
+ receive overload abatement treatment is dependent on the abatement
+ algorithm. The loss abatement algorithm is defined in this document
+ (Section 6). Other abatement algorithms can be defined in extensions
+ to the DOIC solution.
+
+ Two types of overload abatement treatment are defined, diversion and
+ throttling. Reacting nodes are responsible for determining which
+ treatment is appropriate for individual requests.
+
+ As the conditions that lead to the generation of the overload report
+ change, the reporting node can send new overload reports requesting
+ greater reduction if the condition gets worse or less reduction if
+ the condition improves. The reporting node sends an overload report
+ with a duration of zero to indicate that the overload condition has
+ ended and abatement is no longer needed.
+
+ The reacting node also determines when the overload report expires
+ based on the OC-Validity-Duration AVP in the overload report and
+ stops applying the abatement algorithm when the report expires.
+
+ Note that erroneous overload reports can be used for DoS attacks.
+ This includes the ability to indicate that a significant reduction in
+ traffic, up to and including a request for no traffic, should be sent
+ to a reporting node. As such, care should be taken to verify the
+ sender of overload reports.
+
+4.4. DOIC Extensibility
+
+ The DOIC solution is designed to be extensible. This extensibility
+ is based on existing Diameter-based extensibility mechanisms, along
+ with the DOIC capability announcement mechanism.
+
+ There are multiple categories of extensions that are expected. This
+ includes the definition of new overload abatement algorithms, the
+ definition of new report types, and the definition of new scopes of
+ messages impacted by an overload report.
+
+ A DOIC node communicates supported features by including them in the
+ OC-Feature-Vector AVP, as a sub-AVP of OC-Supported-Features. Any
+ non-backwards-compatible DOIC extensions define new values for the
+ OC-Feature-Vector AVP. DOIC extensions also have the ability to add
+ new AVPs to the OC-Supported-Features AVP, if additional information
+ about the new feature is required.
+
+ Overload reports can also be extended by adding new sub-AVPs to the
+ OC-OLR AVP, allowing reporting nodes to communicate additional
+ information about handling an overload condition.
+
+ If necessary, new extensions can also define new AVPs that are not
+ part of the OC-Supported-Features and OC-OLR group AVPs. It is,
+ however, recommended that DOIC extensions use the OC-Supported-
+ Features AVP and OC-OLR AVP to carry all DOIC-related AVPs.
+
+4.5. Simplified Example Architecture
+
+ Figure 1 illustrates the simplified architecture for Diameter
+ overload information conveyance.
+
+ Realm X Same or other Realms
+ <--------------------------------------> <---------------------->
+
+
+ +--------+ : (optional) :
+ |Diameter| : :
+ |Server A|--+ .--. : +--------+ : .--.
+ +--------+ | _( `. : |Diameter| : _( `. +--------+
+ +--( )--:-| Agent |-:--( )--|Diameter|
+ +--------+ | ( ` . ) ) : +--------+ : ( ` . ) ) | Client |
+ |Diameter|--+ `--(___.-' : : `--(___.-' +--------+
+ |Server B| : :
+ +--------+ : :
+
+ End-to-end Overload Indication
+ 1) <----------------------------------------------->
+ Diameter Application Y
+
+ Overload Indication A Overload Indication A'
+ 2) <----------------------> <---------------------->
+ Diameter Application Y Diameter Application Y
+
+ Figure 1: Simplified Architecture Choices for Overload Indication
+ Delivery
+
+ In Figure 1, the Diameter overload indication can be conveyed (1)
+ end-to-end between servers and clients or (2) between servers and the
+ Diameter Agent inside the realm and then between the Diameter Agent
+ and the clients.
+
+5. Solution Procedures
+
+ This section outlines the normative behavior for the DOIC solution.
+
+5.1. Capability Announcement
+
+ This section defines DOIC Capability Announcement (DCA) behavior.
+
+ Note: This specification assumes that changes in DOIC node
+ capabilities are relatively rare events that occur as a result of
+ administrative action. Reacting nodes ought to minimize changes
+ that force the reporting node to change the features being used,
+ especially during active overload conditions. But even if
+ reacting nodes avoid such changes, reporting nodes still have to
+ be prepared for them to occur. For example, differing
+ capabilities between multiple reacting nodes may still force a
+ reporting node to select different features on a per-transaction
+ basis.
+
+5.1.1. Reacting Node Behavior
+
+ A reacting node MUST include the OC-Supported-Features AVP in all
+ requests. It MAY include the OC-Feature-Vector AVP, as a sub-AVP of
+ OC-Supported-Features. If it does so, it MUST indicate support for
+ the "loss" algorithm. If the reacting node is configured to support
+ features (including other algorithms) in addition to the loss
+ algorithm, it MUST indicate such support in an OC-Feature-Vector AVP.
+
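+   The following non-normative Erlang sketch shows one way a reacting
+   node might build the OC-Feature-Vector value it advertises; the
+   module and function names are illustrative only, and the extra
+   feature bits are assumed to come from local configuration.
+
+      -module(dca_reacting_sketch).
+      -export([feature_vector/1]).
+
+      -define(OLR_DEFAULT_ALGO, 16#0000000000000001).  %% loss bit
+
+      %% The loss algorithm bit is always set; any locally configured
+      %% feature bits (a list of integers) are OR'ed in on top of it.
+      feature_vector(ConfiguredFeatureBits) ->
+          lists:foldl(fun(Bit, Acc) -> Acc bor Bit end,
+                      ?OLR_DEFAULT_ALGO,
+                      ConfiguredFeatureBits).
+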
+ An OC-Supported-Features AVP in answer messages indicates there is a
+ reporting node for the transaction. The reacting node MAY take
+ action, for example, creating state for some stateful abatement
+ algorithm, based on the features indicated in the OC-Feature-Vector
+ AVP.
+
+ Note: The loss abatement algorithm does not require stateful
+ behavior when there is no active overload report.
+
+ Reacting nodes need to be prepared for the reporting node to change
+ selected algorithms. This can happen at any time, including when the
+ reporting node has sent an active overload report. The reacting node
+ can minimize the potential for changes by modifying the advertised
+ abatement algorithms sent to an overloaded reporting node to the
+ currently selected algorithm and loss (or just loss if it is the
+ currently selected algorithm). This has the effect of limiting the
+ potential change in abatement algorithm from the currently selected
+ algorithm to loss, avoiding changes to more complex abatement
+ algorithms that require state to operate properly.
+
+5.1.2. Reporting Node Behavior
+
+ Upon receipt of a request message, a reporting node determines if
+ there is a reacting node for the transaction based on the presence of
+ the OC-Supported-Features AVP in the request message.
+
+ If the request message contains an OC-Supported-Features AVP, then a
+ reporting node MUST include the OC-Supported-Features AVP in the
+ answer message for that transaction.
+
+ Note: Capability announcement is done on a per-transaction basis.
+ The reporting node cannot assume that the capabilities announced
+ by a reacting node will be the same between transactions.
+
+ A reporting node MUST NOT include the OC-Supported-Features AVP,
+ OC-OLR AVP, or any other overload control AVPs defined in extension
+ documents in response messages for transactions where the request
+ message does not include the OC-Supported-Features AVP. Lack of the
+ OC-Supported-Features AVP in the request message indicates that there
+ is no reacting node for the transaction.
+
+ A reporting node knows what overload control functionality is
+ supported by the reacting node based on the content or absence of the
+ OC-Feature-Vector AVP within the OC-Supported-Features AVP in the
+ request message.
+
+ A reporting node MUST select a single abatement algorithm in the
+ OC-Feature-Vector AVP. The abatement algorithm selected MUST
+ indicate the abatement algorithm the reporting node wants the
+ reacting node to use when the reporting node enters an overload
+ condition.
+
+ The abatement algorithm selected MUST be from the set of abatement
+ algorithms contained in the request message's OC-Feature-Vector AVP.
+
+ A reporting node that selects the loss algorithm may do so by
+ including the OC-Feature-Vector AVP with an explicit indication of
+ the loss algorithm, or it MAY omit the OC-Feature-Vector AVP. If it
+ selects a different algorithm, it MUST include the OC-Feature-Vector
+ AVP with an explicit indication of the selected algorithm.
+
+ The reporting node SHOULD indicate support for other DOIC features
+ defined in extension documents that it supports and that apply to the
+ transaction. It does so using the OC-Feature-Vector AVP.
+
+ Note: Not all DOIC features will apply to all Diameter
+ applications or deployment scenarios. The features included in
+ the OC-Feature-Vector AVP are based on local policy of the
+ reporting node.
+
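+   As a non-normative illustration, an Erlang sketch of the abatement
+   algorithm selection described above follows; the names are
+   hypothetical, and the preferred algorithms are assumed to be
+   configured locally as a list of feature bits in order of preference.
+
+      -module(dca_reporting_sketch).
+      -export([select_algorithm/2]).
+
+      -define(OLR_DEFAULT_ALGO, 16#0000000000000001).  %% loss bit
+
+      %% Pick one abatement algorithm bit from those advertised in the
+      %% request's OC-Feature-Vector, preferring locally configured
+      %% algorithms and falling back to loss, which both sides support.
+      select_algorithm(ReqFeatureVector, PreferredAlgoBits) ->
+          case [B || B <- PreferredAlgoBits,
+                     ReqFeatureVector band B =/= 0] of
+              [Bit | _] -> Bit;
+              []        -> ?OLR_DEFAULT_ALGO
+          end.
+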
+5.1.3. Agent Behavior
+
+ Diameter Agents that support DOIC can ensure that all messages
+ relayed by the agent contain the OC-Supported-Features AVP.
+
+ A Diameter Agent MAY take on reacting node behavior for Diameter
+ endpoints that do not support the DOIC solution. A Diameter Agent
+ detects that a Diameter endpoint does not support DOIC reacting node
+ behavior when there is no OC-Supported-Features AVP in a request
+ message.
+
+ For a Diameter Agent to be a reacting node for a non-supporting
+ Diameter endpoint, the Diameter Agent MUST include the OC-Supported-
+ Features AVP in request messages it relays that do not contain the
+ OC-Supported-Features AVP.
+
+ A Diameter Agent MAY take on reporting node behavior for Diameter
+ endpoints that do not support the DOIC solution. The Diameter Agent
+ MUST have visibility to all traffic destined for the non-supporting
+ host in order to become the reporting node for the Diameter endpoint.
+ A Diameter Agent detects that a Diameter endpoint does not support
+ DOIC reporting node behavior when there is no OC-Supported-Features
+ AVP in an answer message for a transaction that contained the
+ OC-Supported-Features AVP in the request message.
+
+ If a request already has the OC-Supported-Features AVP, a Diameter
+ Agent MAY modify it to reflect the features appropriate for the
+ transaction. Otherwise, the agent relays the OC-Supported-Features
+ AVP without change.
+
+ Example: If the agent supports a superset of the features reported
+ by the reacting node, then the agent might choose, based on local
+ policy, to advertise that superset of features to the reporting
+ node.
+
+ If the Diameter Agent changes the OC-Supported-Features AVP in a
+ request message, then it is likely it will also need to modify the
+ OC-Supported-Features AVP in the answer message for the transaction.
+ A Diameter Agent MAY modify the OC-Supported-Features AVP carried in
+ answer messages.
+
+ When making changes to the OC-Supported-Features or OC-OLR AVPs, the
+ Diameter Agent needs to ensure consistency in its behavior with both
+ upstream and downstream DOIC nodes.
+
+5.2. Overload Report Processing
+
+5.2.1. Overload Control State
+
+ Both reacting and reporting nodes maintain Overload Control State
+ (OCS) for active overload conditions. The following sections define
+ behavior associated with that OCS.
+
+ The contents of the OCS in the reporting node and in the reacting
+ node represent logical constructs. The actual internal physical
+ structure of the state included in the OCS is an implementation
+ decision.
+
+5.2.1.1. Overload Control State for Reacting Nodes
+
+ A reacting node maintains the following OCS per supported Diameter
+ application:
+
+ o a host-type OCS entry for each Destination-Host to which it sends
+ host-type requests and
+
+ o a realm-type OCS entry for each Destination-Realm to which it
+ sends realm-type requests.
+
+ A host-type OCS entry is identified by the pair of Application-ID and
+ the node's DiameterIdentity.
+
+ A realm-type OCS entry is identified by the pair of Application-ID
+ and realm.
+
+ The host-type and realm-type OCS entries include the following
+ information (the actual information stored is an implementation
+ decision):
+
+ o Sequence number (as received in OC-OLR; see Section 7.3)
+
+ o Time of expiry (derived from OC-Validity-Duration AVP received in
+ the OC-OLR AVP and time of reception of the message carrying
+ OC-OLR AVP)
+
+ o Selected abatement algorithm (as received in the OC-Supported-
+ Features AVP)
+
+ o Input data that is abatement algorithm specific (as received in
+ the OC-OLR AVP -- for example, OC-Reduction-Percentage for the
+ loss abatement algorithm)
+
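+   A non-normative Erlang sketch of such an entry is shown below; the
+   map layout and the module, function, and key names are illustrative
+   only, since the physical representation is an implementation
+   decision.
+
+      -module(doic_ocs_entry_sketch).
+      -export([host_key/2, realm_key/2, new_entry/4]).
+
+      %% Keys for the two OCS entry types kept by a reacting node.
+      host_key(AppId, Host)   -> {host, AppId, Host}.
+      realm_key(AppId, Realm) -> {realm, AppId, Realm}.
+
+      %% One OCS entry holding the information listed above.
+      new_entry(SeqNr, ValiditySecs, AbatementAlgo, AlgoInput) ->
+          Now = erlang:monotonic_time(second),
+          #{seq_nr    => SeqNr,              %% OC-Sequence-Number
+            expiry    => Now + ValiditySecs, %% OC-Validity-Duration
+            algorithm => AbatementAlgo,      %% OC-Supported-Features
+            input     => AlgoInput}.         %% e.g. reduction percentage
+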
+5.2.1.2. Overload Control State for Reporting Nodes
+
+ A reporting node maintains OCS entries per supported Diameter
+ application, per supported (and eventually selected) abatement
+ algorithm, and per report type.
+
+ An OCS entry is identified by the tuple of Application-ID, report
+ type, and abatement algorithm, and it includes the following
+ information (the actual information stored is an implementation
+ decision):
+
+ o Sequence number
+
+ o Validity duration
+
+ o Expiration time
+
+ o Input data that is algorithm specific (for example, the reduction
+ percentage for the loss abatement algorithm)
+
+5.2.1.3. Reacting Node's Maintenance of Overload Control State
+
+ When a reacting node receives an OC-OLR AVP, it MUST determine if it
+ is for an existing or new overload condition.
+
+ Note: For the remainder of this section, the term "OLR" refers to
+ the combination of the contents of the received OC-OLR AVP and the
+ abatement algorithm indicated in the received OC-Supported-
+ Features AVP.
+
+ When receiving an answer message with multiple OLRs of different
+ supported report types, a reacting node MUST process each received
+ OLR.
+
+ The OLR is for an existing overload condition if a reacting node has
+ an OCS that matches the received OLR.
+
+ For a host report, this means it matches the Application-ID and the
+ host's DiameterIdentity in an existing host OCS entry.
+
+ For a realm report, this means it matches the Application-ID and the
+ realm in an existing realm OCS entry.
+
+ If the OLR is for an existing overload condition, then a reacting
+ node MUST determine if the OLR is a retransmission or an update to
+ the existing OLR.
+
+ If the sequence number for the received OLR is greater than the
+ sequence number stored in the matching OCS entry, then a reacting
+ node MUST update the matching OCS entry.
+
+ If the sequence number for the received OLR is less than or equal to
+ the sequence number in the matching OCS entry, then a reacting node
+ MUST silently ignore the received OLR. The matching OCS MUST NOT be
+ updated in this case.
+
+ If the reacting node determines that the sequence number has rolled
+ over, then the reacting node MUST update the matching OCS entry.
+ This can be determined by recognizing that the number has changed
+ from a value within 1% of the maximum value in the OC-Sequence-Number
+ AVP to a value within 1% of the minimum value in the OC-Sequence-
+ Number AVP.
+
+ If the received OLR is for a new overload condition, then a reacting
+ node MUST generate a new OCS entry for the overload condition.
+
+ For a host report, this means a reacting node creates an OCS entry
+ with the Application-ID in the received message and DiameterIdentity
+ of the Origin-Host in the received message.
+
+ Note: This solution assumes that the Origin-Host AVP in the answer
+ message included by the reporting node is not changed along the
+ path to the reacting node.
+
+ For a realm report, this means a reacting node creates an OCS entry
+ with the Application-ID in the received message and realm of the
+ Origin-Realm in the received message.
+
+ If the received OLR contains a validity duration of zero ("0"), then
+ a reacting node MUST update the OCS entry as being expired.
+
+ Note: It is not necessarily appropriate to delete the OCS entry,
+ as the recommended behavior is that the reacting node slowly
+ returns to full traffic when ending an overload abatement period.
+
+ The reacting node does not delete an OCS when receiving an answer
+ message that does not contain an OC-OLR AVP (i.e., absence of OLR
+ means "no change").
+
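+   A non-normative Erlang sketch of the sequence number comparison
+   described above follows; the module and function names are
+   illustrative only.
+
+      -module(doic_olr_update_sketch).
+      -export([should_update/2]).
+
+      -define(MAX_SEQ, 16#FFFFFFFFFFFFFFFF).  %% Unsigned64 maximum
+      -define(NEAR, ?MAX_SEQ div 100).        %% "within 1%" window
+
+      %% A strictly greater sequence number, or a rollover from near
+      %% the maximum to near the minimum, updates the matching OCS
+      %% entry; anything else is silently ignored.
+      should_update(Received, Stored) when Received > Stored ->
+          true;
+      should_update(Received, Stored)
+        when Stored >= ?MAX_SEQ - ?NEAR, Received =< ?NEAR ->
+          true;
+      should_update(_, _) ->
+          false.
+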
+5.2.1.4. Reporting Node's Maintenance of Overload Control State
+
+ A reporting node SHOULD create a new OCS entry when entering an
+ overload condition.
+
+ Note: If a reporting node knows through absence of the
+ OC-Supported-Features AVP in received messages that there are no
+ reacting nodes supporting DOIC, then the reporting node can choose
+ to not create OCS entries.
+
+ When generating a new OCS entry, the sequence number SHOULD be set to
+ zero ("0").
+
+ When generating sequence numbers for new overload conditions, the new
+ sequence number MUST be greater than any sequence number in an active
+ (unexpired) overload report for the same application and report type
+ previously sent by the reporting node. This property MUST hold over
+ a reboot of the reporting node.
+
+ Note: One way of addressing this over a reboot of a reporting node
+ is to use a timestamp for the first overload condition that occurs
+      after the reboot and to start using sequences beginning with zero
+ for subsequent overload conditions.
+
+ A reporting node MUST update an OCS entry when it needs to adjust the
+ validity duration of the overload condition at reacting nodes.
+
+ Example: If a reporting node wishes to instruct reacting nodes to
+ continue overload abatement for a longer period of time than
+ originally communicated. This also applies if the reporting node
+ wishes to shorten the period of time that overload abatement is to
+ continue.
+
+ A reporting node MUST update an OCS entry when it wishes to adjust
+ any parameters specific to the abatement algorithm, including, for
+ example, the reduction percentage used for the loss abatement
+ algorithm.
+
+ Example: If a reporting node wishes to change the reduction
+ percentage either higher (if the overload condition has worsened)
+ or lower (if the overload condition has improved), then the
+ reporting node would update the appropriate OCS entry.
+
+ A reporting node MUST increment the sequence number associated with
+ the OCS entry anytime the contents of the OCS entry are changed.
+ This will result in a new sequence number being sent to reacting
+ nodes, instructing them to process the OC-OLR AVP.
+
+ A reporting node SHOULD update an OCS entry with a validity duration
+ of zero ("0") when the overload condition ends.
+
+ Note: If a reporting node knows that the OCS entries in the
+ reacting nodes are near expiration, then the reporting node might
+ decide not to send an OLR with a validity duration of zero.
+
+ A reporting node MUST keep an OCS entry with a validity duration of
+ zero ("0") for a period of time long enough to ensure that any
+ unexpired reacting node's OCS entry created as a result of the
+ overload condition in the reporting node is deleted.
+
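+   The note above about surviving a reboot could, for example, be
+   realized as in the following non-normative Erlang sketch (names are
+   illustrative only):
+
+      -module(doic_seq_sketch).
+      -export([first_seq_after_reboot/0, next_seq/1]).
+
+      %% A timestamp-derived sequence number for the first overload
+      %% condition after a reboot is expected to exceed anything sent
+      %% before the reboot; later changes increment the stored value.
+      first_seq_after_reboot() ->
+          erlang:system_time(second).
+
+      next_seq(CurrentSeq) ->
+          CurrentSeq + 1.
+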
+5.2.2. Reacting Node Behavior
+
+ When a reacting node sends a request, it MUST determine if that
+ request matches an active OCS.
+
+ If the request matches an active OCS, then the reacting node MUST use
+ the overload abatement algorithm indicated in the OCS to determine if
+ the request is to receive overload abatement treatment.
+
+ For the loss abatement algorithm defined in this specification, see
+ Section 6 for the overload abatement algorithm logic applied.
+
+ If the overload abatement algorithm selects the request for overload
+ abatement treatment, then the reacting node MUST apply overload
+ abatement treatment on the request. The abatement treatment applied
+ depends on the context of the request.
+
+ If diversion abatement treatment is possible (i.e., a different path
+ for the request can be selected where the overloaded node is not part
+ of the different path), then the reacting node SHOULD apply diversion
+ abatement treatment to the request. The reacting node MUST apply
+ throttling abatement treatment to requests identified for abatement
+ treatment when diversion treatment is not possible or was not
+ applied.
+
+ Note: This only addresses the case where there are two defined
+ abatement treatments, diversion and throttling. Any extension
+ that defines a new abatement treatment must also define its
+ interaction with existing treatments.
+
+ If the overload abatement treatment results in throttling of the
+ request and if the reacting node is an agent, then the agent MUST
+ send an appropriate error as defined in Section 8.
+
+ Diameter endpoints that throttle requests need to do so according to
+ the rules of the client application. Those rules will vary by
+ application and are beyond the scope of this document.
+
+ In the case that the OCS entry indicated no traffic was to be sent to
+ the overloaded entity and the validity duration expires, then
+ overload abatement associated with the overload report MUST be ended
+ in a controlled fashion.
+
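+   A non-normative Erlang sketch of this treatment selection follows;
+   the function and the shape of its arguments are illustrative only.
+
+      -module(doic_abatement_sketch).
+      -export([treat_request/2]).
+
+      %% Requests not selected by the abatement algorithm are routed
+      %% normally; selected requests are diverted when an alternate
+      %% path avoiding the overloaded node exists, and throttled
+      %% otherwise (an agent then answers with an error, Section 8).
+      treat_request(forward, _AltPaths) ->
+          forward;
+      treat_request(abate, [Path | _]) ->
+          {divert, Path};
+      treat_request(abate, []) ->
+          throttle.
+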
+5.2.3. Reporting Node Behavior
+
+ If there is an active OCS entry, then a reporting node SHOULD include
+ the OC-OLR AVP in all answers to requests that contain the
+ OC-Supported-Features AVP and that match the active OCS entry.
+
+ Note: A request matches 1) if the Application-ID in the request
+ matches the Application-ID in any active OCS entry and 2) if the
+ report type in the OCS entry matches a report type supported by
+      the reacting node as indicated in the OC-Supported-Features AVP.
+
+ The contents of the OC-OLR AVP depend on the selected algorithm.
+
+ A reporting node MAY choose to not resend an overload report to a
+ reacting node if it can guarantee that this overload report is
+ already active in the reacting node.
+
+ Note: In some cases (e.g., when there are one or more agents in
+ the path between reporting and reacting nodes, or when overload
+ reports are discarded by reacting nodes), a reporting node may not
+ be able to guarantee that the reacting node has received the
+ report.
+
+ A reporting node MUST NOT send overload reports of a type that has
+ not been advertised as supported by the reacting node.
+
+ Note: A reacting node implicitly advertises support for the host
+ and realm report types by including the OC-Supported-Features AVP
+ in the request. Support for other report types will be explicitly
+ indicated by new feature bits in the OC-Feature-Vector AVP.
+
+ A reporting node SHOULD explicitly indicate the end of an overload
+ occurrence by sending a new OLR with OC-Validity-Duration set to a
+ value of zero ("0"). The reporting node SHOULD ensure that all
+ reacting nodes receive the updated overload report.
+
+ A reporting node MAY rely on the OC-Validity-Duration AVP values for
+ the implicit cleanup of overload control state on the reacting node.
+
+ Note: All OLRs sent have an expiration time calculated by adding
+ the validity duration contained in the OLR to the time the message
+ was sent. Transit time for the OLR can be safely ignored. The
+ reporting node can ensure that all reacting nodes have received
+ the OLR by continuing to send it in answer messages until the
+ expiration time for all OLRs sent for that overload condition have
+ expired.
+
+ When a reporting node sends an OLR, it effectively delegates any
+ necessary throttling to downstream nodes. If the reporting node also
+ locally throttles the same set of messages, the overall number of
+ throttled requests may be higher than intended. Therefore, before
+ applying local message throttling, a reporting node needs to check if
+ these messages match existing OCS entries, indicating that these
+ messages have survived throttling applied by downstream nodes that
+ have received the related OLR.
+
+ However, even if the set of messages match existing OCS entries, the
+ reporting node can still apply other abatement methods such as
+ diversion. The reporting node might also need to throttle requests
+ for reasons other than overload. For example, an agent or server
+ might have a configured rate limit for each client and might throttle
+ requests that exceed that limit, even if such requests had already
+ been candidates for throttling by downstream nodes. The reporting
+ node also has the option to send new OLRs requesting greater
+ reductions in traffic, reducing the need for local throttling.
+
+ A reporting node SHOULD decrease requested overload abatement
+ treatment in a controlled fashion to avoid oscillations in traffic.
+
+ Example: A reporting node might wait some period of time after
+ overload ends before terminating the OLR, or it might send a
+ series of OLRs indicating progressively less overload severity.
+
+5.3. Protocol Extensibility
+
+ The DOIC solution can be extended. Types of potential extensions
+ include new traffic abatement algorithms, new report types, or other
+ new functionality.
+
+ When defining a new extension that requires new normative behavior,
+ the specification must define a new feature for the OC-Feature-Vector
+ AVP. This feature bit is used to communicate support for the new
+ feature.
+
+ The extension may define new AVPs for use in the DOIC Capability
+ Announcement and for use in DOIC overload reporting. These new AVPs
+ SHOULD be defined to be extensions to the OC-Supported-Features or
+ OC-OLR AVPs defined in this document.
+
+ The Grouped AVP extension mechanisms defined in [RFC6733] apply.
+ This allows, for example, defining a new feature that is mandatory to
+ be understood even when piggybacked on an existing application.
+
+ When defining new report type values, the corresponding specification
+ must define the semantics of the new report types and how they affect
+ the OC-OLR AVP handling.
+
+   The OC-Supported-Features and OC-OLR AVPs can be expanded with
+ optional sub-AVPs only if a legacy DOIC implementation can safely
+ ignore them without breaking backward compatibility for the given
+ OC-Report-Type AVP value. Any new sub-AVPs must not require that the
+ M-bit be set.
+
+ Documents that introduce new report types must describe any
+ limitations on their use across non-supporting agents.
+
+ As with any Diameter specification, RFC 6733 requires all new AVPs to
+ be registered with IANA. See Section 9 for the required procedures.
+ New features (feature bits in the OC-Feature-Vector AVP) and report
+ types (in the OC-Report-Type AVP) MUST be registered with IANA.
+
+6. Loss Algorithm
+
+ This section documents the Diameter overload loss abatement
+ algorithm.
+
+6.1. Overview
+
+ The DOIC specification supports the ability for multiple overload
+ abatement algorithms to be specified. The abatement algorithm used
+ for any instance of overload is determined by the DOIC Capability
+ Announcement process documented in Section 5.1.
+
+ The loss algorithm described in this section is the default algorithm
+ that must be supported by all Diameter nodes that support DOIC.
+
+ The loss algorithm is designed to be a straightforward and stateless
+ overload abatement algorithm. It is used by reporting nodes to
+ request a percentage reduction in the amount of traffic sent. The
+ traffic impacted by the requested reduction depends on the type of
+ overload report.
+
+ Reporting nodes request the stateless reduction of the number of
+ requests by an indicated percentage. This percentage reduction is in
+ comparison to the number of messages the node otherwise would send,
+ regardless of how many requests the node might have sent in the past.
+
+ From a conceptual level, the logic at the reacting node could be
+ outlined as follows.
+
+ 1. An overload report is received, and the associated OCS is either
+ saved or updated (if required) by the reacting node.
+
+ 2. A new Diameter request is generated by the application running on
+ the reacting node.
+
+ 3. The reacting node determines that an active overload report
+ applies to the request, as indicated by the corresponding OCS
+ entry.
+
+ 4. The reacting node determines if overload abatement treatment
+ should be applied to the request. One approach that could be
+ taken for each request is to select a uniformly selected random
+ number between 1 and 100. If the random number is less than or
+ equal to the indicated reduction percentage, then the request is
+ given abatement treatment; otherwise, the request is given normal
+ routing treatment.
+
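+   A non-normative Erlang sketch of step 4 above for the loss algorithm
+   follows; the function name is illustrative only.
+
+      -module(doic_loss_sketch).
+      -export([abate/1]).
+
+      %% Draw a uniform random number between 1 and 100 and give the
+      %% request abatement treatment if it does not exceed the
+      %% requested reduction percentage.
+      abate(ReductionPercentage)
+        when ReductionPercentage >= 0, ReductionPercentage =< 100 ->
+          case rand:uniform(100) =< ReductionPercentage of
+              true  -> abate;   %% divert or throttle this request
+              false -> forward  %% normal routing treatment
+          end.
+
+   For example, abate(10) selects roughly 10 percent of new requests
+   for abatement treatment, independently of past traffic.
+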
+6.2. Reporting Node Behavior
+
+ The method a reporting node uses to determine the amount of traffic
+ reduction required to address an overload condition is an
+ implementation decision.
+
+ When a reporting node that has selected the loss abatement algorithm
+ determines the need to request a reduction in traffic, it includes an
+ OC-OLR AVP in answer messages as described in Section 5.2.3.
+
+ When sending the OC-OLR AVP, the reporting node MUST indicate a
+ percentage reduction in the OC-Reduction-Percentage AVP.
+
+ The reporting node MAY change the reduction percentage in subsequent
+ overload reports. When doing so, the reporting node must conform to
+ overload report handling specified in Section 5.2.3.
+
+6.3. Reacting Node Behavior
+
+ The method a reacting node uses to determine which request messages
+ are given abatement treatment is an implementation decision.
+
+ When receiving an OC-OLR in an answer message where the algorithm
+ indicated in the OC-Supported-Features AVP is the loss algorithm, the
+ reacting node MUST apply abatement treatment to the requested
+ percentage of request messages sent.
+
+ Note: The loss algorithm is a stateless algorithm. As a result,
+ the reacting node does not guarantee that there will be an
+ absolute reduction in traffic sent. Rather, it guarantees that
+ the requested percentage of new requests will be given abatement
+ treatment.
+
+ If the reacting node comes out of the 100% traffic reduction
+ (meaning, it has received an OLR indicating that no traffic should be
+ sent, as a result of the overload report timing out), the reacting
+ node sending the traffic SHOULD be conservative and, for example,
+ first send "probe" messages to learn the overload condition of the
+ overloaded node before converging to any traffic amount/rate decided
+ by the sender. Similar concerns apply in all cases when the overload
+ report times out, unless the previous overload report stated 0%
+ reduction.
+
+ Note: The goal of this behavior is to reduce the probability of
+ overload condition thrashing where an immediate transition from
+ 100% reduction to 0% reduction results in the reporting node
+ moving quickly back into an overload condition.
+
+7. Attribute Value Pairs
+
+ This section describes the encoding and semantics of the Diameter
+ Overload Indication Attribute Value Pairs (AVPs) defined in this
+ document.
+
+ Refer to Section 4 of [RFC6733] for more information on AVPs and AVP
+ data types.
+
+7.1. OC-Supported-Features AVP
+
+ The OC-Supported-Features AVP (AVP Code 621) is of type Grouped and
+ serves two purposes. First, it announces a node's support for the
+ DOIC solution in general. Second, it contains the description of the
+ supported DOIC features of the sending node. The OC-Supported-
+ Features AVP MUST be included in every Diameter request message a
+ DOIC supporting node sends.
+
+ OC-Supported-Features ::= < AVP Header: 621 >
+ [ OC-Feature-Vector ]
+ * [ AVP ]
+
+7.2. OC-Feature-Vector AVP
+
+ The OC-Feature-Vector AVP (AVP Code 622) is of type Unsigned64 and
+ contains a 64-bit flags field of announced capabilities of a DOIC
+ node. The value of zero (0) is reserved.
+
+ The OC-Feature-Vector sub-AVP is used to announce the DOIC features
+ supported by the DOIC node, in the form of a flag-bits field in which
+ each bit announces one feature or capability supported by the node.
+ The absence of the OC-Feature-Vector AVP in request messages
+ indicates that only the default traffic abatement algorithm described
+ in this specification is supported. The absence of the OC-Feature-
+ Vector AVP in answer messages indicates that the default traffic
+ abatement algorithm described in this specification is selected
+ (while other traffic abatement algorithms may be supported), and no
+ features other than abatement algorithms are supported.
+
+ The following capability is defined in this document:
+
+ OLR_DEFAULT_ALGO (0x0000000000000001)
+
+      When this flag is set by a DOIC reacting node, it means that
+ the default traffic abatement (loss) algorithm is supported. When
+ this flag is set by a DOIC reporting node, it means that the loss
+ algorithm will be used for requested overload abatement.
+
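+   A non-normative Erlang sketch of testing this flag follows; the
+   atom 'undefined' stands in for an absent OC-Feature-Vector AVP and
+   the names are illustrative only.
+
+      -module(doic_feature_sketch).
+      -export([supports_loss/1]).
+
+      -define(OLR_DEFAULT_ALGO, 16#0000000000000001).
+
+      %% An absent OC-Feature-Vector implies the default loss
+      %% algorithm; otherwise the flag is tested explicitly.
+      supports_loss(undefined) ->
+          true;
+      supports_loss(FV) when FV band ?OLR_DEFAULT_ALGO =/= 0 ->
+          true;
+      supports_loss(_) ->
+          false.
+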
+7.3. OC-OLR AVP
+
+ The OC-OLR AVP (AVP Code 623) is of type Grouped and contains the
+ information necessary to convey an overload report on an overload
+ condition at the reporting node. The application the OC-OLR AVP
+ applies to is identified by the Application-ID found in the Diameter
+ message header. The host or realm the OC-OLR AVP concerns is
+ determined from the Origin-Host AVP and/or Origin-Realm AVP found in
+ the encapsulating Diameter command. The OC-OLR AVP is intended to be
+ sent only by a reporting node.
+
+ OC-OLR ::= < AVP Header: 623 >
+ < OC-Sequence-Number >
+ < OC-Report-Type >
+ [ OC-Reduction-Percentage ]
+ [ OC-Validity-Duration ]
+ * [ AVP ]
+
+7.4. OC-Sequence-Number AVP
+
+ The OC-Sequence-Number AVP (AVP Code 624) is of type Unsigned64. Its
+ usage in the context of overload control is described in Section 5.2.
+
+ From the functionality point of view, the OC-Sequence-Number AVP is
+ used as a nonvolatile increasing counter for a sequence of overload
+ reports between two DOIC nodes for the same overload occurrence.
+ Sequence numbers are treated in a unidirectional manner, i.e., two
+ sequence numbers in each direction between two DOIC nodes are not
+ related or correlated.
+
+7.5. OC-Validity-Duration AVP
+
+ The OC-Validity-Duration AVP (AVP Code 625) is of type Unsigned32 and
+ indicates in seconds the validity time of the overload report. The
+ number of seconds is measured after reception of the first OC-OLR AVP
+ with a given value of OC-Sequence-Number AVP. The default value for
+ the OC-Validity-Duration AVP is 30 seconds. When the OC-Validity-
+ Duration AVP is not present in the OC-OLR AVP, the default value
+ applies. The maximum value for the OC-Validity-Duration AVP is
+ 86,400 seconds (24 hours). If the value received in the OC-Validity-
+ Duration is greater than the maximum value, then the default value
+ applies.
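+
+ A minimal sketch of the above rules, with the argument taken from the
+ received OC-Validity-Duration AVP or 'undefined' when the AVP is
+ absent:
+
+    -define(DEFAULT_VALIDITY, 30).        %% seconds
+    -define(MAX_VALIDITY, 86400).         %% 24 hours
+
+    validity_duration(undefined) ->       %% AVP absent: default applies
+        ?DEFAULT_VALIDITY;
+    validity_duration(Secs) when Secs > ?MAX_VALIDITY ->
+        ?DEFAULT_VALIDITY;                %% above maximum: default applies
+    validity_duration(Secs) ->
+        Secs.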
+
+7.6. OC-Report-Type AVP
+
+ The OC-Report-Type AVP (AVP Code 626) is of type Enumerated. The
+ value of the AVP describes what the overload report concerns. The
+ following values are initially defined:
+
+ HOST_REPORT 0
+ The overload report is for a host. Overload abatement treatment
+ applies to host-routed requests.
+
+ REALM_REPORT 1
+ The overload report is for a realm. Overload abatement treatment
+ applies to realm-routed requests.
+
+ The values 2-4294967295 are unassigned.
+
+7.7. OC-Reduction-Percentage AVP
+
+ The OC-Reduction-Percentage AVP (AVP Code 627) is of type Unsigned32
+ and describes the percentage of the traffic that the sender is
+ requested to reduce, compared to what it otherwise would send. The
+ OC-Reduction-Percentage AVP applies to the default (loss) algorithm
+ specified in this specification. However, the AVP can be reused for
+ future abatement algorithms, if its semantics fit into the new
+ algorithm.
+
+ The value of the OC-Reduction-Percentage AVP is between zero (0) and one
+ hundred (100). Values greater than 100 are ignored. The value of
+ 100 means that all traffic is to be throttled, i.e., the reporting
+ node is under a severe load and ceases to process any new messages.
+ The value of 0 means that the reporting node is in a stable state and
+ has no need for the reacting node to apply any traffic abatement.
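+
+ A minimal sketch of a loss-algorithm abatement decision based on the
+ requested percentage (the function name is hypothetical):
+
+    %% Return true if this request should be abated (throttled or
+    %% diverted) under the default loss algorithm.
+    abate(Pct) when is_integer(Pct), 0 =< Pct, Pct =< 100 ->
+        rand:uniform(100) =< Pct;
+    abate(_) ->
+        false.                            %% values above 100 are ignored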
+
+7.8. AVP Flag Rules
+
+ +---------+
+ |AVP flag |
+ |rules |
+ +----+----+
+ AVP Section | |MUST|
+ Attribute Name Code Defined Value Type |MUST| NOT|
+ +--------------------------------------------------+----+----+
+ |OC-Supported-Features 621 7.1 Grouped | | V |
+ +--------------------------------------------------+----+----+
+ |OC-Feature-Vector 622 7.2 Unsigned64 | | V |
+ +--------------------------------------------------+----+----+
+ |OC-OLR 623 7.3 Grouped | | V |
+ +--------------------------------------------------+----+----+
+ |OC-Sequence-Number 624 7.4 Unsigned64 | | V |
+ +--------------------------------------------------+----+----+
+ |OC-Validity-Duration 625 7.5 Unsigned32 | | V |
+ +--------------------------------------------------+----+----+
+ |OC-Report-Type 626 7.6 Enumerated | | V |
+ +--------------------------------------------------+----+----+
+ |OC-Reduction | | |
+ | -Percentage 627 7.7 Unsigned32 | | V |
+ +--------------------------------------------------+----+----+
+
+ As described in the Diameter base protocol [RFC6733], the M-bit usage
+ for a given AVP in a given command may be defined by the application.
+
+8. Error Response Codes
+
+ When a DOIC node rejects a Diameter request due to overload, the DOIC
+ node MUST select an appropriate error response code. This
+ determination is made based on the probability of the request
+ succeeding if retried on a different path.
+
+ Note: This only applies for DOIC nodes that are not the originator
+ of the request.
+
+ A reporting node rejecting a Diameter request due to an overload
+ condition SHOULD send a DIAMETER_TOO_BUSY error response, if it can
+ assume that the same request may succeed on a different path.
+
+ If a reporting node knows or assumes that the same request will not
+ succeed on a different path, the DIAMETER_UNABLE_TO_COMPLY error
+ response SHOULD be used. Retrying would consume valuable resources
+ during an occurrence of overload.
+
+ For instance, if the request arrived at the reporting node without
+ a Destination-Host AVP, then the reporting node might determine
+ that there is an alternative Diameter node that could successfully
+ process the request and that retrying the transaction would not
+ negatively impact the reporting node. DIAMETER_TOO_BUSY would be
+ sent in this case.
+
+ If the request arrived at the reporting node with a Destination-
+ Host AVP populated with its own Diameter identity, then the
+ reporting node can assume that retrying the request would result
+ in it coming to the same reporting node.
+ DIAMETER_UNABLE_TO_COMPLY would be sent in this case.
+
+ A second example is when an agent that supports the DOIC solution
+ is performing the role of a reacting node for a non-supporting
+ client. Requests that are rejected as a result of DOIC throttling
+ by the agent in this scenario would generally be rejected with a
+ DIAMETER_UNABLE_TO_COMPLY response code.
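+
+ The examples above can be summarised in a small sketch that picks a
+ result code from the Destination-Host AVP of the rejected request
+ ('undefined' when absent) and the reporting node's own identity; the
+ function name is hypothetical:
+
+    -define(DIAMETER_TOO_BUSY, 3004).            %% RFC 6733
+    -define(DIAMETER_UNABLE_TO_COMPLY, 5012).    %% RFC 6733
+
+    %% Only the two cases illustrated above are covered; anything else
+    %% is a matter of local policy.
+    result_code(undefined, _OwnHost) ->          %% no Destination-Host
+        ?DIAMETER_TOO_BUSY;
+    result_code(OwnHost, OwnHost) ->             %% addressed to this node
+        ?DIAMETER_UNABLE_TO_COMPLY.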
+
+9. IANA Considerations
+
+9.1. AVP Codes
+
+ New AVPs defined by this specification are listed in Section 7. All
+ AVP codes are allocated from the "AVP Codes" sub-registry under the
+ "Authentication, Authorization, and Accounting (AAA) Parameters"
+ registry.
+
+9.2. New Registries
+
+ Two new registries have been created in the "AVP Specific Values"
+ sub-registry under the "Authentication, Authorization, and Accounting
+ (AAA) Parameters" registry.
+
+ A new "OC-Feature-Vector AVP Values (code 622)" registry has been
+ created. This registry contains the following:
+
+ Feature Vector Value Name
+
+ Feature Vector Value
+
+ Specification defining the new value
+
+ See Section 7.2 for the initial Feature Vector Value in the registry.
+ This specification defines the value. New values can be added to the
+ registry using the Specification Required policy [RFC5226].
+
+ A new "OC-Report-Type AVP Values (code 626)" registry has been
+ created. This registry contains the following:
+
+ Report Type Value Name
+
+ Report Type Value
+
+ Specification defining the new value
+
+ See Section 7.6 for the initial assignment in the registry. New
+ types can be added using the Specification Required policy [RFC5226].
+
+10. Security Considerations
+
+ DOIC gives Diameter nodes the ability to request that downstream
+ nodes send fewer Diameter requests. Nodes do this by exchanging
+ overload reports that directly effect this reduction. This exchange
+ is potentially subject to multiple methods of attack and has the
+ potential to be used as a denial-of-service (DoS) attack vector. For
+ instance, a series of injected realm OLRs with a requested reduction
+ percentage of 100% could be used to completely eliminate any traffic
+ from being sent to that realm.
+
+ Overload reports may contain information about the topology and
+ current status of a Diameter network. This information is
+ potentially sensitive. Network operators may wish to control
+ disclosure of overload reports to unauthorized parties to avoid their
+ use for competitive intelligence or to target attacks.
+
+ Diameter does not include features to provide end-to-end
+ authentication, integrity protection, or confidentiality. This may
+ cause complications when sending overload reports between non-
+ adjacent nodes.
+
+10.1. Potential Threat Modes
+
+ The Diameter protocol involves transactions in the form of requests
+ and answers exchanged between clients and servers. These clients and
+ servers may be peers, that is, they may share a direct transport
+ (e.g., TCP or SCTP) connection, or the messages may traverse one or
+ more intermediaries, known as Diameter Agents. Diameter nodes use
+ TLS, DTLS, or IPsec to authenticate peers and to provide
+ confidentiality and integrity protection of traffic between peers.
+ Nodes can make authorization decisions based on the peer identities
+ authenticated at the transport layer.
+
+ When agents are involved, this presents an effectively transitive
+ trust model. That is, a Diameter client or server can authorize an
+ agent for certain actions, but it must trust that agent to make
+ appropriate authorization decisions about its peers, and so on.
+ Since confidentiality and integrity protection occur at the transport
+ layer, agents can read, and perhaps modify, any part of a Diameter
+ message, including an overload report.
+
+ There are several ways an attacker might attempt to exploit the
+ overload control mechanism. An unauthorized third party might inject
+ an overload report into the network. If this third party is upstream
+ of an agent, and that agent fails to apply proper authorization
+ policies, downstream nodes may mistakenly trust the report. This
+ attack is at least partially mitigated by the assumption that nodes
+ include overload reports in Diameter answers but not in requests.
+ This requires an attacker to have knowledge of the original request
+ in order to construct an answer. Such an answer would also need to
+ arrive at a Diameter node via a protected transport connection.
+ Therefore, implementations MUST validate that an answer containing an
+ overload report is a properly constructed response to a pending
+ request prior to acting on the overload report, and that the answer
+ was received via an appropriate transport connection.
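+
+ One way to realise this check, sketched here under the assumption
+ that the node keeps the Hop-by-Hop and End-to-End Identifiers of its
+ pending requests, is to accept a report only when the answer matches
+ such a pair (the function name is hypothetical):
+
+    accept_olr(AnswerH2H, AnswerE2E, PendingIds) ->
+        lists:member({AnswerH2H, AnswerE2E}, PendingIds).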
+
+ A similar attack involves a compromised but otherwise authorized node
+ that sends an inappropriate overload report. For example, a server
+ for the realm "example.com" might send an overload report indicating
+ that a competitor's realm "example.net" is overloaded. If other
+ nodes act on the report, they may falsely believe that "example.net"
+ is overloaded, effectively reducing that realm's capacity.
+ Therefore, it's critical that nodes validate that an overload report
+ received from a peer actually falls within that peer's responsibility
+ before acting on the report or forwarding the report to other peers.
+ For example, an overload report from a peer that applies to a realm
+ not handled by that peer is suspect. This may require out-of-band,
+ non-Diameter agreements and/or mechanisms.
+
+ This attack is partially mitigated by the fact that the
+ application, as well as host and realm, for a given OLR is
+ determined implicitly by respective AVPs in the enclosing answer.
+ If a reporting node modifies any of those AVPs, the enclosing
+ transaction will also be affected.
+
+10.2. Denial-of-Service Attacks
+
+ Diameter overload reports, especially realm reports, can cause a node
+ to cease sending some or all Diameter requests for an extended
+ period. This makes them a tempting vector for DoS attacks.
+ Furthermore, since Diameter is almost always used in support of other
+ protocols, a DoS attack on Diameter is likely to impact those
+ protocols as well. In the worst case, where the Diameter application
+ is being used for access control into an IP network, a coordinated
+ DoS attack could result in the blockage of all traffic into that
+ network. Therefore, Diameter nodes MUST NOT honor or forward OLRs
+ received from peers that are not trusted to send them.
+
+ An attacker might use the information in an OLR to assist in DoS
+ attacks. For example, an attacker could use information about
+ current overload conditions to time an attack for maximum effect, or
+ use subsequent overload reports as a feedback mechanism to learn the
+ results of a previous or ongoing attack. Operators need the ability
+ to ensure that OLRs are not leaked to untrusted parties.
+
+10.3. Noncompliant Nodes
+
+ In the absence of an overload control mechanism, Diameter nodes need
+ to implement strategies to protect themselves from floods of
+ requests, and to make sure that a disproportionate load from one
+ source does not prevent other sources from receiving service. For
+ example, a Diameter server might throttle a certain percentage of
+ requests from sources that exceed certain limits. Overload control
+ can be thought of as an optimization for such strategies, where
+ downstream nodes never send the excess requests in the first place.
+ However, the presence of an overload control mechanism does not
+ remove the need for these other protection strategies.
+
+ When a Diameter node sends an overload report, it cannot assume that
+ all nodes will comply, even if they indicate support for DOIC. A
+ noncompliant node might continue to send requests with no reduction
+ in load. Such noncompliance could be done accidentally or
+ maliciously to gain an unfair advantage over compliant nodes.
+ Requirement 28 in [RFC7068] indicates that the overload control
+ solution cannot assume that all Diameter nodes in a network are
+ trusted. It also requires that malicious nodes not be allowed to
+ take advantage of the overload control mechanism to get more than
+ their fair share of service.
+
+10.4. End-to-End Security Issues
+
+ The lack of end-to-end integrity features makes it difficult to
+ establish trust in overload reports received from non-adjacent nodes.
+ Any agents in the message path may insert or modify overload reports.
+ Nodes must trust that their adjacent peers perform proper checks on
+ overload reports from their peers, and so on, creating a transitive-
+ trust requirement extending for potentially long chains of nodes.
+ Network operators must determine if this transitive trust requirement
+ is acceptable for their deployments. Nodes supporting Diameter
+ overload control MUST give operators the ability to select which
+ peers are trusted to deliver overload reports and whether they are
+ trusted to forward overload reports from non-adjacent nodes. DOIC
+ nodes MUST strip DOIC AVPs from messages received from peers that are
+ not trusted for DOIC purposes.
+
+ The lack of end-to-end confidentiality protection means that any
+ Diameter Agent in the path of an overload report can view the
+ contents of that report. In addition to the requirement to select
+ which peers are trusted to send overload reports, operators MUST be
+ able to select which peers are authorized to receive reports. A node
+ MUST NOT send an overload report to a peer not authorized to receive
+ it. Furthermore, an agent MUST remove any overload reports that
+ might have been inserted by other nodes before forwarding a Diameter
+ message to a peer that is not authorized to receive overload reports.
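+
+ A sketch of such stripping over a decoded AVP list, keying on the
+ DOIC AVP codes defined in Section 7 (the module name is hypothetical;
+ the diameter_avp record is defined in diameter.hrl):
+
+    -module(doic_strip).
+    -export([strip_doic/1]).
+    -include_lib("diameter/include/diameter.hrl").
+
+    %% Drop the vendorless AVPs with codes 621-627 before forwarding
+    %% to a peer not authorized to receive overload reports.
+    strip_doic(Avps) ->
+        [A || #diameter_avp{code = Code, vendor_id = Vid} = A <- Avps,
+              Vid /= undefined orelse Code < 621 orelse Code > 627].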
+
+ A DOIC node cannot always automatically detect that a peer also
+ supports DOIC. For example, a node might have a peer that is a
+ non-supporting agent. If nodes on the other side of that agent
+ send OC-Supported-Features AVPs, the agent is likely to forward
+ them as unknown AVPs. Messages received across the non-supporting
+ agent may be indistinguishable from messages received across a
+ DOIC supporting agent, giving the false impression that the non-
+ supporting agent actually supports DOIC. This complicates the
+ transitive-trust nature of DOIC. Operators need to be careful to
+ avoid situations where a non-supporting agent is mistakenly
+ trusted to enforce DOIC-related authorization policies.
+
+ It is expected that work on end-to-end Diameter security might make
+ it easier to establish trust in non-adjacent nodes for overload
+ control purposes. Readers should be reminded, however, that the
+ overload control mechanism allows Diameter Agents to modify AVPs in,
+ or insert additional AVPs into, existing messages that are originated
+ by other nodes. If end-to-end security is enabled, there is a risk
+ that such modification could violate integrity protection. The
+ details of using any future Diameter end-to-end security mechanism
+ with overload control will require careful consideration, and are
+ beyond the scope of this document.
+
+11. References
+
+11.1. Normative References
+
+ [RFC2119] Bradner, S., "Key words for use in RFCs to Indicate
+ Requirement Levels", BCP 14, RFC 2119,
+ DOI 10.17487/RFC2119, March 1997,
+ <http://www.rfc-editor.org/info/rfc2119>.
+
+ [RFC5226] Narten, T. and H. Alvestrand, "Guidelines for Writing an
+ IANA Considerations Section in RFCs", BCP 26, RFC 5226,
+ DOI 10.17487/RFC5226, May 2008,
+ <http://www.rfc-editor.org/info/rfc5226>.
+
+ [RFC6733] Fajardo, V., Ed., Arkko, J., Loughney, J., and G. Zorn,
+ Ed., "Diameter Base Protocol", RFC 6733,
+ DOI 10.17487/RFC6733, October 2012,
+ <http://www.rfc-editor.org/info/rfc6733>.
+
+11.2. Informative References
+
+ [Cx] 3GPP, "Cx and Dx interfaces based on the Diameter
+ protocol; Protocol details", 3GPP TS 29.229 12.7.0,
+ September 2015.
+
+ [PCC] 3GPP, "Policy and charging control architecture", 3GPP
+ TS 23.203 12.10.0, September 2015.
+
+ [RFC4006] Hakala, H., Mattila, L., Koskinen, J-P., Stura, M., and J.
+ Loughney, "Diameter Credit-Control Application", RFC 4006,
+ DOI 10.17487/RFC4006, August 2005,
+ <http://www.rfc-editor.org/info/rfc4006>.
+
+ [RFC7068] McMurry, E. and B. Campbell, "Diameter Overload Control
+ Requirements", RFC 7068, DOI 10.17487/RFC7068, November
+ 2013, <http://www.rfc-editor.org/info/rfc7068>.
+
+ [S13] 3GPP, "Evolved Packet System (EPS); Mobility Management
+ Entity (MME) and Serving GPRS Support Node (SGSN) related
+ interfaces based on Diameter protocol", 3GPP TS 29.272
+ 12.8.0, September 2015.
+
+Appendix A. Issues Left for Future Specifications
+
+ The base solution for overload control does not cover all possible
+ use cases. A number of solution aspects were intentionally left for
+ future specification and protocol work. The following subsections
+ define some of the potential extensions to the DOIC solution.
+
+A.1. Additional Traffic Abatement Algorithms
+
+ This specification describes only means for a simple loss-based
+ algorithm. Future algorithms can be added using the designed
+ solution extension mechanism. The new algorithms need to be
+ registered with IANA. See Sections 7.2 and 9 for the required IANA
+ steps.
+
+A.2. Agent Overload
+
+ This specification focuses on Diameter endpoint (server or client)
+ overload. A separate extension will be required to outline the
+ handling of the case of agent overload.
+
+A.3. New Error Diagnostic AVP
+
+ This specification indicates the use of existing error messages when
+ nodes reject requests due to overload. There is an expectation that
+ additional error codes or AVPs will be defined in a separate
+ specification to indicate that overload was the reason for the
+ rejection of the message.
+
+Appendix B. Deployment Considerations
+
+ Non-supporting Agents
+
+ Because realm-routed requests in Diameter networks are handled
+ with server selection for the request done by an agent, network
+ operators should enable DOIC first at the agents that perform
+ server selection.
+
+ Topology-Hiding Interactions
+
+ There exist proxies that implement what is referred to as Topology
+ Hiding. This can include cases where the agent modifies the
+ Origin-Host in answer messages. The behavior of the DOIC solution
+ is not well understood when this happens. As such, the DOIC
+ solution does not address this scenario.
+
+ Inter-Realm/Administrative Domain Considerations
+
+ There are likely to be special considerations for handling DOIC
+ signaling across administrative boundaries. This includes
+ considerations for whether or not information included in the DOIC
+ signaling should be sent across those boundaries. In addition,
+ consideration should be taken as to whether or not a reacting node
+ in one realm can be trusted to implement the requested overload
+ abatement handling for overload reports received from a separately
+ administered realm.
+
+Appendix C. Considerations for Applications Integrating the DOIC
+ Solution
+
+ This section outlines considerations to be taken into account when
+ integrating the DOIC solution into Diameter applications.
+
+C.1. Application Classification
+
+ The following is a classification of Diameter applications and
+ request types. This discussion is meant to document factors that
+ play into decisions made by the Diameter entity responsible for
+ handling overload reports.
+
+ Section 8.1 of [RFC6733] defines two state machines that imply two
+ types of applications, session-less and session-based applications.
+ The primary difference between these types of applications is the
+ lifetime of Session-Ids.
+
+ For session-based applications, the Session-Id is used to tie
+ multiple requests into a single session.
+
+ The Credit-Control application defined in [RFC4006] is an example of
+ a Diameter session-based application.
+
+ In session-less applications, the lifetime of the Session-Id is a
+ single Diameter transaction, i.e., the session is implicitly
+ terminated after a single Diameter transaction and a new Session-Id
+ is generated for each Diameter request.
+
+
+ For the purposes of this discussion, session-less applications are
+ further divided into two types of applications:
+
+ Stateless Applications:
+
+ Requests within a stateless application have no relationship to
+ each other. The 3GPP-defined S13 application is an example of a
+ stateless application [S13], where only a Diameter command is
+ defined between a client and a server and no state is maintained
+ between two consecutive transactions.
+
+ Pseudo-Session Applications:
+
+ Applications that do not rely on the Session-Id AVP for
+ correlation of application messages related to the same session
+ but use other session-related information in the Diameter requests
+ for this purpose. The 3GPP-defined Cx application [Cx] is an
+ example of a pseudo-session application.
+
+ The handling of overload reports must take the type of application
+ into consideration, as discussed in Appendix C.2.
+
+C.2. Implications of Application Type Overload
+
+ This section discusses considerations for mitigating overload
+ reported by a Diameter entity. This discussion focuses on the type
+ of application. Appendix C.3 discusses considerations for handling
+ various request types when the target server is known to be in an
+ overloaded state.
+
+ These discussions assume that the strategy for mitigating the
+ reported overload is to reduce the overall workload sent to the
+ overloaded entity. The concept of applying overload treatment to
+ requests targeted for an overloaded Diameter entity is inherent to
+ this discussion. The method used to reduce offered load is not
+ specified here, but it could include routing requests to another
+ Diameter entity known to be able to handle them, or it could mean
+ rejecting certain requests. For a Diameter Agent, rejecting requests
+ will usually mean generating appropriate Diameter error responses.
+ For a Diameter client, rejecting requests will depend upon the
+ application. For example, it could mean giving an indication to the
+ entity requesting the Diameter service that the network is busy and
+ to try again later.
+
+ Stateless Applications:
+
+ By definition, there is no relationship between individual
+ requests in a stateless application. As a result, when a request
+ is sent or relayed to an overloaded Diameter entity -- either a
+ Diameter Server or a Diameter Agent -- the sending or relaying
+ entity can choose to apply the overload treatment to any request
+ targeted for the overloaded entity.
+
+ Pseudo-session Applications:
+
+ For pseudo-session applications, there is an implied ordering of
+ requests. As a result, decisions about which requests towards an
+ overloaded entity to reject could take the command code of the
+ request into consideration. This generally means that
+ transactions later in the sequence of transactions should be given
+ more favorable treatment than messages earlier in the sequence.
+ This is because more work has already been done by the Diameter
+ network for those transactions that occur later in the sequence.
+ Rejecting them could result in increasing the load on the network
+ as the transactions earlier in the sequence might also need to be
+ repeated.
+
+ Session-Based Applications:
+
+ Overload handling for session-based applications must take into
+ consideration the work load associated with setting up and
+ maintaining a session. As such, the entity sending requests
+ towards an overloaded Diameter entity for a session-based
+ application might tend to reject new session requests prior to
+ rejecting intra-session requests. In addition, session-ending
+ requests might be given a lower probability of being rejected, as
+ rejecting session-ending requests could result in session status
+ being out of sync between the Diameter clients and servers.
+ Application designers that would decide to reject mid-session
+ requests will need to consider whether the rejection invalidates
+ the session and any resulting session cleanup procedures.
+
+C.3. Request Transaction Classification
+
+ Independent Request:
+
+ An independent request is not correlated to any other requests,
+ and, as such, the lifetime of the Session-Id is constrained to an
+ individual transaction.
+
+ Session-Initiating Request:
+
+ A session-initiating request is the initial message that
+ establishes a Diameter session. The ACR message defined in
+ [RFC6733] is an example of a session-initiating request.
+
+ Correlated Session-Initiating Request:
+
+ There are cases when multiple session-initiating requests must be
+ correlated and managed by the same Diameter server. It is notably
+ the case in the 3GPP Policy and Charging Control (PCC)
+ architecture [PCC], where multiple apparently independent Diameter
+ application sessions are actually correlated and must be handled
+ by the same Diameter server.
+
+ Intra-session Request:
+
+ An intra-session request is a request that uses the same Session-
+ Id as the one used in a previous request. An intra-session
+ request generally needs to be delivered to the server that handled
+ the session-creating request for the session. The STR message
+ defined in [RFC6733] is an example of an intra-session request.
+
+ Pseudo-session Requests:
+
+ Pseudo-session requests are independent requests and do not use
+ the same Session-Id but are correlated by other session-related
+ information contained in the request. There exist Diameter
+ applications that define an expected ordering of transactions.
+ This sequencing of independent transactions results in a pseudo-
+ session. The AIR, MAR, and SAR requests in the 3GPP-defined Cx
+ [Cx] application are examples of pseudo-session requests.
+
+C.4. Request Type Overload Implications
+
+ The request classes identified in Appendix C.3 have implications on
+ decisions about which requests should be throttled first. The
+ following list of request treatments regarding throttling is provided
+ as guidelines for application designers when implementing the
+ Diameter overload control mechanism described in this document. The
+ exact behavior regarding throttling is a matter of local policy,
+ unless specifically defined for the application.
+
+ Independent Requests:
+
+ Independent requests can generally be given equal treatment when
+ making throttling decisions, unless otherwise indicated by
+ application requirements or local policy.
+
+
+ Session-Initiating Requests:
+
+ Session-initiating requests often represent more work than
+ independent or intra-session requests. Moreover, session-
+ initiating requests are typically followed by other session-
+ related requests. Since the main objective of overload control is
+ to reduce the total number of requests sent to the overloaded
+ entity, throttling decisions might favor allowing intra-session
+ requests over session-initiating requests. In the absence of
+ local policies or application-specific requirements to the
+ contrary, individual session-initiating requests can be given
+ equal treatment when making throttling decisions.
+
+ Correlated Session-Initiating Requests:
+
+ A request that results in a new binding, where the binding is used
+ for routing of subsequent session-initiating requests to the same
+ server, represents more workload than other requests. As
+ such, these requests might be throttled more frequently than other
+ request types.
+
+ Pseudo-session Requests:
+
+ Throttling decisions for pseudo-session requests can take into
+ consideration where individual requests fit into the overall
+ sequence of requests within the pseudo-session. Requests that are
+ earlier in the sequence might be throttled more aggressively than
+ requests that occur later in the sequence.
+
+ Intra-session Requests:
+
+ There are two types of intra-session requests: requests that
+ terminate a session and the remainder of intra-session requests.
+ Implementers and operators may choose to throttle session-
+ terminating requests less aggressively in order to gracefully
+ terminate sessions, allow cleanup of the related resources (e.g.,
+ session state), and avoid the need for additional intra-session
+ requests. Favoring session termination requests may reduce the
+ session management impact on the overloaded entity. The default
+ handling of other intra-session requests might be to treat them
+ equally when making throttling decisions. There might also be
+ application-level considerations whether some request types are
+ favored over others.
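+
+ As a non-normative illustration, the relative preferences discussed
+ above might be captured as a simple ranking, where a higher value
+ means "throttle earlier"; independent and pseudo-session requests are
+ omitted because their treatment is equal or position-dependent, and
+ the function name is hypothetical:
+
+    throttle_preference(correlated_session_initiating) -> 3;
+    throttle_preference(session_initiating)            -> 2;
+    throttle_preference(intra_session)                 -> 1;
+    throttle_preference(session_terminating)           -> 0.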
+
+Contributors
+
+ The following people contributed substantial ideas, feedback, and
+ discussion to this document:
+
+ o Eric McMurry
+
+ o Hannes Tschofenig
+
+ o Ulrich Wiehe
+
+ o Jean-Jacques Trottin
+
+ o Maria Cruz Bartolome
+
+ o Martin Dolly
+
+ o Nirav Salot
+
+ o Susan Shishufeng
+
+
+Authors' Addresses
+
+ Jouni Korhonen (editor)
+ Broadcom Corporation
+ 3151 Zanker Road
+ San Jose, CA 95134
+ United States
+
+
+
+ Steve Donovan (editor)
+ Oracle
+ 7460 Warren Parkway
+ Frisco, Texas 75034
+ United States
+
+
+
+ Ben Campbell
+ Oracle
+ 7460 Warren Parkway
+ Frisco, Texas 75034
+ United States
+
+
+
+ Lionel Morand
+ Orange Labs
+ 38/40 rue du General Leclerc
+ Issy-Les-Moulineaux Cedex 9 92794
+ France
+
+ Phone: +33145296257
+
diff --git a/lib/diameter/examples/code/client.erl b/lib/diameter/examples/code/client.erl
index 6fb90b1c09..0864919cdd 100644
--- a/lib/diameter/examples/code/client.erl
+++ b/lib/diameter/examples/code/client.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -39,7 +39,6 @@
-module(client).
-include_lib("diameter/include/diameter.hrl").
--include_lib("diameter/include/diameter_gen_base_rfc6733.hrl").
-export([start/1, %% start a service
start/2, %%
@@ -71,6 +70,7 @@
{'Product-Name', "Client"},
{'Auth-Application-Id', [0]},
{string_decode, false},
+ {decode_format, map},
{application, [{alias, common},
{dictionary, diameter_gen_base_rfc6733},
{module, client_cb}]}]).
@@ -108,9 +108,9 @@ connect(T) ->
call(Name) ->
SId = diameter:session_id(?L(Name)),
- RAR = #diameter_base_RAR{'Session-Id' = SId,
- 'Auth-Application-Id' = 0,
- 'Re-Auth-Request-Type' = 0},
+ RAR = ['RAR' | #{'Session-Id' => SId,
+ 'Auth-Application-Id' => 0,
+ 'Re-Auth-Request-Type' => 0}],
diameter:call(Name, common, RAR, []).
call() ->
diff --git a/lib/diameter/examples/code/client_cb.erl b/lib/diameter/examples/code/client_cb.erl
index ed1d3b9b7b..af2d4d6da7 100644
--- a/lib/diameter/examples/code/client_cb.erl
+++ b/lib/diameter/examples/code/client_cb.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -55,21 +55,18 @@ prepare_request(#diameter_packet{msg = ['RAR' = T | Avps]}, _, {_, Caps}) ->
origin_realm = {OR, DR}}
= Caps,
- {send, [T, {'Origin-Host', OH},
- {'Origin-Realm', OR},
- {'Destination-Host', DH},
- {'Destination-Realm', DR}
- | Avps]};
-
-prepare_request(#diameter_packet{msg = Rec}, _, {_, Caps}) ->
- #diameter_caps{origin_host = {OH, DH},
- origin_realm = {OR, DR}}
- = Caps,
-
- {send, Rec#diameter_base_RAR{'Origin-Host' = OH,
- 'Origin-Realm' = OR,
- 'Destination-Host' = DH,
- 'Destination-Realm' = DR}}.
+ {send, [T | if is_map(Avps) ->
+ Avps#{'Origin-Host' => OH,
+ 'Origin-Realm' => OR,
+ 'Destination-Host' => DH,
+ 'Destination-Realm' => DR};
+ is_list(Avps) ->
+ [{'Origin-Host', OH},
+ {'Origin-Realm', OR},
+ {'Destination-Host', DH},
+ {'Destination-Realm', DR}
+ | Avps]
+ end]}.
%% prepare_retransmit/3
diff --git a/lib/diameter/examples/code/node.erl b/lib/diameter/examples/code/node.erl
index 246be4194b..fc5830f8e2 100644
--- a/lib/diameter/examples/code/node.erl
+++ b/lib/diameter/examples/code/node.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2016. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -30,6 +30,8 @@
connect/2,
stop/1]).
+-export([message/3]).
+
-type protocol()
:: tcp | sctp.
@@ -128,6 +130,8 @@ stop(Name) ->
server_opts({T, Addr, Port}) ->
[{transport_module, tmod(T)},
{transport_config, [{reuseaddr, true},
+ {sender, true},
+ {message_cb, [fun ?MODULE:message/3, 0]},
{ip, addr(Addr)},
{port, Port}]}];
@@ -173,3 +177,26 @@ addr(loopback) ->
{127,0,0,1};
addr(A) ->
A.
+
+%% ---------------------------------------------------------------------------
+
+%% message/3
+%%
+%% Simple message callback that limits the number of concurrent
+%% requests on the peer connection in question.
+
+%% Incoming request.
+message(recv, <<_:32, 1:1, _/bits>> = Bin, N) ->
+ [Bin, N < 32, fun ?MODULE:message/3, N+1];
+
+%% Outgoing request.
+message(ack, <<_:32, 1:1, _/bits>>, _) ->
+ [];
+
+%% Incoming answer or request discarded.
+message(ack, _, N) ->
+ [N =< 32, fun ?MODULE:message/3, N-1];
+
+%% Outgoing message or incoming answer.
+message(_, Bin, _) ->
+ [Bin].
diff --git a/lib/diameter/include/diameter_gen.hrl b/lib/diameter/include/diameter_gen.hrl
index fb6370fe54..548763ec7d 100644
--- a/lib/diameter/include/diameter_gen.hrl
+++ b/lib/diameter/include/diameter_gen.hrl
@@ -26,13 +26,13 @@
%% encode_avps/3
-encode_avps(Name, Vals, Opts) ->
- diameter_gen:encode_avps(Name, Vals, Opts#{module => ?MODULE}).
+encode_avps(Name, Avps, Opts) ->
+ diameter_gen:encode_avps(Name, Avps, Opts#{module => ?MODULE}).
%% decode_avps/2
-decode_avps(Name, Recs, Opts) ->
- diameter_gen:decode_avps(Name, Recs, Opts#{module => ?MODULE}).
+decode_avps(Name, Avps, Opts) ->
+ diameter_gen:decode_avps(Name, Avps, Opts#{module => ?MODULE}).
%% avp/5
diff --git a/lib/diameter/src/Makefile b/lib/diameter/src/Makefile
index 6bf748a727..3af856f63e 100644
--- a/lib/diameter/src/Makefile
+++ b/lib/diameter/src/Makefile
@@ -1,7 +1,7 @@
#
# %CopyrightBegin%
#
-# Copyright Ericsson AB 2010-2016. All Rights Reserved.
+# Copyright Ericsson AB 2010-2017. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -274,9 +274,7 @@ gen/diameter_gen_base_accounting.erl gen/diameter_gen_base_accounting.hrl: \
gen/diameter_gen_acct_rfc6733.erl gen/diameter_gen_acct_rfc6733.hrl: \
$(EBIN)/diameter_gen_base_rfc6733.$(EMULATOR)
-gen/diameter_gen_relay.erl gen/diameter_gen_relay.hrl \
-gen/diameter_gen_base_rfc3588.erl gen/diameter_gen_base_rfc3588.hrl \
-gen/diameter_gen_base_rfc6733.erl gen/diameter_gen_base_rfc6733.hrl: \
+$(DICT_ERLS) $(DICT_HRLS): \
$(COMPILER_MODULES:%=$(EBIN)/%.$(EMULATOR))
$(DICT_MODULES:gen/%=$(EBIN)/%.$(EMULATOR)): \
diff --git a/lib/diameter/src/base/diameter.erl b/lib/diameter/src/base/diameter.erl
index bd92e16fba..b90b794611 100644
--- a/lib/diameter/src/base/diameter.erl
+++ b/lib/diameter/src/base/diameter.erl
@@ -46,7 +46,10 @@
-export([start/0,
stop/0]).
--export_type([evaluable/0,
+-export_type([eval/0,
+ evaluable/0, %% deprecated
+ decode_format/0,
+ strict_arities/0,
restriction/0,
message_length/0,
remotes/0,
@@ -299,7 +302,7 @@ call(SvcName, App, Message) ->
| realm
| {host, any|'DiameterIdentity'()}
| {realm, any|'DiameterIdentity'()}
- | {eval, evaluable()}
+ | {eval, eval()}
| {neg, peer_filter()}
| {all, [peer_filter()]}
| {any, [peer_filter()]}.
@@ -307,10 +310,13 @@ call(SvcName, App, Message) ->
-opaque peer_ref()
:: pid().
--type evaluable()
+-type eval()
:: {module(), atom(), list()}
| fun()
- | maybe_improper_list(evaluable(), list()).
+ | maybe_improper_list(eval(), list()).
+
+-type evaluable()
+ :: eval().
-type sequence()
:: {'Unsigned32'(), 0..32}.
@@ -320,29 +326,61 @@ call(SvcName, App, Message) ->
| node
| nodes
| [node()]
- | evaluable().
+ | eval().
-type remotes()
:: boolean()
| [node()]
- | evaluable().
+ | eval().
-type message_length()
:: 0..16#FFFFFF.
+-type decode_format()
+ :: record
+ | list
+ | map
+ | none
+ | record_from_map.
+
+-type strict_arities()
+ :: false
+ | encode
+ | decode.
+
+%% Options common to both start_service/2 and add_transport/2.
+
+-type common_opt()
+ :: {pool_size, pos_integer()}
+ | {capabilities_cb, eval()}
+ | {capx_timeout, 'Unsigned32'()}
+ | {strict_capx, boolean()}
+ | {strict_mbit, boolean()}
+ | {avp_dictionaries, [module()]}
+ | {disconnect_cb, eval()}
+ | {dpr_timeout, 'Unsigned32'()}
+ | {dpa_timeout, 'Unsigned32'()}
+ | {incoming_maxlen, message_length()}
+ | {length_errors, exit | handle | discard}
+ | {connect_timer, 'Unsigned32'()}
+ | {watchdog_timer, 'Unsigned32'() | {module(), atom(), list()}}
+ | {watchdog_config, [{okay|suspect, non_neg_integer()}]}
+ | {spawn_opt, list()}.
+
%% Options passed to start_service/2
-type service_opt()
:: capability()
| {application, [application_opt()]}
| {restrict_connections, restriction()}
- | {sequence, sequence() | evaluable()}
+ | {sequence, sequence() | eval()}
| {share_peers, remotes()}
+ | {decode_format, decode_format()}
+ | {traffic_counters, boolean()}
| {string_decode, boolean()}
- | {strict_mbit, boolean()}
- | {incoming_maxlen, message_length()}
+ | {strict_arities, true | strict_arities()}
| {use_shared_peers, remotes()}
- | {spawn_opt, list()}.
+ | common_opt().
-type application_opt()
:: {alias, app_alias()}
@@ -372,20 +410,9 @@ call(SvcName, App, Message) ->
:: {transport_module, atom()}
| {transport_config, any()}
| {transport_config, any(), 'Unsigned32'() | infinity}
- | {pool_size, pos_integer()}
| {applications, [app_alias()]}
| {capabilities, [capability()]}
- | {capabilities_cb, evaluable()}
- | {capx_timeout, 'Unsigned32'()}
- | {capx_strictness, boolean()}
- | {disconnect_cb, evaluable()}
- | {dpr_timeout, 'Unsigned32'()}
- | {dpa_timeout, 'Unsigned32'()}
- | {length_errors, exit | handle | discard}
- | {connect_timer, 'Unsigned32'()}
- | {watchdog_timer, 'Unsigned32'() | {module(), atom(), list()}}
- | {watchdog_config, [{okay|suspect, non_neg_integer()}]}
- | {spawn_opt, list()}
+ | common_opt()
| {private, any()}.
%% Predicate passed to remove_transport/2
diff --git a/lib/diameter/src/base/diameter_callback.erl b/lib/diameter/src/base/diameter_callback.erl
index f9cdc66c70..d04a416bef 100644
--- a/lib/diameter/src/base/diameter_callback.erl
+++ b/lib/diameter/src/base/diameter_callback.erl
@@ -26,16 +26,16 @@
%% as the Diameter application callback in question. The record has
%% one field for each callback function as well as 'default' and
%% 'extra' fields. A function-specific field can be set to a
-%% diameter:evaluable() in order to redirect the callback
+%% diameter:eval() in order to redirect the callback
%% corresponding to that field, or to 'false' to request the default
%% callback implemented in this module. If neither of these fields are
%% set then the 'default' field determines the form of the callback: a
%% module name results in the usual callback as if the module had been
-%% configured directly as the callback module, a diameter_evaluable()
+%% configured directly as the callback module, a diameter_eval()
%% in a callback applied to the atom-valued callback name and argument
%% list. For all callbacks not to this module, the 'extra' field is a
%% list of additional arguments, following arguments supplied by
-%% diameter but preceding those of the diameter:evaluable() being
+%% diameter but preceding those of the diameter:eval() being
%% applied.
%%
%% For example, the following config to diameter:start_service/2, in
diff --git a/lib/diameter/src/base/diameter_codec.erl b/lib/diameter/src/base/diameter_codec.erl
index 82fa796e69..2dd2c906a2 100644
--- a/lib/diameter/src/base/diameter_codec.erl
+++ b/lib/diameter/src/base/diameter_codec.erl
@@ -29,7 +29,7 @@
msg_name/2,
msg_id/1]).
-%% Towards generated encoders (from diameter_gen.hrl).
+%% towards diameter_gen
-export([pack_data/2,
pack_avp/2]).
@@ -110,7 +110,7 @@ encode(Mod, Opts, Msg) ->
enc(_, Opts, #diameter_packet{msg = [#diameter_header{} = Hdr | As]}
= Pkt) ->
- try encode_avps(reorder(As), Opts) of
+ try encode_avps(As, Opts) of
Avps ->
Bin = list_to_binary(Avps),
Len = 20 + size(Bin),
@@ -206,51 +206,12 @@ values(Avps) ->
%% Message as a list of #diameter_avp{} ...
encode_avps(_, _, [#diameter_avp{} | _] = Avps, Opts) ->
- encode_avps(reorder(Avps), Opts);
+ encode_avps(Avps, Opts);
%% ... or as a tuple list or record.
encode_avps(Mod, MsgName, Values, Opts) ->
Mod:encode_avps(MsgName, Values, Opts).
-%% reorder/1
-%%
-%% Reorder AVPs for the relay case using the index field of
-%% diameter_avp records. Decode populates this field in collect_avps
-%% and presents AVPs in reverse order. A relay then sends the reversed
-%% list with a Route-Record AVP prepended. The goal here is just to do
-%% lists:reverse/1 in Grouped AVPs and the outer list, but only in the
-%% case there are indexed AVPs at all, so as not to reverse lists that
-%% have been explicilty sent (unindexed, in the desired order) as a
-%% diameter_avp list. The effect is the same as lists:keysort/2, but
-%% only on the cases we expect, not a general sort.
-
-reorder(Avps) ->
- case reorder(Avps, []) of
- false ->
- Avps;
- Sorted ->
- Sorted
- end.
-
-%% reorder/3
-
-%% In case someone has reversed the list already. (Not likely.)
-reorder([#diameter_avp{index = 0} | _] = Avps, Acc) ->
- Avps ++ Acc;
-
-%% Assume indexed AVPs are in reverse order.
-reorder([#diameter_avp{index = N} = A | Avps], Acc)
- when is_integer(N) ->
- lists:reverse(Avps, [A | Acc]);
-
-%% An unindexed AVP.
-reorder([H | T], Acc) ->
- reorder(T, [H | Acc]);
-
-%% No indexed members.
-reorder([], _) ->
- false.
-
%% encode_avps/2
encode_avps(Avps, Opts) ->
@@ -287,7 +248,8 @@ rec2msg(Mod, Rec) ->
%% longer *the* decode.
decode(Mod, Pkt) ->
- Opts = #{string_decode => true,
+ Opts = #{decode_format => record,
+ string_decode => true,
strict_mbit => true,
rfc => 6733},
decode(Mod, Opts, Pkt).
@@ -326,13 +288,7 @@ decode(Mod, AppMod, Opts, Pkt) ->
%% Relay application: just extract the avp's without any decoding of
%% their data since we don't know the application in question.
decode(?APP_ID_RELAY, _, _, _, #diameter_packet{} = Pkt) ->
- case collect_avps(Pkt) of
- {E, As} ->
- Pkt#diameter_packet{avps = As,
- errors = [E]};
- As ->
- Pkt#diameter_packet{avps = As}
- end;
+ collect_avps(Pkt);
%% Otherwise decode using the dictionary.
decode(_, Mod, AppMod, Opts, #diameter_packet{header = Hdr} = Pkt) ->
@@ -341,44 +297,50 @@ decode(_, Mod, AppMod, Opts, #diameter_packet{header = Hdr} = Pkt) ->
is_error = IsError}
= Hdr,
- MsgName = if IsError andalso not IsRequest ->
+ MsgName = if IsError, not IsRequest ->
'answer-message';
true ->
Mod:msg_name(CmdCode, IsRequest)
end,
- decode_avps(MsgName, Mod, AppMod, Opts, Pkt, collect_avps(Pkt));
+ decode_avps(MsgName, Mod, AppMod, Opts, Pkt);
decode(Id, Mod, AppMod, Opts, Bin)
when is_binary(Bin) ->
decode(Id, Mod, AppMod, Opts, #diameter_packet{header = decode_header(Bin),
bin = Bin}).
-%% decode_avps/6
-
-decode_avps(MsgName, Mod, AppMod, Opts, Pkt, {E, Avps}) ->
- ?LOG(invalid_avp_length, Pkt#diameter_packet.header),
- #diameter_packet{errors = Failed}
- = P
- = decode_avps(MsgName, Mod, AppMod, Opts, Pkt, Avps),
- P#diameter_packet{errors = [E | Failed]};
+%% decode_avps/5
-decode_avps('', _, _, _, Pkt, Avps) -> %% unknown message ...
- ?LOG(unknown_message, Pkt#diameter_packet.header),
- Pkt#diameter_packet{avps = lists:reverse(Avps),
- errors = [3001]}; %% DIAMETER_COMMAND_UNSUPPORTED
+decode_avps('', _, _, _, #diameter_packet{header = H, %% unknown message
+ bin = Bin}
+ = Pkt) ->
+ ?LOG(unknown_message, H),
+ Pkt#diameter_packet{avps = collect_avps(Bin),
+ errors = [3001]}; %% DIAMETER_COMMAND_UNSUPPORTED
%% msg = undefined identifies this case.
-decode_avps(MsgName, Mod, AppMod, Opts, Pkt, Avps) -> %% ... or not
+decode_avps(MsgName, Mod, AppMod, Opts, #diameter_packet{bin = Bin} = Pkt) ->
+ {_, Avps} = split_binary(Bin, 20),
{Rec, As, Errors} = Mod:decode_avps(MsgName,
Avps,
- Opts#{dictionary => AppMod,
+ Opts#{app_dictionary => AppMod,
failed_avp => false}),
?LOGC([] /= Errors, decode_errors, Pkt#diameter_packet.header),
- Pkt#diameter_packet{msg = Rec,
+ Pkt#diameter_packet{msg = reformat(MsgName, Rec, Opts),
errors = Errors,
avps = As}.
+%% reformat/3
+
+reformat(MsgName, Avps, #{decode_format := T})
+ when T == map;
+ T == list ->
+ [MsgName | Avps];
+
+reformat(_, Msg, _) ->
+ Msg.
+
%%% ---------------------------------------------------------------------------
%%% # decode_header/1
%%% ---------------------------------------------------------------------------
@@ -515,24 +477,21 @@ msg_id(<<_:32, Rbit:1, _:7, CmdCode:24, ApplId:32, _/binary>>) ->
%%% # collect_avps/1
%%% ---------------------------------------------------------------------------
-%% Note that the returned list of AVP's is reversed relative to their
-%% order in the binary. Note also that grouped avp's aren't unraveled,
-%% only those at the top level.
+%% This is only used for the relay decode. Note that grouped avp's
+%% aren't unraveled, only those at the top level.
--spec collect_avps(#diameter_packet{} | binary())
- -> [Avp]
- | {Error, [Avp]}
- when Avp :: #diameter_avp{},
- Error :: {5014, #diameter_avp{}}.
+-spec collect_avps(#diameter_packet{})
+ -> #diameter_packet{};
+ (binary())
+ -> [#diameter_avp{}].
-collect_avps(#diameter_packet{bin = <<_:20/binary, Avps/binary>>}) ->
- collect_avps(Avps, 0, []);
+collect_avps(#diameter_packet{bin = Bin} = Pkt) ->
+ Pkt#diameter_packet{avps = collect_avps(Bin)};
-collect_avps(Bin)
- when is_binary(Bin) ->
- collect_avps(Bin, 0, []).
+collect_avps(<<_:20/binary, Avps/binary>>) ->
+ collect(Avps).
-%% collect_avps/3
+%% collect/1
%% 0 1 2 3
%% 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
@@ -546,66 +505,48 @@ collect_avps(Bin)
%% | Data ...
%% +-+-+-+-+-+-+-+-+
-collect_avps(<<Code:32, V:1, M:1, P:1, _:5, Len:24, I:V/unit:32, Rest/binary>>,
- N,
- Acc) ->
- collect_avps(Code,
- if 1 == V -> I; 0 == V -> undefined end,
- 1 == M,
- 1 == P,
- Len - 8 - V*4, %% Might be negative, which ensures
- ?PAD(Len), %% failure of the Data match below.
- Rest,
- N,
- Acc);
-
-collect_avps(<<>>, _, Acc) ->
- Acc;
+collect(<<Code:32, V:1, M:1, P:1, _:5, Len:24, I:V/unit:32, Rest/binary>>) ->
+ collect(Rest,
+ Code,
+ if 1 == V -> I; 0 == V -> undefined end,
+ Len - 8 - V*4, %% Might be negative, which ensures
+ ?PAD(Len), %% failure of the match below.
+ 1 == M,
+ 1 == P);
+
+collect(<<>>) ->
+ [];
%% Header is truncated. pack_avp/1 will pad this at encode if sent in
%% a Failed-AVP.
-collect_avps(Bin, _, Acc) ->
- {{5014, #diameter_avp{data = Bin}}, Acc}.
+collect(Bin) ->
+ [#diameter_avp{data = {5014, Bin}}].
-%% collect_avps/9
+%% collect/7
-%% Duplicate the diameter_avp creation in each branch below to avoid
-%% modifying the record, which profiling has shown to be a relatively
-%% costly part of building the list.
-
-collect_avps(Code, VendorId, M, P, Len, Pad, Rest, N, Acc) ->
- case Rest of
- <<Data:Len/binary, _:Pad/binary, T/binary>> ->
+collect(Bin, Code, Vid, DataLen, Pad, M, P) ->
+ case Bin of
+ <<Data:DataLen/binary, _:Pad/binary, Rest/binary>> ->
Avp = #diameter_avp{code = Code,
- vendor_id = VendorId,
+ vendor_id = Vid,
is_mandatory = M,
need_encryption = P,
- data = Data,
- index = N},
- collect_avps(T, N+1, [Avp | Acc]);
+ data = Data},
+ [Avp | collect(Rest)];
_ ->
%% Length in header points past the end of the message, or
- %% doesn't span the header. As stated in the 6733 text
- %% above, it's sufficient to return a zero-filled minimal
- %% payload if this is a request. Do this (in cases that we
- %% know the type) by inducing a decode failure and letting
- %% the dictionary's decode (in diameter_gen) deal with it.
- %%
- %% Note that the extra bit can only occur in the trailing
- %% AVP of a message or Grouped AVP, since a faulty AVP
- %% Length is otherwise indistinguishable from a correct
- %% one here, as we don't know the types of the AVPs being
- %% extracted.
- Avp = #diameter_avp{code = Code,
- vendor_id = VendorId,
- is_mandatory = M,
- need_encryption = P,
- data = {5014, Rest},
- index = N},
- [Avp | Acc]
+ %% doesn't span the header. Note that a length error can
+ %% only occur in the trailing AVP of a message or Grouped
+ %% AVP, since a faulty AVP Length is otherwise
+ %% indistinguishable from a correct one here, as we don't
+ %% know the types of the AVPs being extracted.
+ [#diameter_avp{code = Code,
+ vendor_id = Vid,
+ is_mandatory = M,
+ need_encryption = P,
+ data = {5014, Bin}}]
end.
-
%% 3588:
%%
%% DIAMETER_INVALID_AVP_LENGTH 5014
@@ -673,8 +614,8 @@ pack_avp(#diameter_avp{data = {T, {Type, Value}}}, Opts) ->
pack_avp(#diameter_avp{data = {T, Data}}, _) ->
pack_data(T, Data);
-pack_avp(#diameter_avp{data = {Dict, Name, Data}}, Opts) ->
- pack_data(Dict:avp_header(Name), Dict:avp(encode, Data, Name, Opts));
+pack_avp(#diameter_avp{data = {Dict, Name, Value}}, Opts) ->
+ pack_data(Dict:avp_header(Name), Dict:avp(encode, Value, Name, Opts));
%% ... with a truncated header ...
pack_avp(#diameter_avp{code = undefined, data = B}, _)
diff --git a/lib/diameter/src/base/diameter_config.erl b/lib/diameter/src/base/diameter_config.erl
index 34018ae6d3..90a9282349 100644
--- a/lib/diameter/src/base/diameter_config.erl
+++ b/lib/diameter/src/base/diameter_config.erl
@@ -102,9 +102,6 @@
-record(monitor, {mref = make_ref() :: reference(),
service}). %% name
-%% The default sequence mask.
--define(NOMASK, {0,32}).
-
%% Time to lay low before restarting a dead service.
-define(RESTART_SLEEP, 2000).
@@ -560,87 +557,186 @@ add(SvcName, Type, Opts0) ->
end.
transport_opts(Opts) ->
- lists:map(fun topt/1, Opts).
+ [setopt(transport, T) || T <- Opts].
+
+%% setopt/2
-topt(T) ->
- case opt(T) of
+setopt(K, T) ->
+ case opt(K, T) of
{value, X} ->
X;
true ->
T;
false ->
- ?THROW({invalid, T})
+ ?THROW({invalid, T});
+ {error, Reason} ->
+ ?THROW({invalid, T, Reason})
end.
-opt({transport_module, M}) ->
+%% opt/2
+
+opt(_, {incoming_maxlen, N}) ->
+ is_integer(N) andalso 0 =< N andalso N < 1 bsl 24;
+
+opt(service, {K, B})
+ when K == string_decode;
+ K == traffic_counters ->
+ is_boolean(B);
+
+opt(service, {K, false})
+ when K == share_peers;
+ K == use_shared_peers;
+ K == monitor;
+ K == restrict_connections;
+ K == strict_arities ->
+ true;
+
+opt(service, {K, true})
+ when K == share_peers;
+ K == use_shared_peers;
+ K == strict_arities ->
+ true;
+
+opt(service, {decode_format, T})
+ when T == record;
+ T == list;
+ T == map;
+ T == none;
+ T == record_from_map ->
+ true;
+
+opt(service, {strict_arities, T})
+ when T == encode;
+ T == decode ->
+ true;
+
+opt(service, {restrict_connections, T})
+ when T == node;
+ T == nodes ->
+ true;
+
+opt(service, {K, T})
+ when (K == share_peers
+ orelse K == use_shared_peers
+ orelse K == restrict_connections), ([] == T
+ orelse is_atom(hd(T))) ->
+ true;
+
+opt(service, {monitor, P}) ->
+ is_pid(P);
+
+opt(service, {K, F})
+ when K == restrict_connections;
+ K == share_peers;
+ K == use_shared_peers ->
+ try diameter_lib:eval(F) of %% but no guarantee that it won't fail later
+ Nodes ->
+ is_list(Nodes) orelse {error, Nodes}
+ catch
+ E:R ->
+ {error, {E, R, ?STACK}}
+ end;
+
+opt(service, {sequence, {H,N}}) ->
+ 0 =< N andalso N =< 32
+ andalso is_integer(H)
+ andalso 0 =< H
+ andalso 0 == H bsr (32-N);
+
+opt(service = S, {sequence = K, F}) ->
+ try diameter_lib:eval(F) of
+ {_,_} = T ->
+ KT = {K,T},
+ opt(S, KT) andalso {value, KT};
+ V ->
+ {error, V}
+ catch
+ E:R ->
+ {error, {E, R, ?STACK}}
+ end;
+
+opt(transport, {transport_module, M}) ->
is_atom(M);
-opt({transport_config, _, Tmo}) ->
+opt(transport, {transport_config, _, Tmo}) ->
?IS_UINT32(Tmo) orelse Tmo == infinity;
-opt({applications, As}) ->
+opt(transport, {applications, As}) ->
is_list(As);
-opt({capabilities, Os}) ->
- is_list(Os) andalso ok == encode_CER(Os);
+opt(transport, {capabilities, Os}) ->
+ is_list(Os) andalso try ok = encode_CER(Os), true
+ catch ?FAILURE(No) -> {error, No}
+ end;
-opt({K, Tmo})
+opt(_, {K, Tmo})
when K == capx_timeout;
K == dpr_timeout;
K == dpa_timeout ->
?IS_UINT32(Tmo);
-opt({capx_strictness, B}) ->
+opt(_, {capx_strictness, B}) ->
+ is_boolean(B) andalso {value, {strict_capx, B}};
+opt(_, {K, B})
+ when K == strict_capx;
+ K == strict_mbit ->
is_boolean(B);
-opt({length_errors, T}) ->
+opt(_, {avp_dictionaries, Mods}) ->
+ is_list(Mods) andalso lists:all(fun erlang:is_atom/1, Mods);
+
+opt(_, {length_errors, T}) ->
lists:member(T, [exit, handle, discard]);
-opt({K, Tmo})
- when K == reconnect_timer; %% deprecated
- K == connect_timer ->
+opt(transport, {reconnect_timer, Tmo}) -> %% deprecated
+ ?IS_UINT32(Tmo) andalso {value, {connect_timer, Tmo}};
+opt(_, {connect_timer, Tmo}) ->
?IS_UINT32(Tmo);
-opt({watchdog_timer, {M,F,A}})
+opt(_, {watchdog_timer, {M,F,A}})
when is_atom(M), is_atom(F), is_list(A) ->
true;
-opt({watchdog_timer, Tmo}) ->
+opt(_, {watchdog_timer, Tmo}) ->
?IS_UINT32(Tmo);
-opt({watchdog_config, L}) ->
- is_list(L) andalso lists:all(fun wdopt/1, L);
+opt(_, {watchdog_config, L}) ->
+ is_list(L) andalso lists:all(fun wd/1, L);
-opt({spawn_opt, {M,F,A}})
+opt(_, {spawn_opt, {M,F,A}})
when is_atom(M), is_atom(F), is_list(A) ->
true;
-opt({spawn_opt = K, Opts}) ->
+opt(_, {spawn_opt = K, Opts}) ->
if is_list(Opts) ->
{value, {K, spawn_opts(Opts)}};
true ->
false
end;
-opt({pool_size, N}) ->
+opt(_, {pool_size, N}) ->
is_integer(N) andalso 0 < N;
-%% Options that we can't validate.
-opt({K, _})
+%% Options we can't validate.
+opt(_, {K, _})
+ when K == disconnect_cb;
+ K == capabilities_cb ->
+ true;
+opt(transport, {K, _})
when K == transport_config;
- K == capabilities_cb;
- K == disconnect_cb;
K == private ->
true;
-%% Anything else, which is ignored by us. This makes options sensitive
-%% to spelling mistakes but arbitrary options are passed by some users
-%% as a way to identify transports. (That is, can't just do away with
-%% it.)
-opt(_) ->
- true.
+%% Anything else, which is ignored in transport config. This makes
+%% options sensitive to spelling mistakes, but arbitrary options are
+%% passed by some users as a way to identify transports, so it
+%% can't just be done away with.
+opt(K, _) ->
+ K == transport.
+
+%% wd/1
-wdopt({K,N}) ->
+wd({K,N}) ->
(K == okay orelse K == suspect) andalso is_integer(N) andalso 0 =< N;
-wdopt(_) ->
+wd(_) ->
false.
%% start_transport/2
@@ -705,16 +801,7 @@ make_config(SvcName, Opts) ->
ok = encode_CER(CapOpts),
- SvcOpts = make_opts((Opts -- AppOpts) -- CapOpts,
- [{false, share_peers},
- {false, use_shared_peers},
- {false, monitor},
- {?NOMASK, sequence},
- {nodes, restrict_connections},
- {16#FFFFFF, incoming_maxlen},
- {true, strict_mbit},
- {true, string_decode},
- {[], spawn_opt}]),
+ SvcOpts = service_opts((Opts -- AppOpts) -- CapOpts),
D = proplists:get_value(string_decode, SvcOpts, true),
@@ -728,98 +815,22 @@ binary_caps(Caps, true) ->
binary_caps(Caps, false) ->
diameter_capx:binary_caps(Caps).
-%% make_opts/2
-
-make_opts(Opts, Defs) ->
- Known = [{K, get_opt(K, Opts, D)} || {D,K} <- Defs],
- Unknown = Opts -- Known,
-
- [] == Unknown orelse ?THROW({invalid, hd(Unknown)}),
+%% service_opts/1
- [{K, opt(K,V)} || {K,V} <- Known].
-
-opt(incoming_maxlen, N)
- when 0 =< N, N < 1 bsl 24 ->
- N;
-
-opt(spawn_opt, {M,F,A} = T)
- when is_atom(M), is_atom(F), is_list(A) ->
- T;
-
-opt(spawn_opt, L)
- when is_list(L) ->
- spawn_opts(L);
-
-opt(K, false = B)
- when K == share_peers;
- K == use_shared_peers;
- K == monitor;
- K == restrict_connections;
- K == strict_mbit;
- K == string_decode ->
- B;
-
-opt(K, true = B)
- when K == share_peers;
- K == use_shared_peers;
- K == strict_mbit;
- K == string_decode ->
- B;
-
-opt(restrict_connections, T)
- when T == node;
- T == nodes ->
- T;
-
-opt(K, T)
- when (K == share_peers
- orelse K == use_shared_peers
- orelse K == restrict_connections), ([] == T
- orelse is_atom(hd(T))) ->
- T;
-
-opt(monitor, P)
- when is_pid(P) ->
- P;
-
-opt(K, F)
- when K == restrict_connections;
- K == share_peers;
- K == use_shared_peers ->
- try diameter_lib:eval(F) of %% but no guarantee that it won't fail later
- Nodes when is_list(Nodes) ->
- F;
- V ->
- ?THROW({value, {K,V}})
- catch
- E:R ->
- ?THROW({value, {K, E, R, ?STACK}})
- end;
-
-opt(sequence, {_,_} = T) ->
- sequence(T);
-
-opt(sequence = K, F) ->
- try diameter_lib:eval(F) of
- T -> sequence(T)
- catch
- E:R ->
- ?THROW({value, {K, E, R, ?STACK}})
- end;
-
-opt(K, _) ->
- ?THROW({value, K}).
+service_opts(Opts) ->
+ Res = [setopt(service, T) || T <- Opts],
+ Keys = sets:to_list(sets:from_list([K || {K,_} <- Res])), %% unique
+ Dups = lists:foldl(fun(K,A) -> lists:keydelete(K, 1, A) end, Res, Keys),
+ [] == Dups orelse ?THROW({duplicate, Dups}),
+ Res.
+%% Reject duplicates on a service, but not on a transport. There's no
+%% particular reason for the inconsistency, but the historic behaviour
+%% ignores all but the first of a transport_opt(), and there's no real
+%% reason to change it.
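+%%
+%% For example, passing both {string_decode, true} and
+%% {string_decode, false} in the options of diameter:start_service/2
+%% is rejected with a {duplicate, [...]} error (a sketch; the exact
+%% term comes from setopt/2).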
spawn_opts(L) ->
[T || T <- L, T /= link, T /= monitor].
-sequence({H,N} = T)
- when 0 =< N, N =< 32, 0 =< H, 0 == H bsr (32-N) ->
- T;
-
-sequence(_) ->
- ?THROW({value, sequence}).
-
make_caps(Caps, Opts) ->
case diameter_capx:make_caps(Caps, Opts) of
{ok, T} ->
diff --git a/lib/diameter/src/base/diameter_gen.erl b/lib/diameter/src/base/diameter_gen.erl
index e832832876..d3b9f704fe 100644
--- a/lib/diameter/src/base/diameter_gen.erl
+++ b/lib/diameter/src/base/diameter_gen.erl
@@ -26,6 +26,14 @@
-module(diameter_gen).
+-compile({inline, [incr/8,
+ incr/4,
+ field/1,
+ setopts/4,
+ avp_arity/5,
+ set_failed/2,
+ set_strict/3]}).
+
-export([encode_avps/3,
decode_avps/3,
grouped_avp/4,
@@ -37,7 +45,7 @@
-define(THROW(T), throw({?MODULE, T})).
-type parent_name() :: atom(). %% parent = Message or AVP
--type parent_record() :: tuple(). %%
+-type parent_record() :: tuple() | avp_values() | map().
-type avp_name() :: atom().
-type avp_record() :: tuple().
-type avp_values() :: [{avp_name(), term()}].
@@ -46,17 +54,21 @@
-type grouped_avp() :: nonempty_improper_list(#diameter_avp{}, [avp()]).
-type avp() :: non_grouped_avp() | grouped_avp().
+%% The arbitrary arity returned from dictionary avp_arity functions.
+-define(ANY, {0, '*'}).
+
%% ---------------------------------------------------------------------------
%% # encode_avps/3
%% ---------------------------------------------------------------------------
--spec encode_avps(parent_name(), parent_record() | avp_values(), map())
+-spec encode_avps(parent_name(), parent_record(), map())
-> iolist()
| no_return().
encode_avps(Name, Vals, #{module := Mod} = Opts) ->
+ Strict = mget(strict_arities, Opts, encode),
try
- encode(Name, Vals, Opts, Mod)
+ encode(Name, Vals, Opts, Strict, Mod)
catch
throw: {?MODULE, Reason} ->
diameter_lib:log({encode, error},
@@ -73,128 +85,340 @@ encode_avps(Name, Vals, #{module := Mod} = Opts) ->
erlang:error({encode_failure, Reason, Name, Stack})
end.
-%% encode/4
+%% encode/5
-encode(Name, Vals, #{ordered_encode := false} = Opts, Mod)
+encode(Name, Vals, Opts, Strict, Mod)
when is_list(Vals) ->
- lists:map(fun({F,V}) -> encode(Name, F, V, Opts, Mod) end, Vals);
+ case Opts of
+ #{ordered_encode := false} ->
+ lists:map(fun({F,V}) -> encode(Name, F, V, Opts, Strict, Mod) end,
+ Vals);
+ _ ->
+ Rec = Mod:'#set-'(Vals, newrec(Mod, Name)),
+ encode(Name, Rec, Opts, Strict, Mod)
+ end;
-encode(Name, Vals, Opts, Mod)
- when is_list(Vals) ->
- encode(Name, Mod:'#set-'(Vals, newrec(Mod, Name)), Opts, Mod);
+encode(Name, Map, Opts, Strict, Mod)
+ when is_map(Map) ->
+ [enc(F, A, V, Opts, Strict, Mod) || {F,A} <- Mod:avp_arity(Name),
+ V <- [mget(F, Map, undefined)]];
-encode(Name, Rec, Opts, Mod) ->
- [encode(Name, F, V, Opts, Mod) || {F,V} <- Mod:'#get-'(Rec)].
+encode(Name, Rec, Opts, Strict, Mod) ->
+ [encode(Name, F, V, Opts, Strict, Mod) || {F,V} <- Mod:'#get-'(Rec)].
-%% encode/5
+%% encode/6
+
+encode(_, AvpName, Values, Opts, Strict, Mod)
+ when Strict /= encode ->
+ enc(AvpName, ?ANY, Values, Opts, Strict, Mod);
-encode(Name, AvpName, Values, Opts, Mod) ->
- enc(Name, AvpName, Mod:avp_arity(Name, AvpName), Values, Opts, Mod).
+encode(Name, AvpName, Values, Opts, Strict, Mod) ->
+ Arity = Mod:avp_arity(Name, AvpName),
+ enc(AvpName, Arity, Values, Opts, Strict, Mod).
%% enc/6
-enc(_, AvpName, 1, undefined, _, _) ->
+enc(AvpName, Arity, Values, Opts, Strict, Mod)
+ when Strict /= encode, Arity /= ?ANY ->
+ enc(AvpName, ?ANY, Values, Opts, Strict, Mod);
+
+enc(AvpName, 1, undefined, _, _, _) ->
?THROW([mandatory_avp_missing, AvpName]);
-enc(Name, AvpName, 1, Value, Opts, Mod) ->
- enc(Name, AvpName, [Value], Opts, Mod);
+enc(AvpName, 1, Value, Opts, _, Mod) ->
+ H = avp_header(AvpName, Mod),
+ enc(AvpName, H, Value, Opts, Mod);
+
+enc(_, {0,_}, [], _, _, _) ->
+ [];
+
+enc(_, _, undefined, _, _, _) ->
+ [];
+
+%% Be forgiving when a list of values is expected. If the value itself
+%% is a list then the user has to wrap it to prevent each member from
+%% being interpreted as an individual AVP value.
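+%% For example, a repeated string-valued AVP like Route-Record (arity
+%% {0,'*'}) must be passed as ["one.example.org"] to encode a single
+%% value: unwrapped, "one.example.org" would be taken as a list of
+%% (character) values.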
+enc(AvpName, Arity, V, Opts, Strict, Mod)
+ when not is_list(V) ->
+ enc(AvpName, Arity, [V], Opts, Strict, Mod);
-enc(_, _, {0,_}, [], _, _) ->
+enc(AvpName, {Min, Max}, Values, Opts, Strict, Mod) ->
+ H = avp_header(AvpName, Mod),
+ enc(AvpName, H, Min, 0, Max, Values, Opts, Strict, Mod).
+
+%% enc/9
+
+enc(AvpName, H, Min, N, Max, Vs, Opts, Strict, Mod)
+ when Strict /= encode;
+ Max == '*', Min =< N ->
+ [enc(AvpName, H, V, Opts, Mod) || V <- Vs];
+
+enc(AvpName, _, Min, N, _, [], _, _, _)
+ when N < Min ->
+ ?THROW([repeated_avp_insufficient_arity, AvpName, Min, N]);
+
+enc(_, _, _, _, _, [], _, _, _) ->
[];
-enc(_, AvpName, _, T, _, _)
- when not is_list(T) ->
- ?THROW([repeated_avp_as_non_list, AvpName, T]);
+enc(AvpName, _, _, N, Max, _, _, _, _)
+ when Max =< N ->
+ ?THROW([repeated_avp_excessive_arity, AvpName, Max]);
+
+enc(AvpName, H, Min, N, Max, [V|Vs], Opts, Strict, Mod) ->
+ [enc(AvpName, H, V, Opts, Mod)
+ | enc(AvpName, H, Min, N+1, Max, Vs, Opts, Strict, Mod)].
-enc(_, AvpName, {Min, _}, L, _, _)
- when length(L) < Min ->
- ?THROW([repeated_avp_insufficient_arity, AvpName, Min, L]);
+%% avp_header/2
-enc(_, AvpName, {_, Max}, L, _, _)
- when Max < length(L) ->
- ?THROW([repeated_avp_excessive_arity, AvpName, Max, L]);
+avp_header('AVP', _) ->
+ false;
-enc(Name, AvpName, _, Values, Opts, Mod) ->
- enc(Name, AvpName, Values, Opts, Mod).
+avp_header(AvpName, Mod) ->
+ {_,_,_} = Mod:avp_header(AvpName).
%% enc/5
-enc(Name, 'AVP', Values, Opts, Mod) ->
- [enc_AVP(Name, A, Opts, Mod) || A <- Values];
+enc('AVP', false, Value, Opts, Mod) ->
+ enc_AVP(Value, Opts, Mod);
-enc(_, AvpName, Values, Opts, Mod) ->
- enc(AvpName, Values, Opts, Mod).
+enc(AvpName, Hdr, Value, Opts, Mod) ->
+ enc1(AvpName, Hdr, Value, Opts, Mod).
-%% enc/4
+%% enc1/5
-enc(AvpName, Values, Opts, Mod) ->
- H = Mod:avp_header(AvpName),
- [diameter_codec:pack_data(H, Mod:avp(encode, V, AvpName, Opts))
- || V <- Values].
+enc1(AvpName, {_,_,_} = Hdr, Value, Opts, Mod) ->
+ diameter_codec:pack_data(Hdr, Mod:avp(encode, Value, AvpName, Opts)).
-%% enc_AVP/4
+%% enc1/6
+
+enc1(AvpName, {_,_,_} = Hdr, Value, Opts, Mod, Dict) ->
+ diameter_codec:pack_data(Hdr, avp(encode, Value, AvpName, Opts, Mod, Dict)).
+
+%% enc_AVP/3
%% No value: assume AVP data is already encoded. The normal case will
%% be when this is passed back from #diameter_packet.errors as a
%% consequence of a failed decode. Any AVP can be encoded this way
%% however, which side-steps any arity checks for known AVP's and
%% could potentially encode something unfortunate.
-enc_AVP(_, #diameter_avp{value = undefined} = A, Opts, _) ->
+enc_AVP(#diameter_avp{value = undefined} = A, Opts, _) ->
diameter_codec:pack_avp(A, Opts);
-%% Missing name for value encode.
-enc_AVP(_, #diameter_avp{name = N, value = V}, _, _)
- when N == undefined;
- N == 'AVP' ->
- ?THROW([value_with_nameless_avp, N, V]);
+%% Encode a name/value pair using an alternate dictionary if need be ...
+enc_AVP(#diameter_avp{name = AvpName, value = Value}, Opts, Mod) ->
+ enc_AVP(AvpName, Value, Opts, Mod);
+enc_AVP({AvpName, Value}, Opts, Mod) ->
+ enc_AVP(AvpName, Value, Opts, Mod);
+
+%% ... or with a specified dictionary.
+enc_AVP({Dict, AvpName, Value}, Opts, Mod) ->
+ enc1(AvpName, Dict:avp_header(AvpName), Value, Opts, Mod, Dict).
+
+%% Don't guard against anything being sent as a generic 'AVP', which
+%% allows arity restrictions to be abused.
+
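+%% As a sketch (module names are only examples): with the option
+%% {avp_dictionaries, [my_vendor_dict]}, an 'AVP' member can be passed
+%% as {'My-Vendor-AVP', Value} and found by the fallback search in
+%% enc_AVP/4, or as {my_vendor_dict, 'My-Vendor-AVP', Value} to name
+%% the dictionary explicitly.
+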
+%% enc_AVP/4
+
+enc_AVP(AvpName, Value, Opts, Mod) ->
+ try Mod:avp_header(AvpName) of
+ H ->
+ enc1(AvpName, H, Value, Opts, Mod)
+ catch
+ error: _ ->
+ Dicts = mget(avp_dictionaries, Opts, []),
+ enc_AVP(Dicts, AvpName, Value, Opts, Mod)
+ end.
-%% Or not. Ensure that 'AVP' is the appropriate field. Note that if we
-%% don't know this AVP at all then the encode will fail.
-enc_AVP(Name, #diameter_avp{name = AvpName, value = Data}, Opts, Mod) ->
- 0 == Mod:avp_arity(Name, AvpName)
- orelse ?THROW([known_avp_as_AVP, Name, AvpName, Data]),
- enc(AvpName, [Data], Opts, Mod);
+%% enc_AVP/5
-%% The backdoor ...
-enc_AVP(_, {AvpName, Value}, Opts, Mod) ->
- enc(AvpName, [Value], Opts, Mod);
+enc_AVP([Dict | Rest], AvpName, Value, Opts, Mod) ->
+ try Dict:avp_header(AvpName) of
+ H ->
+ enc1(AvpName, H, Value, Opts, Mod, Dict)
+ catch
+ error: _ ->
+ enc_AVP(Rest, AvpName, Value, Opts, Mod)
+ end;
-%% ... and the side door.
-enc_AVP(_Name, {_Dict, _AvpName, _Data} = T, Opts, _) ->
- diameter_codec:pack_avp(#diameter_avp{data = T}, Opts).
+enc_AVP([], AvpName, _, _, _) ->
+ ?THROW([no_dictionary, AvpName]).
%% ---------------------------------------------------------------------------
%% # decode_avps/3
%% ---------------------------------------------------------------------------
--spec decode_avps(parent_name(), [#diameter_avp{}], map())
- -> {parent_record(), [avp()], Failed}
+-spec decode_avps(parent_name(), binary(), map())
+ -> {parent_record() | parent_name(), [avp()], Failed}
when Failed :: [{5000..5999, #diameter_avp{}}].
-decode_avps(Name, Recs, #{module := Mod} = Opts) ->
- {Avps, {Rec, Failed}}
- = mapfoldl(fun(T,A) -> decode(Name, Opts, Mod, T, A) end,
- {newrec(Mod, Name), []},
- Recs),
- {Rec, Avps, Failed ++ missing(Rec, Name, Failed, Opts, Mod)}.
-%% Append 5005 errors so that errors are reported in the order
+decode_avps(Name, Bin, #{module := Mod, decode_format := Fmt} = Opts) ->
+ Strict = mget(strict_arities, Opts, decode),
+ [AM, Avps, Failed | Rec]
+ = decode(Bin, Name, Mod, Fmt, Strict, Opts, 0, #{}),
+ %% AM counts the number of top-level AVPs, which missing/5 then
+ %% uses when appending 5005 errors.
+ {reformat(Name, Rec, Strict, Mod, Fmt),
+ Avps,
+ Failed ++ missing(Name, Strict, Mod, Opts, AM)}.
+
+%% Append arity errors so that errors are reported in the order
%% encountered. Failed-AVP should typically contain the first
-%% encountered error accordg to the RFC.
+%% error encountered.
+
+%% decode/8
+
+decode(<<Code:32, V:1, M:1, P:1, _:5, Len:24, I:V/unit:32, Rest/binary>>,
+ Name,
+ Mod,
+ Fmt,
+ Strict,
+ Opts,
+ Idx,
+ AM) ->
+ decode(Rest,
+ Code,
+ if 1 == V -> I; true -> undefined end,
+ Len - 8 - 4*V, %% possibly negative, causing case match to fail
+ (4 - (Len rem 4)) rem 4,
+ 1 == M,
+ 1 == P,
+ Name,
+ Mod,
+ Fmt,
+ Strict,
+ Opts,
+ Idx,
+ AM);
+
+decode(<<>>, Name, Mod, Fmt, Strict, _, _, AM) ->
+ [AM, [], [] | newrec(Fmt, Mod, Name, Strict)];
+
+decode(Bin, Name, Mod, Fmt, Strict, _, Idx, AM) ->
+ Avp = #diameter_avp{data = Bin, index = Idx},
+ [AM, [Avp], [{5014, Avp}] | newrec(Fmt, Mod, Name, Strict)].
+
+%% decode/14
+
+decode(Bin, Code, Vid, DataLen, Pad, M, P, Name, Mod, Fmt, Strict, Opts0,
+ Idx, AM0) ->
+ case Bin of
+ <<Data:DataLen/binary, _:Pad/binary, T/binary>> ->
+ {NameT, Field, Arity, {I, AM}}
+ = incr(Name, Code, Vid, M, Mod, Strict, Opts0, AM0),
+
+ Opts = setopts(NameT, Name, M, Opts0),
+ %% Not AvpName or else a failed Failed-AVP
+ %% decode is packed into 'AVP'.
+
+ Avp = #diameter_avp{code = Code,
+ vendor_id = Vid,
+ is_mandatory = M,
+ need_encryption = P,
+ data = Data,
+ name = name(NameT),
+ type = type(NameT),
+ index = Idx},
+
+ Dec = dec(Data, Name, NameT, Mod, Fmt, Opts, Avp),
+ Acc = decode(T, Name, Mod, Fmt, Strict, Opts, Idx+1, AM),%% recurse
+ acc(Acc, Dec, I, Field, Arity, Strict, Mod, Opts);
+ _ ->
+ {NameT, _Field, _Arity, {_, AM}}
+ = incr(Name, Code, Vid, M, Mod, Strict, Opts0, AM0),
+
+ Avp = #diameter_avp{code = Code,
+ vendor_id = Vid,
+ is_mandatory = M,
+ need_encryption = P,
+ data = Bin,
+ name = name(NameT),
+ type = type(NameT),
+ index = Idx},
+
+ [AM, [Avp], [{5014, Avp}] | newrec(Fmt, Mod, Name, Strict)]
+ end.
+
+%% incr/8
-%% mapfoldl/3
+incr(Name, Code, Vid, M, Mod, Strict, Opts, AM0) ->
+ NameT = Mod:avp_name(Code, Vid), %% {AvpName, Type} | 'AVP'
+ Field = field(NameT), %% AvpName | 'AVP'
+ Arity = avp_arity(Name, Field, Mod, Opts, M),
+ if 0 == Arity, 'AVP' /= Field ->
+ A = pack_arity(Name, Field, Opts, Mod, M),
+ {NameT, 'AVP', A, incr('AVP', A, Strict, AM0)};
+ true ->
+ {NameT, Field, Arity, incr(Field, Arity, Strict, AM0)}
+ end.
+
+%% Data is a truncated header if command_code = undefined, otherwise
+%% payload bytes. The former is padded to the length of a header if
+%% the AVP reaches an outgoing encode.
%%
-%% Like lists:mapfoldl/3, but don't reverse the list.
+%% RFC 6733 says that an AVP returned with 5014 can contain a minimal
+%% payload for the AVP's type, but we don't always know the type.
-mapfoldl(F, Acc, List) ->
- mapfoldl(F, Acc, List, []).
+setopts('AVP', _, _, Opts) ->
+ Opts;
-mapfoldl(F, Acc0, [T|Rest], List) ->
- {B, Acc} = F(T, Acc0),
- mapfoldl(F, Acc, Rest, [B|List]);
-mapfoldl(_, Acc, [], List) ->
- {List, Acc}.
+setopts({_, Type}, Name, M, Opts) ->
+ set_failed(Name, set_strict(Type, M, Opts)).
-%% 3588:
+%% incr/4
+
+incr(_, A, SA, AM)
+ when A == ?ANY;
+ A == 0;
+ SA /= decode ->
+ {undefined, AM};
+
+incr(AvpName, _, _, AM) ->
+ case AM of
+ #{AvpName := N} ->
+ {N, AM#{AvpName => N+1}};
+ _ ->
+ {0, AM#{AvpName => 1}}
+ end.
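+
+%% For example, incr('Origin-Host', 1, decode, #{}) returns
+%% {0, #{'Origin-Host' => 1}}; a second occurrence returns a count of
+%% 1, which acc2/8 compares against the AVP's maximum arity.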
+
+%% mget/3
+%%
+%% Measurably faster than maps:get/3.
+
+mget(Key, Map, Def) ->
+ case Map of
+ #{Key := V} ->
+ V;
+ _ ->
+ Def
+ end.
+
+%% name/1
+
+name({Name, _}) ->
+ Name;
+name(_) ->
+ undefined.
+
+%% type/1
+
+type({_, Type}) ->
+ Type;
+type(_) ->
+ undefined.
+
+%% missing/5
+
+missing(Name, decode, Mod, Opts, AM) ->
+ [{5005, empty_avp(N, Opts, Mod)} || {N,A} <- Mod:avp_arity(Name),
+ N /= 'AVP',
+ Mn <- [min_arity(A)],
+ 0 < Mn,
+ mget(N, AM, 0) < Mn];
+
+missing(_, _, _, _, _) ->
+ [].
+
+%% 3588/6733:
%%
%% DIAMETER_MISSING_AVP 5005
%% The request did not contain an AVP that is required by the Command
@@ -204,57 +428,20 @@ mapfoldl(_, Acc, [], List) ->
%% Vendor-Id if applicable. The value field of the missing AVP
%% should be of correct minimum length and contain zeros.
-missing(Rec, Name, Failed, Opts, Mod) ->
- Avps = lists:foldl(fun({_, #diameter_avp{code = C, vendor_id = V}}, A) ->
- maps:put({C,V}, true, A)
- end,
- maps:new(),
- Failed),
- missing(Mod:avp_arity(Name), tl(tuple_to_list(Rec)), Avps, Opts, Mod, []).
-
-missing([{Name, Arity} | As], [Value | Vs], Avps, Opts, Mod, Acc) ->
- missing(As,
- Vs,
- Avps,
- Opts,
- Mod,
- case
- [H || missing_arity(Arity, Value),
- {C,_,V} = H <- [Mod:avp_header(Name)],
- not maps:is_key({C,V}, Avps)]
- of
- [H] ->
- [{5005, empty_avp(Name, H, Opts, Mod)} | Acc];
- [] ->
- Acc
- end);
-
-missing([], [], _, _, _, Acc) ->
- Acc.
-
-%% Maximum arities have already been checked in building the record.
-
-missing_arity(1, V) ->
- V == undefined;
-missing_arity({0, _}, _) ->
- false;
-missing_arity({1, _}, L) ->
- [] == L;
-missing_arity({Min, _}, L) ->
- not has_prefix(Min, L).
-
-%% Compare a non-negative integer and the length of a list without
-%% computing the length.
-has_prefix(0, _) ->
- true;
-has_prefix(_, []) ->
- false;
-has_prefix(N, [_|L]) ->
- has_prefix(N-1, L).
+%% min_arity/1
+
+min_arity(1) ->
+ 1;
+min_arity({Mn,_}) ->
+ Mn.
-%% empty_avp/4
+%% empty_avp/3
-empty_avp(Name, {Code, Flags, VId}, Opts, Mod) ->
+empty_avp('AVP', _, _) ->
+ #diameter_avp{data = <<0:64>>};
+
+empty_avp(Name, Opts, Mod) ->
+ {Code, Flags, VId} = Mod:avp_header(Name),
{Name, Type} = Mod:avp_name(Code, VId),
#diameter_avp{name = Name,
code = Code,
@@ -273,21 +460,19 @@ empty_avp(Name, {Code, Flags, VId}, Opts, Mod) ->
%% specific errors that can be described by this AVP are described in
%% the following section.
-%% decode/5
+%% field/1
-decode(Name,
- Opts,
- Mod,
- #diameter_avp{code = Code, vendor_id = Vid}
- = Avp,
- Acc) ->
- decode(Name, Opts, Mod, Mod:avp_name(Code, Vid), Avp, Acc).
+field({AvpName, _}) ->
+ AvpName;
+field(_) ->
+ 'AVP'.
-%% decode/6
+%% dec/7
-%% AVP not in dictionary.
-decode(Name, Opts, Mod, 'AVP', Avp, Acc) ->
- decode_AVP(Name, Avp, Opts, Mod, Acc);
+%% AVP not in dictionary: try an alternate.
+
+dec(Data, Name, 'AVP', Mod, Fmt, Opts, Avp) ->
+ dec_AVP(dicts(Mod, Opts), Data, Name, Mod, Fmt, Opts, Avp);
%% 6733, 4.4:
%%
@@ -336,130 +521,149 @@ decode(Name, Opts, Mod, 'AVP', Avp, Acc) ->
%% defined the RFC's "unrecognized", which is slightly stronger than
%% "not defined".)
-decode(Name, Opts0, Mod, {AvpName, Type}, Avp, Acc) ->
- #diameter_avp{data = Data, is_mandatory = M}
- = Avp,
+dec(Data, Name, {AvpName, Type}, Mod, Fmt, Opts, Avp) ->
+ #{app_dictionary := AppMod, failed_avp := Failed}
+ = Opts,
- %% Whether or not to ignore an M-bit on an encapsulated AVP, or on
- %% all AVPs with the service_opt() strict_mbit.
- Opts1 = set_strict(Type, M, Opts0),
+ %% Reset the dictionary for best-effort decode of Failed-AVP.
+ Dict = if Failed -> AppMod;
+ true -> Mod
+ end,
- %% Whether or not we're decoding within Failed-AVP and should
- %% ignore decode errors.
- #{dictionary := AppMod, failed_avp := Failed}
- = Opts
- = set_failed(Name, Opts1), %% Not AvpName or else a failed Failed-AVP
- %% decode is packed into 'AVP'.
+ dec(Data, Name, AvpName, Type, Mod, Dict, Fmt, Failed, Opts, Avp).
- %% Reset the dictionary for best-effort decode of Failed-AVP.
- DecMod = if Failed ->
- AppMod;
- true ->
- Mod
- end,
-
- %% On decode, a Grouped AVP is represented as a #diameter_avp{}
- %% list with AVP as head and component AVPs as tail. On encode,
- %% data can be a list of component AVPs.
-
- try avp_decode(Data, AvpName, Opts, DecMod, Mod) of
- {Rec, As} when Type == 'Grouped' ->
- A = Avp#diameter_avp{name = AvpName,
- value = Rec,
- type = Type},
- {[A|As], pack_avp(Name, A, Opts, Mod, Acc)};
+%% dicts/2
- V when Type /= 'Grouped' ->
- A = Avp#diameter_avp{name = AvpName,
- value = V,
- type = Type},
- {A, pack_avp(Name, A, Opts, Mod, Acc)}
- catch
- throw: {?MODULE, {grouped, Error, ComponentAvps}} ->
- decode_error(Name,
- Error,
- ComponentAvps,
- Opts,
- Mod,
- Avp#diameter_avp{name = AvpName,
- data = trim(Avp#diameter_avp.data),
- type = Type},
- Acc);
+dicts(Mod, #{app_dictionary := Mod, avp_dictionaries := Dicts}) ->
+ Dicts;
+
+dicts(_, #{app_dictionary := Dict, avp_dictionaries := Dicts}) ->
+ [Dict | Dicts];
+dicts(Mod, #{app_dictionary := Mod}) ->
+ [];
+
+dicts(_, #{app_dictionary := Dict}) ->
+ [Dict].
+
+%% dec/10
+
+dec(Data, Name, AvpName, Type, Mod, Dict, Fmt, Failed, Opts, Avp) ->
+ try avp(decode, Data, AvpName, Opts, Mod, Dict) of
+ V ->
+ set(Type, Fmt, Avp, V)
+ catch
+ throw: {?MODULE, T} ->
+ decode_error(Failed, Fmt, T, Avp);
error: Reason ->
- decode_error(Name,
- Reason,
- Opts,
- Mod,
- Avp#diameter_avp{name = AvpName,
- data = trim(Avp#diameter_avp.data),
- type = Type},
- Acc)
+ decode_error(Failed, Reason, Name, Mod, Opts, Avp)
end.
-%% avp_decode/5
+%% dec_AVP/7
-avp_decode(Data, AvpName, Opts, Mod, Mod) ->
- Mod:avp(decode, Data, AvpName, Opts);
+dec_AVP([], _, _, _, _, _, Avp) ->
+ Avp;
-avp_decode(Data, AvpName, Opts, Mod, _) ->
- Mod:avp(decode, Data, AvpName, Opts, Mod).
+dec_AVP(Dicts, Data, Name, Mod, Fmt, Opts, #diameter_avp{code = Code,
+ vendor_id = Vid}
+ = Avp) ->
+ dec_AVP(Dicts, Data, Name, Mod, Fmt, Opts, Code, Vid, Avp).
-%% trim/1
+%% dec_AVP/9
%%
-%% Remove any extra bit that was added in diameter_codec to induce a
-%% 5014 error.
+%% Try to decode an AVP in the first alternate dictionary that defines
+%% it.
+
+dec_AVP([Dict | Rest], Data, Name, Mod, Fmt, Opts, Code, Vid, Avp) ->
+ case Dict:avp_name(Code, Vid) of
+ {AvpName, Type} ->
+ A = Avp#diameter_avp{name = AvpName,
+ type = Type},
+ #{failed_avp := Failed} = Opts,
+ dec(Data, Name, AvpName, Type, Mod, Dict, Fmt, Failed, Opts, A);
+ _ ->
+ dec_AVP(Rest, Data, Name, Mod, Fmt, Opts, Code, Vid, Avp)
+ end;
-trim(#diameter_avp{data = Data} = Avp) ->
- Avp#diameter_avp{data = trim(Data)};
+dec_AVP([], _, _, _, _, _, _, _, Avp) ->
+ Avp.
-trim({5014, Bin}) ->
- Bin;
+%% set/4
+%%
+%% A Grouped AVP is represented as a #diameter_avp{} list with AVP
+%% as head and component AVPs as tail.
-trim(Avps)
- when is_list(Avps) ->
- lists:map(fun trim/1, Avps);
+set('Grouped', Fmt, Avp, V) ->
+ {Rec, As} = V,
+ [set(Fmt, Avp, Rec) | As];
-trim(Avp) ->
- Avp.
+set(_, _, Avp, V) ->
+ Avp#diameter_avp{value = V}.
-%% decode_error/7
+%% decode_error/4
+%%
+%% Error when decoding a grouped AVP.
-decode_error(Name, [_ | Rec], _, #{failed_avp := true} = Opts, Mod, Avp, Acc) ->
- decode_AVP(Name, Avp#diameter_avp{value = Rec}, Opts, Mod, Acc);
+%% Ignoring errors in Failed-AVP.
+decode_error(true, Fmt, {Rec, ComponentAvps, _Errors}, Avp) ->
+ [set(Fmt, Avp, Rec) | ComponentAvps];
-decode_error(Name, _, _, #{failed_avp := true} = Opts, Mod, Avp, Acc) ->
- decode_AVP(Name, Avp, Opts, Mod, Acc);
+%% Or not. A faulty component is encoded by itself in Failed-AVP, as
+%% suggested by 7.5 of RFC 6733 (quoted below), so that errors are
+%% reported unambiguously.
+decode_error(false, _, {_, ComponentAvps, [{RC,A} | _]}, Avp) ->
+ {RC, [Avp | ComponentAvps], Avp#diameter_avp{data = [A]}}.
-decode_error(_, [Error | _], ComponentAvps, _, _, Avp, Acc) ->
- decode_error(Error, Avp, Acc, ComponentAvps);
+%% set/3
-decode_error(_, Error, ComponentAvps, _, _, Avp, Acc) ->
- decode_error(Error, Avp, Acc, ComponentAvps).
+set(none, Avp, _Name) ->
+ Avp;
+set(_, Avp, Rec) ->
+ Avp#diameter_avp{value = Rec}.
-%% decode_error/5
+%% decode_error/6
+%%
+%% Error when decoding a non-grouped AVP.
-decode_error(Name, _Reason, #{failed_avp := true} = Opts, Mod, Avp, Acc) ->
- decode_AVP(Name, Avp, Opts, Mod, Acc);
+decode_error(true, _, _, _, _, Avp) ->
+ Avp;
-decode_error(Name, Reason, Opts, Mod, Avp, {Rec, Failed}) ->
+decode_error(false, Reason, Name, Mod, Opts, Avp) ->
Stack = diameter_lib:get_stacktrace(),
diameter_lib:log(decode_error,
?MODULE,
?LINE,
{Reason, Name, Avp#diameter_avp.name, Mod, Stack}),
- {Avp, {Rec, [rc(Reason, Avp, Opts, Mod) | Failed]}}.
+ case Reason of
+ {'DIAMETER', 5014 = RC, _} ->
+ %% Length error communicated from diameter_types or a
+ %% @custom_types/@codecs module.
+ AvpName = Avp#diameter_avp.name,
+ {RC, Avp#diameter_avp{data = Mod:empty_value(AvpName, Opts)}};
+ _ ->
+ {5004, Avp}
+ end.
-%% decode_error/4
+%% 3588/6733:
+%%
+%% DIAMETER_INVALID_AVP_VALUE 5004
+%% The request contained an AVP with an invalid value in its data
+%% portion. A Diameter message indicating this error MUST include
+%% the offending AVPs within a Failed-AVP AVP.
-decode_error({RC, ErrorData}, Avp, {Rec, Failed}, ComponentAvps) ->
- E = Avp#diameter_avp{data = [ErrorData]},
- {[Avp | trim(ComponentAvps)], {Rec, [{RC, E} | Failed]}}.
+%% avp/6
-%% set_strict/3
+avp(T, Data, AvpName, Opts, Mod, Mod) ->
+ Mod:avp(T, Data, AvpName, Opts);
+
+avp(T, Data, AvpName, Opts, _, Mod) ->
+ Mod:avp(T, Data, AvpName, Opts#{module := Mod}).
+%% set_strict/3
+%%
%% Set false as soon as we see a Grouped AVP that doesn't set the
%% M-bit, to ignore the M-bit on an encapsulated AVP.
+
set_strict('Grouped', false = M, #{strict_mbit := true} = Opts) ->
Opts#{strict_mbit := M};
set_strict(_, _, Opts) ->
@@ -476,102 +680,84 @@ set_failed('Failed-AVP', #{failed_avp := false} = Opts) ->
set_failed(_, Opts) ->
Opts.
-%% decode_AVP/5
-%%
-%% Don't know this AVP: see if it can be packed in an 'AVP' field
-%% undecoded. Note that the type field is 'undefined' in this case.
-
-decode_AVP(Name, Avp, Opts, Mod, Acc) ->
- {trim(Avp), pack_AVP(Name, Avp, Opts, Mod, Acc)}.
-
-%% rc/2
-
-%% diameter_types will raise an error of this form to communicate
-%% DIAMETER_INVALID_AVP_LENGTH (5014). A module specified to a
-%% @custom_types tag in a dictionary file can also raise an error of
-%% this form.
-rc({'DIAMETER', 5014 = RC, _}, #diameter_avp{name = AvpName} = Avp, Opts, Mod) ->
- {RC, Avp#diameter_avp{data = Mod:empty_value(AvpName, Opts)}};
-
-%% 3588:
-%%
-%% DIAMETER_INVALID_AVP_VALUE 5004
-%% The request contained an AVP with an invalid value in its data
-%% portion. A Diameter message indicating this error MUST include
-%% the offending AVPs within a Failed-AVP AVP.
-rc(_, Avp, _, _) ->
- {5004, Avp}.
-
-%% pack_avp/5
-
-pack_avp(Name, #diameter_avp{name = AvpName} = Avp, Opts, Mod, Acc) ->
- pack_avp(Name, Mod:avp_arity(Name, AvpName), Avp, Opts, Mod, Acc).
-
-%% pack_avp/6
-
-pack_avp(Name, 0, Avp, Opts, Mod, Acc) ->
- pack_AVP(Name, Avp, Opts, Mod, Acc);
-
-pack_avp(_, Arity, #diameter_avp{name = AvpName} = Avp, _Opts, Mod, Acc) ->
- pack(Arity, AvpName, Avp, Mod, Acc).
-
-%% pack_AVP/5
+%% acc/8
+
+acc([AM | Acc], As, I, Field, Arity, Strict, Mod, Opts) ->
+ [AM | acc1(Acc, As, I, Field, Arity, Strict, Mod, Opts)].
+
+%% acc1/8
+
+%% Faulty AVP, not grouped.
+acc1(Acc, {_RC, Avp} = E, _, _, _, _, _, _) ->
+ [Avps, Failed | Rec] = Acc,
+ [[Avp | Avps], [E | Failed] | Rec];
+
+%% Faulty component in grouped AVP.
+acc1(Acc, {RC, As, Avp}, _, _, _, _, _, _) ->
+ [Avps, Failed | Rec] = Acc,
+ [[As | Avps], [{RC, Avp} | Failed] | Rec];
+
+%% Grouped AVP ...
+acc1([Avps | Acc], [Avp|_] = As, I, Field, Arity, Strict, Mod, Opts) ->
+ [[As|Avps] | acc2(Acc, Avp, I, Field, Arity, Strict, Mod, Opts)];
+
+%% ... or not.
+acc1([Avps | Acc], Avp, I, Field, Arity, Strict, Mod, Opts) ->
+ [[Avp|Avps] | acc2(Acc, Avp, I, Field, Arity, Strict, Mod, Opts)].
+
+%% The component list of a Grouped AVP is discarded when packing into
+%% the record (or equivalent): the values in an 'AVP' field are
+%% diameter_avp records, not a list of records in the Grouped case,
+%% and the decode into the value field is best-effort. The reason is
+%% history more than logic: it would probably have made more sense to
+%% retain the same structure as in diameter_packet.avps, but an 'AVP'
+%% list has always been flat.
+
+%% acc2/8
+
+%% No errors, but nowhere to pack.
+acc2(Acc, Avp, _, 'AVP', 0, _, _, _) ->
+ [Failed | Rec] = Acc,
+ [[{rc(Avp), Avp} | Failed] | Rec];
+
+%% Relaxed arities.
+acc2(Acc, Avp, _, Field, Arity, Strict, Mod, _)
+ when Strict /= decode ->
+ pack(Arity, Field, Avp, Mod, Acc);
+
+%% No maximum arity.
+acc2(Acc, Avp, _, Field, {_,'*'} = Arity, _, Mod, _) ->
+ pack(Arity, Field, Avp, Mod, Acc);
+
+%% Or check.
+acc2(Acc, Avp, I, Field, Arity, _, Mod, _) ->
+ Mx = max_arity(Arity),
+ if Mx =< I ->
+ [Failed | Rec] = Acc,
+ [[{5009, Avp} | Failed] | Rec];
+ true ->
+ pack(Arity, Field, Avp, Mod, Acc)
+ end.
-%% Length failure was induced because of a header/payload length
-%% mismatch. The AVP Length is reset to match the received data if
-%% this AVP is encoded in an answer message, since the length is
-%% computed.
-%%
-%% Data is a truncated header if command_code = undefined, otherwise
-%% payload bytes. The former is padded to the length of a header if
-%% the AVP reaches an outgoing encode in diameter_codec.
+%% 3588/6733:
%%
-%% RFC 6733 says that an AVP returned with 5014 can contain a minimal
-%% payload for the AVP's type, but in this case we don't know the
-%% type.
-
-pack_AVP(_, #diameter_avp{data = {5014 = RC, Data}} = Avp, _, _, Acc) ->
- {Rec, Failed} = Acc,
- {Rec, [{RC, Avp#diameter_avp{data = Data}} | Failed]};
-
-pack_AVP(Name, Avp, Opts, Mod, Acc) ->
- pack_arity(Name, pack_arity(Name, Opts, Mod, Avp), Avp, Mod, Acc).
-
-%% pack_arity/5
-
-pack_arity(_, 0, #diameter_avp{is_mandatory = M} = Avp, _, Acc) ->
- {Rec, Failed} = Acc,
- {Rec, [{if M -> 5001; true -> 5008 end, Avp} | Failed]};
-
-pack_arity(_, Arity, Avp, Mod, Acc) ->
- pack(Arity, 'AVP', Avp, Mod, Acc).
+%% DIAMETER_AVP_OCCURS_TOO_MANY_TIMES 5009
+%% A message was received that included an AVP that appeared more
+%% often than permitted in the message definition. The Failed-AVP
+%% AVP MUST be included and contain a copy of the first instance of
+%% the offending AVP that exceeded the maximum number of occurrences
-%% Give Failed-AVP special treatment since (1) it'll contain any
-%% unrecognized mandatory AVP's and (2) the RFC 3588 grammar failed to
-%% allow for Failed-AVP in an answer-message.
+%% max_arity/1
-pack_arity(Name,
- #{strict_mbit := Strict,
- failed_avp := Failed},
- Mod,
- #diameter_avp{is_mandatory = M,
- name = AvpName}) ->
+max_arity(1) ->
+ 1;
+max_arity({_,Mx}) ->
+ Mx.
- %% Not testing just Name /= 'Failed-AVP' means we're changing the
- %% packing of AVPs nested within Failed-AVP, but the point of
- %% ignoring errors within Failed-AVP is to decode as much as
- %% possible, and failing because a mandatory AVP couldn't be
- %% packed into a dedicated field defeats that point.
+%% rc/1
- if Failed == true;
- Name == 'Failed-AVP';
- Name == 'answer-message', AvpName == 'Failed-AVP';
- not M;
- not Strict ->
- Mod:avp_arity(Name, 'AVP');
- true ->
- 0
- end.
+rc(#diameter_avp{is_mandatory = M}) ->
+ if M -> 5001; true -> 5008 end.
%% 3588:
%%
@@ -586,75 +772,99 @@ pack_arity(Name,
%% Failed-AVP AVP MUST be included and contain a copy of the
%% offending AVP.
+%% pack_arity/5
+
+%% Give Failed-AVP special treatment since (1) it'll contain any
+%% unrecognized mandatory AVP's and (2) the RFC 3588 grammar failed to
+%% allow for Failed-AVP in an answer-message.
+
+pack_arity(Name, AvpName, _, Mod, M)
+ when Name == 'Failed-AVP';
+ Name == 'answer-message', AvpName == 'Failed-AVP';
+ not M ->
+ Mod:avp_arity(Name, 'AVP');
+%% Not testing just Name /= 'Failed-AVP' means we're changing the
+%% packing of AVPs nested within Failed-AVP, but the point of
+%% ignoring errors within Failed-AVP is to decode as much as
+%% possible, and failing because a mandatory AVP couldn't be
+%% packed into a dedicated field defeats that point.
+
+pack_arity(Name, _, #{strict_mbit := Strict, failed_avp := Failed}, Mod, _)
+ when not Strict;
+ Failed ->
+ Mod:avp_arity(Name, 'AVP');
+
+pack_arity(_, _, _, _, _) ->
+ 0.
+
+%% avp_arity/5
+
+avp_arity(Name, 'AVP' = AvpName, Mod, Opts, M) ->
+ pack_arity(Name, AvpName, Opts, Mod, M);
+
+avp_arity(Name, AvpName, Mod, _, _) ->
+ Mod:avp_arity(Name, AvpName).
+
%% pack/5
-pack(Arity, FieldName, Avp, Mod, {Rec, _} = Acc) ->
- pack(Mod:'#get-'(FieldName, Rec), Arity, FieldName, Avp, Mod, Acc).
+pack(Arity, F, Avp, Mod, [Failed | Rec]) ->
+ [Failed | set(Arity, F, value(F, Avp), Mod, Rec)].
-%% pack/6
+%% set/5
-pack(undefined, 1, 'AVP' = F, Avp, Mod, {Rec, Failed}) -> %% unlikely
- {Mod:'#set-'({F, Avp}, Rec), Failed};
+set(_, _, _, _, Name)
+ when is_atom(Name) ->
+ Name;
-pack(undefined, 1, F, #diameter_avp{value = V}, Mod, {Rec, Failed}) ->
- {Mod:'#set-'({F, V}, Rec), Failed};
+set(1, F, Value, _, Map)
+ when is_map(Map) ->
+ Map#{F => Value};
-%% 3588:
-%%
-%% DIAMETER_AVP_OCCURS_TOO_MANY_TIMES 5009
-%% A message was received that included an AVP that appeared more
-%% often than permitted in the message definition. The Failed-AVP
-%% AVP MUST be included and contain a copy of the first instance of
-%% the offending AVP that exceeded the maximum number of occurrences
-%%
+set(_, F, V, _, Map)
+ when is_map(Map) ->
+ maps:update_with(F, fun(Vs) -> [V|Vs] end, [V], Map);
-pack(_, 1, _, Avp, _, {Rec, Failed}) ->
- {Rec, [{5009, Avp} | Failed]};
-
-pack(L, {_, Max}, F, Avp, Mod, {Rec, Failed}) ->
- case '*' /= Max andalso has_prefix(Max+1, L) of
- true ->
- {Rec, [{5009, Avp} | Failed]};
- false when F == 'AVP' ->
- {Mod:'#set-'({F, [Avp | L]}, Rec), Failed};
- false ->
- {Mod:'#set-'({F, [Avp#diameter_avp.value | L]}, Rec), Failed}
- end.
+set(1, F, Value, Mod, Rec) ->
+ Mod:'#set-'({F, Value}, Rec);
+
+set(_, F, V, Mod, Rec) ->
+ Vs = Mod:'#get-'(F, Rec),
+ Mod:'#set-'({F, [V|Vs]}, Rec).
+
+%% value/2
+
+value('AVP', Avp) ->
+ Avp;
+
+value(_, #diameter_avp{value = V}) ->
+ V.
%% ---------------------------------------------------------------------------
%% # grouped_avp/3
%% ---------------------------------------------------------------------------
--spec grouped_avp(decode, avp_name(), binary() | {5014, binary()}, term())
+%% Note that Grouped is the only AVP type that doesn't just return a
+%% decoded value, also returning the list of component diameter_avp
+%% records.
+
+-spec grouped_avp(decode, avp_name(), binary(), term())
-> {avp_record(), [avp()]};
(encode, avp_name(), avp_record() | avp_values(), term())
-> iolist()
| no_return().
-%% Length error induced by diameter_codec:collect_avps/1: the AVP
-%% length in the header was too short (insufficient for the extracted
-%% header) or too long (past the end of the message). An empty payload
-%% is sufficient according to the RFC text for 5014.
-grouped_avp(decode, _Name, {5014 = RC, _Bin}, _) ->
- ?THROW({grouped, {RC, []}, []});
-
-grouped_avp(decode, Name, Data, Opts) ->
- grouped_decode(Name, diameter_codec:collect_avps(Data), Opts);
+%% An error in decoding a component AVP throws the first faulty
+%% component, which a catch wraps in the Grouped AVP in question. A
+%% partially decoded record is only used when ignoring errors in
+%% Failed-AVP.
+grouped_avp(decode, Name, Bin, Opts) ->
+ {Rec, Avps, Es} = T = decode_avps(Name, Bin, Opts),
+ [] == Es orelse ?THROW(T),
+ {Rec, Avps};
grouped_avp(encode, Name, Data, Opts) ->
encode_avps(Name, Data, Opts).
-%% grouped_decode/2
-%%
-%% Note that Grouped is the only AVP type that doesn't just return a
-%% decoded value, also returning the list of component diameter_avp
-%% records.
-
-%% Length error in trailing component AVP.
-grouped_decode(_Name, {Error, Acc}, _) ->
- {5014, Avp} = Error,
- ?THROW({grouped, Error, [Avp | Acc]});
-
%% 7.5. Failed-AVP AVP
%% In the case where the offending AVP is embedded within a Grouped AVP,
@@ -665,15 +875,6 @@ grouped_decode(_Name, {Error, Acc}, _) ->
%% to the single offending AVP. This enables the recipient to detect
%% the location of the offending AVP when embedded in a group.
-%% An error in decoding a component AVP throws the first faulty
-%% component, which the catch in d/3 wraps in the Grouped AVP in
-%% question. A partially decoded record is only used when ignoring
-%% errors in Failed-AVP.
-grouped_decode(Name, ComponentAvps, Opts) ->
- {Rec, Avps, Es} = decode_avps(Name, ComponentAvps, Opts),
- [] == Es orelse ?THROW({grouped, [{_,_} = hd(Es) | Rec], Avps}),
- {Rec, Avps}.
-
%% ---------------------------------------------------------------------------
%% # empty_group/2
%% ---------------------------------------------------------------------------
@@ -705,5 +906,45 @@ empty(Name, #{module := Mod} = Opts) ->
%% ------------------------------------------------------------------------------
+%% newrec/4
+
+newrec(none, _, Name, _) ->
+ Name;
+
+newrec(record, Mod, Name, T)
+ when T /= decode ->
+ RecName = Mod:name2rec(Name),
+ Sz = Mod:'#info-'(RecName, size),
+ erlang:make_tuple(Sz, [], [{1, RecName}]);
+
+newrec(record, Mod, Name, _) ->
+ newrec(Mod, Name);
+
+newrec(_, _, _, _) ->
+ #{}.
+
+%% newrec/2
+
newrec(Mod, Name) ->
Mod:'#new-'(Mod:name2rec(Name)).
+
+%% reformat/5
+
+reformat(Name, Map, _Strict, Mod, list) ->
+ [{F,V} || {F,_} <- Mod:avp_arity(Name), #{F := V} <- [Map]];
+
+reformat(Name, Map, Strict, Mod, record_from_map) ->
+ RecName = Mod:name2rec(Name),
+ list_to_tuple([RecName | [mget(F, Map, def(A, Strict))
+ || {F,A} <- Mod:avp_arity(Name)]]);
+
+reformat(_, Rec, _, _, _) ->
+ Rec.
+
+%% def/2
+
+def(1, decode) ->
+ undefined;
+
+def(_, _) ->
+ [].
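+
+%% As a rough sketch of decode_format: for one and the same message,
+%% record yields a dictionary record (eg. #diameter_base_CER{}), map
+%% yields #{AvpName => Value | Values}, list yields
+%% [{AvpName, Value | Values}] ordered as in the dictionary, and none
+%% yields just the message name.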
diff --git a/lib/diameter/src/base/diameter_lib.erl b/lib/diameter/src/base/diameter_lib.erl
index 8792e97621..1c1ea42cb5 100644
--- a/lib/diameter/src/base/diameter_lib.erl
+++ b/lib/diameter/src/base/diameter_lib.erl
@@ -283,7 +283,7 @@ ip(T)
%% Or not: convert from '.'/':'-separated decimal/hex.
ip(Addr) ->
- {ok, A} = inet_parse:address(Addr), %% documented in inet(3)
+ {ok, A} = inet:parse_address(Addr),
A.
%% ---------------------------------------------------------------------------
diff --git a/lib/diameter/src/base/diameter_peer.erl b/lib/diameter/src/base/diameter_peer.erl
index 2759f17e64..4cb5a57a54 100644
--- a/lib/diameter/src/base/diameter_peer.erl
+++ b/lib/diameter/src/base/diameter_peer.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2015. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -202,10 +202,10 @@ match1(Addr, Match) ->
match(Addr, {ok, A}, _) ->
Addr == A;
match(Addr, {error, _}, RE) ->
- match == re:run(inet_parse:ntoa(Addr), RE, [{capture, none}]).
+ match == re:run(inet:ntoa(Addr), RE, [{capture, none}, caseless]).
addr([_|_] = A) ->
- inet_parse:address(A);
+ inet:parse_address(A);
addr(A) ->
{ok, A}.
diff --git a/lib/diameter/src/base/diameter_peer_fsm.erl b/lib/diameter/src/base/diameter_peer_fsm.erl
index 1b0dc417e5..d99f11a697 100644
--- a/lib/diameter/src/base/diameter_peer_fsm.erl
+++ b/lib/diameter/src/base/diameter_peer_fsm.erl
@@ -128,7 +128,8 @@
%% outgoing DPR; boolean says whether or not
%% the request was sent explicitly with
%% diameter:call/4.
- codec :: #{string_decode := boolean(),
+ codec :: #{decode_format := diameter:decode_format(),
+ string_decode := boolean(),
strict_mbit := boolean(),
rfc := 3588 | 6733,
ordered_encode := false},
@@ -237,7 +238,7 @@ i({Ack, WPid, {M, Ref} = T, Opts, {SvcOpts, Nodes, Dict0, Svc}}) ->
proplists:get_value(dpa_timeout, Opts, ?DPA_TIMEOUT)}),
Tmo = proplists:get_value(capx_timeout, Opts, ?CAPX_TIMEOUT),
- Strictness = proplists:get_value(capx_strictness, Opts, true),
+ Strict = proplists:get_value(strict_capx, Opts, true),
LengthErr = proplists:get_value(length_errors, Opts, exit),
{TPid, Addrs} = start_transport(T, Rest, Svc),
@@ -251,9 +252,10 @@ i({Ack, WPid, {M, Ref} = T, Opts, {SvcOpts, Nodes, Dict0, Svc}}) ->
mode = M,
service = svc(Svc, Addrs),
length_errors = LengthErr,
- strict = Strictness,
+ strict = Strict,
incoming_maxlen = Maxlen,
- codec = maps:with([string_decode,
+ codec = maps:with([decode_format,
+ string_decode,
strict_mbit,
rfc,
ordered_encode],
@@ -542,11 +544,11 @@ put_route(Pid) ->
MRef = monitor(process, Pid),
put(Pid, MRef).
-%% get_route/2
+%% get_route/3
-%% incoming answer
-get_route(_, #diameter_packet{header = #diameter_header{is_request = false}}
- = Pkt) ->
+%% Incoming answer.
+get_route(_, _, #diameter_packet{header = #diameter_header{is_request = false}}
+ = Pkt) ->
Seqs = diameter_codec:sequence_numbers(Pkt),
case erase(Seqs) of
{Pid, Ref, MRef} ->
@@ -557,8 +559,14 @@ get_route(_, #diameter_packet{header = #diameter_header{is_request = false}}
false
end;
-%% incoming request
-get_route(Ack, _) ->
+%% Requests answered here ...
+get_route(_, N, _)
+ when N == 'CER';
+ N == 'DPR' ->
+ false;
+
+%% ... or not.
+get_route(Ack, _, _) ->
Ack.
%% erase_route/1
@@ -649,10 +657,6 @@ encode(Rec, Opts, Dict) ->
%% incoming/2
-incoming({recv = T, Name, Pkt}, #state{parent = Pid, ack = Ack} = S) ->
- Pid ! {T, self(), get_route(Ack, Pkt), Name, Pkt},
- rcv(Name, Pkt, S);
-
incoming(#diameter_header{is_request = R}, #state{transport = TPid,
ack = Ack}) ->
R andalso Ack andalso send(TPid, false),
@@ -670,98 +674,97 @@ incoming(T, _) ->
%% recv/2
-recv(#diameter_packet{header = #diameter_header{} = Hdr}
- = Pkt,
- #state{dictionary = Dict0}
- = S) ->
- recv1(diameter_codec:msg_name(Dict0, Hdr), Pkt, S);
-
-recv(#diameter_packet{header = undefined,
- bin = Bin}
- = Pkt,
- S) ->
- recv(diameter_codec:decode_header(Bin), Pkt, S);
+recv(#diameter_packet{bin = Bin} = Pkt, S) ->
+ recv(Bin, Pkt, S);
recv(Bin, S) ->
- recv(#diameter_packet{bin = Bin}, S).
+ recv(Bin, Bin, S).
+
+%% recv/3
+
+recv(Bin, Msg, S) ->
+ recv(diameter_codec:decode_header(Bin), Bin, Msg, S).
-%% recv1/3
+%% recv/4
-recv1(_,
- #diameter_packet{header = H, bin = Bin},
- #state{incoming_maxlen = M})
+recv(false, Bin, _, #state{length_errors = E}) ->
+ invalid(E, truncated_header, Bin),
+ Bin;
+
+recv(#diameter_header{length = Len} = H, Bin, Msg, #state{length_errors = E,
+ incoming_maxlen = M,
+ dictionary = Dict0}
+ = S)
+ when E == handle;
+ 0 == Len rem 4, bit_size(Bin) == 8*Len, size(Bin) =< M ->
+ recv1(diameter_codec:msg_name(Dict0, H), H, Msg, S);
+
+recv(H, Bin, _, #state{incoming_maxlen = M})
when M < size(Bin) ->
invalid(false, incoming_maxlen_exceeded, {size(Bin), H}),
H;
+recv(H, Bin, _, #state{length_errors = E}) ->
+ T = {size(Bin), bit_size(Bin) rem 8, H},
+ invalid(E, message_length_mismatch, T),
+ H.
+
+%% recv1/4
+
%% Ignore anything but an expected CER/CEA if so configured. This is
%% non-standard behaviour.
-recv1(Name, #diameter_packet{header = H}, #state{state = {'Wait-CEA', _, _},
- strict = false})
+recv1(Name, H, _, #state{state = {'Wait-CEA', _, _},
+ strict = false})
when Name /= 'CEA' ->
H;
-recv1(Name, #diameter_packet{header = H}, #state{state = recv_CER,
- strict = false})
+recv1(Name, H, _, #state{state = recv_CER,
+ strict = false})
when Name /= 'CER' ->
H;
%% Incoming request after outgoing DPR: discard. Don't discard DPR, so
%% both ends don't do so when sending simultaneously.
-recv1(Name,
- #diameter_packet{header = #diameter_header{is_request = true} = H},
- #state{dpr = {_,_,_}})
+recv1(Name, #diameter_header{is_request = true} = H, _, #state{dpr = {_,_,_}})
when Name /= 'DPR' ->
invalid(false, recv_after_outgoing_dpr, H),
H;
%% Incoming request after incoming DPR: discard.
-recv1(_,
- #diameter_packet{header = #diameter_header{is_request = true} = H},
- #state{dpr = true}) ->
+recv1(_, #diameter_header{is_request = true} = H, _, #state{dpr = true}) ->
invalid(false, recv_after_incoming_dpr, H),
H;
%% DPA with identifier mismatch, or in response to a DPR initiated by
%% the service.
-recv1('DPA' = N,
- #diameter_packet{header = #diameter_header{hop_by_hop_id = Hid,
- end_to_end_id = Eid}}
- = Pkt,
- #state{dpr = {X,H,E}}
+recv1('DPA' = Name,
+ #diameter_header{hop_by_hop_id = Hid, end_to_end_id = Eid}
+ = H,
+ Msg,
+ #state{dpr = {X,HI,EI}}
= S)
- when H /= Hid;
- E /= Eid;
+ when HI /= Hid;
+ EI /= Eid;
not X ->
- rcv(N, Pkt, S);
+ Pkt = pkt(H, Msg),
+ handle(Name, Pkt, S);
-%% Any other message with a header and no length errors: send to the
-%% parent.
-recv1(Name, Pkt, #state{}) ->
- {recv, Name, Pkt}.
+%% Any other message with a header and no length errors.
+recv1(Name, H, Msg, #state{parent = Pid, ack = Ack} = S) ->
+ Pkt = pkt(H, Msg),
+ Pid ! {recv, self(), get_route(Ack, Name, Pkt), Name, Pkt},
+ handle(Name, Pkt, S).
-%% recv/3
+%% pkt/2
-recv(#diameter_header{length = Len}
- = H,
- #diameter_packet{bin = Bin}
- = Pkt,
- #state{length_errors = E}
- = S)
- when E == handle;
- 0 == Len rem 4, bit_size(Bin) == 8*Len ->
- recv(Pkt#diameter_packet{header = H}, S);
+pkt(H, Bin)
+ when is_binary(Bin) ->
+ #diameter_packet{header = H,
+ bin = Bin};
-recv(#diameter_header{}
- = H,
- #diameter_packet{bin = Bin},
- #state{length_errors = E}) ->
- T = {size(Bin), bit_size(Bin) rem 8, H},
- invalid(E, message_length_mismatch, T),
- Bin;
+pkt(H, Pkt) ->
+ Pkt#diameter_packet{header = H}.
-recv(false, #diameter_packet{bin = Bin}, #state{length_errors = E}) ->
- invalid(E, truncated_header, Bin),
- Bin.
+%% invalid/3
%% Note that counters here only count discarded messages.
invalid(E, Reason, T) ->
@@ -770,39 +773,39 @@ invalid(E, Reason, T) ->
?LOG(Reason, T),
ok.
-%% rcv/3
+%% handle/3
%% Incoming CEA.
-rcv('CEA' = N,
- #diameter_packet{header = #diameter_header{end_to_end_id = Eid,
- hop_by_hop_id = Hid}}
- = Pkt,
- #state{state = {'Wait-CEA', Hid, Eid}}
- = S) ->
+handle('CEA' = N,
+ #diameter_packet{header = #diameter_header{end_to_end_id = Eid,
+ hop_by_hop_id = Hid}}
+ = Pkt,
+ #state{state = {'Wait-CEA', Hid, Eid}}
+ = S) ->
?LOG(recv, N),
handle_CEA(Pkt, S);
%% Incoming CER
-rcv('CER' = N, Pkt, #state{state = recv_CER} = S) ->
+handle('CER' = N, Pkt, #state{state = recv_CER} = S) ->
handle_request(N, Pkt, S);
%% Anything but CER/CEA in a non-Open state is an error, as is
%% CER/CEA in anything but recv_CER/Wait-CEA.
-rcv(Name, _, #state{state = PS})
+handle(Name, _, #state{state = PS})
when PS /= 'Open';
Name == 'CER';
Name == 'CEA' ->
{stop, {Name, PS}};
-rcv('DPR' = N, Pkt, S) ->
+handle('DPR' = N, Pkt, S) ->
handle_request(N, Pkt, S);
%% DPA in response to DPR, with the expected identifiers.
-rcv('DPA' = N,
- #diameter_packet{header = #diameter_header{end_to_end_id = Eid,
- hop_by_hop_id = Hid}
- = H}
- = Pkt,
+handle('DPA' = N,
+ #diameter_packet{header = #diameter_header{end_to_end_id = Eid,
+ hop_by_hop_id = Hid}
+ = H}
+ = Pkt,
#state{dictionary = Dict0,
transport = TPid,
dpr = {X, Hid, Eid},
@@ -813,7 +816,8 @@ rcv('DPA' = N,
%% service: explicit DPR is counted in the same way
%% as other explicitly sent requests.
incr(recv, H, Dict0),
- incr_rc(recv, diameter_codec:decode(Dict0, Opts, Pkt), Dict0)
+ {_, RecPkt} = decode(Dict0, Opts, Pkt),
+ incr_rc(recv, RecPkt, Dict0)
end,
diameter_peer:close(TPid),
{stop, N};
@@ -821,13 +825,13 @@ rcv('DPA' = N,
%% Ignore an unsolicited DPA in particular. Note that dpa_timeout
%% deals with the case in which the peer sends the wrong identifiers
%% in DPA.
-rcv('DPA' = N, #diameter_packet{header = H}, _) ->
+handle('DPA' = N, #diameter_packet{header = H}, _) ->
?LOG(ignored, N),
%% Note that these aren't counted in the normal recv counter.
diameter_stats:incr({diameter_codec:msg_id(H), recv, ignored}),
ok;
-rcv(_, _, _) ->
+handle(_, _, _) ->
ok.
%% incr/3
@@ -917,21 +921,30 @@ handle_request(Name,
= S) ->
?LOG(recv, Name),
incr(recv, H, Dict0),
- send_answer(Name, diameter_codec:decode(Dict0, Opts, Pkt), S).
+ send_answer(Name, decode(Dict0, Opts, Pkt), S).
+
+%% decode/3
+%%
+%% Decode the message as record for diameter_capx, and in the
+%% configured format for events.
+
+decode(Dict0, Opts, Pkt) ->
+ {diameter_codec:decode(Dict0, Opts, Pkt),
+ diameter_codec:decode(Dict0, Opts#{decode_format := record}, Pkt)}.
%% send_answer/3
-send_answer(Type, ReqPkt, #state{transport = TPid,
- dictionary = Dict,
- codec = Opts}
- = S) ->
- incr_error(recv, ReqPkt, Dict),
+send_answer(Type, {DecPkt, RecPkt}, #state{transport = TPid,
+ dictionary = Dict,
+ codec = Opts}
+ = S) ->
+ incr_error(recv, RecPkt, Dict),
#diameter_packet{header = H,
transport_data = TD}
- = ReqPkt,
+ = RecPkt,
- {Msg, PostF} = build_answer(Type, ReqPkt, S),
+ {Msg, PostF} = build_answer(Type, DecPkt, RecPkt, S),
%% An answer message clears the R and T flags and retains the P
%% flag. The E flag is set at encode.
@@ -959,15 +972,15 @@ eval([F|A], S) ->
eval(T, _) ->
close(T).
-%% build_answer/3
+%% build_answer/4
build_answer('CER',
+ DecPkt,
#diameter_packet{msg = CER,
header = #diameter_header{version
= ?DIAMETER_VERSION,
is_error = false},
- errors = []}
- = Pkt,
+ errors = []},
#state{dictionary = Dict0}
= S) ->
{SupportedApps, RCaps, CEA} = recv_CER(CER, S),
@@ -985,25 +998,25 @@ build_answer('CER',
orelse ?THROW(4003), %% DIAMETER_ELECTION_LOST
caps_cb(Caps)
of
- N -> {cea(CEA, N, Dict0), [fun open/5, Pkt,
+ N -> {cea(CEA, N, Dict0), [fun open/5, DecPkt,
SupportedApps,
Caps,
{accept, inband_security(IS)}]}
catch
?FAILURE(Reason) ->
- rejected(Reason, {'CER', Reason, Caps, Pkt}, S)
+ rejected(Reason, {'CER', Reason, Caps, DecPkt}, S)
end;
%% The error checks below are similar to those in diameter_traffic for
%% other messages. Should factor out the commonality.
build_answer(Type,
+ DecPkt,
#diameter_packet{header = H,
- errors = Es}
- = Pkt,
+ errors = Es},
S) ->
{RC, FailedAVP} = result_code(Type, H, Es),
- {answer(Type, RC, FailedAVP, S), post(Type, RC, Pkt, S)}.
+ {answer(Type, RC, FailedAVP, S), post(Type, RC, DecPkt, S)}.
inband_security([]) ->
?NO_INBAND_SECURITY;
@@ -1175,12 +1188,10 @@ handle_CEA(#diameter_packet{header = H}
= S) ->
incr(recv, H, Dict0),
- #diameter_packet{}
- = DPkt
- = diameter_codec:decode(Dict0, Opts, Pkt),
+ {DecPkt, RecPkt} = decode(Dict0, Opts, Pkt),
- RC = result_code(incr_rc(recv, DPkt, Dict0)),
- {SApps, IS, RCaps} = recv_CEA(DPkt, S),
+ RC = result_code(incr_rc(recv, RecPkt, Dict0)),
+ {SApps, IS, RCaps} = recv_CEA(RecPkt, S),
#diameter_caps{origin_host = {OH, DH}}
= Caps
@@ -1203,9 +1214,9 @@ handle_CEA(#diameter_packet{header = H}
orelse ?THROW(election_lost),
caps_cb(Caps)
of
- _ -> open(DPkt, SApps, Caps, {connect, hd([_] = IS)}, S)
+ _ -> open(DecPkt, SApps, Caps, {connect, hd([_] = IS)}, S)
catch
- ?FAILURE(Reason) -> close({'CEA', Reason, Caps, DPkt})
+ ?FAILURE(Reason) -> close({'CEA', Reason, Caps, DecPkt})
end.
%% Check more than the result code since the peer could send success
%% regardless. If not 2001 then a peer_up callback could do anything
diff --git a/lib/diameter/src/base/diameter_reg.erl b/lib/diameter/src/base/diameter_reg.erl
index 97e74657bd..bd5db54a5c 100644
--- a/lib/diameter/src/base/diameter_reg.erl
+++ b/lib/diameter/src/base/diameter_reg.erl
@@ -19,10 +19,11 @@
%%
%%
-%% The module implements a simple term -> pid registry.
+%% A simple term -> pid registry.
%%
-module(diameter_reg).
+
-behaviour(gen_server).
-export([add/1,
@@ -57,18 +58,18 @@
-type key() :: term().
-type from() :: {pid(), term()}.
+-type rcvr() :: [pid() | term()] %% subscribe
+ | from(). %% wait
-type pattern() :: term().
-record(state, {id = diameter_lib:now(),
- receivers = dict:new()
- :: dict:dict(pattern(), [[pid() | term()]%% subscribe
- | from()]), %% wait
+ notify = #{} :: #{pattern() => [rcvr()]},
monitors = sets:new() :: sets:set(pid())}).
%% The ?TABLE bag contains the Key -> Pid mapping, as {Key, Pid}
%% tuples. Each pid is stored in the monitors set to ensure only one
%% monitor for each pid: more are harmless, but unnecessary. A pattern
-%% is added to receivers a result of calls to wait/1 or subscribe/2:
+%% is added to notify as a result of calls to wait/1 or subscribe/2:
%% changes to ?TABLE causes processes to be notified as required.
%% ===========================================================================
@@ -156,7 +157,7 @@ wait(Pat) ->
%% # subscribe(Pat, T)
%%
%% Like match/1, but additionally receive messages of the form
-%% {T, add|remove, {term(), pid()} when associations are added
+%% {T, add|remove, {term(), pid()}} when associations are added
%% or removed.
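+%%
+%% For example (a sketch): after diameter_reg:subscribe({k, '_'}, tag),
+%% an insertion of {k, 1} by a process P is delivered to the subscriber
+%% as {tag, add, {{k, 1}, P}}, and its removal (or the death of P) as
+%% {tag, remove, {{k, 1}, P}}.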
%% ===========================================================================
@@ -186,15 +187,12 @@ uptime() ->
-> [{pid(), [key()]}].
pids() ->
- to_list(fun swap/1).
-
-to_list(Fun) ->
- ets:foldl(fun(T,D) -> append(Fun(T), D) end, orddict:new(), ?TABLE).
+ append(ets:select(?TABLE, [{{'$1','$2'}, [], [{{'$2', '$1'}}]}])).
-append({K,V}, Dict) ->
- orddict:append(K, V, Dict).
-
-id(T) -> T.
+append(Pairs) ->
+ dict:to_list(lists:foldl(fun({K,V}, D) -> dict:append(K, V, D) end,
+ dict:new(),
+ Pairs)).
%% terms/0
@@ -202,9 +200,7 @@ id(T) -> T.
-> [{key(), [pid()]}].
terms() ->
- to_list(fun id/1).
-
-swap({X,Y}) -> {Y,X}.
+ append(ets:tab2list(?TABLE)).
%% subs/0
@@ -212,31 +208,19 @@ swap({X,Y}) -> {Y,X}.
-> [{pattern(), [{pid(), term()}]}].
subs() ->
- #state{receivers = RD} = state(),
- dict:fold(fun sub/3, orddict:new(), RD).
-
-sub(Pat, Ps, Dict) ->
- lists:foldl(fun([P|T], D) -> orddict:append(Pat, {P,T}, D);
- (_, D) -> D
- end,
- Dict,
- Ps).
+ #state{notify = Dict} = state(),
+ [{K, Ts} || {K,Ps} <- maps:to_list(Dict),
+ Ts <- [[{P,T} || [P|T] <- Ps]]].
%% waits/0
-spec waits()
- -> [{pattern(), [{from(), term()}]}].
+ -> [{pattern(), [from()]}].
waits() ->
- #state{receivers = RD} = state(),
- dict:fold(fun wait/3, orddict:new(), RD).
-
-wait(Pat, Ps, Dict) ->
- lists:foldl(fun({_,_} = F, D) -> orddict:append(Pat, F, D);
- (_, D) -> D
- end,
- Dict,
- Ps).
+ #state{notify = Dict} = state(),
+ [{K, Ts} || {K,Ps} <- maps:to_list(Dict),
+ Ts <- [[T || {_,_} = T <- Ps]]].
%% ----------------------------------------------------------
%% # init/1
@@ -250,33 +234,28 @@ init(_) ->
%% # handle_call/3
%% ----------------------------------------------------------
-handle_call({add, Uniq, Key}, {Pid, _}, S0) ->
+handle_call({add, Uniq, Key}, {Pid, _}, S) ->
Rec = {Key, Pid},
- S1 = flush(Uniq, Rec, S0),
+ NS = flush(Uniq, Rec, S), %% before insert
{Res, New} = insert(Uniq, Rec),
- {Recvs, S} = add(New, Rec, S1),
- notify(Recvs, Rec),
- {reply, Res, S};
+ {reply, Res, notify(add, New andalso Rec, NS)};
handle_call({remove, Key}, {Pid, _}, S) ->
Rec = {Key, Pid},
- Recvs = delete([Rec], S),
ets:delete_object(?TABLE, Rec),
- notify(Recvs, remove),
- {reply, true, S};
+ {reply, true, notify(remove, Rec, S)};
-handle_call({wait, Pat}, {Pid, _} = From, #state{receivers = RD} = S) ->
+handle_call({wait, Pat}, {Pid, _} = From, S) ->
NS = add_monitor(Pid, S),
case match(Pat) of
- [_|_] = L ->
- {reply, L, NS};
+ [_|_] = Recs ->
+ {reply, Recs, NS};
[] ->
- {noreply, NS#state{receivers = dict:append(Pat, From, RD)}}
+ {noreply, queue(Pat, From, NS)}
end;
-handle_call({subscribe, Pat, T}, {Pid, _}, #state{receivers = RD} = S) ->
- NS = add_monitor(Pid, S),
- {reply, match(Pat), NS#state{receivers = dict:append(Pat, [Pid | T], RD)}};
+handle_call({subscribe, Pat, T}, {Pid, _}, S) ->
+ {reply, match(Pat), queue(Pat, [Pid | T], add_monitor(Pid, S))};
handle_call(state, _, S) ->
{reply, S, S};
@@ -332,106 +311,60 @@ insert(true, Rec) ->
B = ets:insert_new(?TABLE, Rec), %% entry inserted?
{B, B}.
-%% add/3
-
+%% add_monitor/2
+%%
%% Only add a single monitor for any given process, since there's no
%% use to more.
-add(true, {_Key, Pid} = Rec, S) ->
- NS = add_monitor(Pid, S),
- {Recvs, RD} = add(Rec, NS),
- {Recvs, S#state{receivers = RD}};
-
-add(false = No, _, S) ->
- {No, S}.
-
-%% add/2
-
-%% Notify processes whose patterns match the inserted key.
-add({_Key, Pid} = Rec, #state{receivers = RD}) ->
- dict:fold(fun(Pt, Ps, A) ->
- add(lists:member(Rec, match(Pt, Pid)), Pt, Ps, Rec, A)
- end,
- {sets:new(), RD},
- RD).
-
-%% add/5
-
-add(true, Pat, Recvs, {_,_} = Rec, {Set, Dict}) ->
- {lists:foldl(fun sets:add_element/2, Set, Recvs),
- remove(fun erlang:is_list/1, Pat, Recvs, Dict)};
-add(false, _, _, _, Acc) ->
- Acc.
+add_monitor(Pid, #state{monitors = Ps} = S) ->
+ case sets:is_element(Pid, Ps) of
+ false ->
+ monitor(process, Pid),
+ S#state{monitors = sets:add_element(Pid, Ps)};
+ true ->
+ S
+ end.
-%% add_monitor/2
-
-add_monitor(Pid, #state{monitors = MS} = S) ->
- add_monitor(sets:is_element(Pid, MS), Pid, S).
-
-%% add_monitor/3
-
-add_monitor(false, Pid, #state{monitors = MS} = S) ->
- monitor(process, Pid),
- S#state{monitors = sets:add_element(Pid, MS)};
-
-add_monitor(true, _, S) ->
- S.
-
-%% delete/2
-
-delete(Recs, #state{receivers = RD}) ->
- lists:foldl(fun(R,S) -> delete(R, RD, S) end, sets:new(), Recs).
+%% notify/3
-%% delete/3
+notify(_, false, S) ->
+ S;
-delete({_Key, Pid} = Rec, RD, Set) ->
- dict:fold(fun(Pt, Ps, S) ->
- delete(lists:member(Rec, match(Pt, Pid)), Rec, Ps, S)
- end,
- Set,
- RD).
+notify(Op, {_,_} = Rec, #state{notify = Dict} = S) ->
+ S#state{notify = maps:fold(fun(P,Rs,D) -> notify(Op, Rec, P, Rs, D) end,
+ Dict,
+ Dict)}.
-%% delete/4
+%% notify/5
-%% Entry matches a pattern ...
-delete(true, Rec, Recvs, Set) ->
- lists:foldl(fun(R,S) -> sets:add_element({R, Rec}, S) end,
- Set,
- Recvs);
-
-%% ... or not.
-delete(false, _, _, Set) ->
- Set.
-
-%% notify/2
-
-notify(false = No, _) ->
- No;
-
-notify(Recvs, remove = Op) ->
- sets:fold(fun({P,R}, N) -> send(P, R, Op), N+1 end, 0, Recvs);
-
-notify(Recvs, {_,_} = Rec) ->
- sets:fold(fun(P,N) -> send(P, Rec, add), N+1 end, 0, Recvs).
+notify(Op, {_, Pid} = Rec, Pat, Rcvrs, Dict) ->
+ case lists:member(Rec, match(Pat, Pid)) of
+ true ->
+ reset(Pat, Dict, [P || P <- Rcvrs, send(P, Op, Rec)]);
+ false ->
+ Dict
+ end.
%% send/3
-%% No processes waiting on remove, by construction: they've either
-%% received notification at add or aren't waiting.
-send([Pid | T], Rec, Op) ->
- Pid ! {T, Op, Rec};
+send([Pid | T], Op, Rec) ->
+ Pid ! {T, Op, Rec},
+ true;
-send({_,_} = From, Rec, add) ->
- gen_server:reply(From, [Rec]).
+%% No processes wait on remove: they receive notification immediately
+%% or at add, by construction.
+send({_,_} = From, add, Rec) ->
+ gen_server:reply(From, [Rec]),
+ false.
%% down/2
-down(Pid, #state{monitors = MS} = S) ->
- NS = flush(Pid, S),
- Recvs = delete(match('_', Pid), NS),
+down(Pid, #state{monitors = Ps} = S) ->
+ Recs = match('_', Pid),
ets:match_delete(?TABLE, {'_', Pid}),
- notify(Recvs, remove),
- NS#state{monitors = sets:del_element(Pid, MS)}.
+ lists:foldl(fun(R,NS) -> notify(remove, R, NS) end,
+ flush(Pid, S#state{monitors = sets:del_element(Pid, Ps)}),
+ Recs).
%% flush/3
@@ -452,16 +385,15 @@ flush(false, _, S) ->
%% flush/2
%% Process has died and should no longer receive messages/replies.
-flush(Pid, #state{receivers = RD} = S)
- when is_pid(Pid) ->
- S#state{receivers = dict:fold(fun(Pt,Ps,D) -> flush(Pid, Pt, Ps, D) end,
- RD,
- RD)}.
+flush(Pid, #state{notify = Dict} = S) ->
+ S#state{notify = maps:fold(fun(P,Rs,D) -> flush(Pid, P, Rs, D) end,
+ Dict,
+ Dict)}.
%% flush/4
-flush(Pid, Pat, Recvs, Dict) ->
- remove(fun(T) -> Pid /= head(T) end, Pat, Recvs, Dict).
+flush(Pid, Pat, Rcvrs, Dict) ->
+ reset(Pat, Dict, [T || T <- Rcvrs, Pid /= head(T)]).
%% head/1
@@ -471,15 +403,18 @@ head([P|_]) ->
head({P,_}) ->
P.
-%% remove/4
+%% reset/3
+
+reset(Key, Map, []) ->
+ maps:remove(Key, Map);
+
+reset(Key, Map, List) ->
+ maps:put(Key, List, Map).
+
+%% queue/3
-remove(Pred, Key, Values, Dict) ->
- case lists:filter(Pred, Values) of
- [] ->
- dict:erase(Key, Dict);
- Rest ->
- dict:store(Key, Rest, Dict)
- end.
+queue(Pat, Rcvr, #state{notify = Dict} = S) ->
+ S#state{notify = maps:put(Pat, [Rcvr | maps:get(Pat, Dict, [])], Dict)}.
%% call/1
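
The rewritten registry above keeps waiters and subscribers in a map (the notify field) instead of a dict: queue/3 appends a receiver under its pattern key, reset/3 stores the remaining receivers or drops the key once the list empties, and flush/2,4 discards receivers owned by a process that has died. A minimal standalone sketch of the same map idiom; module and helper names here are illustrative, not part of diameter:

-module(notify_map_sketch).
-export([queue/3, reset/3, flush/2]).

%% Append a receiver under a pattern key, creating the key on demand.
queue(Pat, Rcvr, Map) ->
    maps:put(Pat, [Rcvr | maps:get(Pat, Map, [])], Map).

%% Store the remaining receivers, or drop the key entirely when none
%% remain, so the map never accumulates empty entries.
reset(Pat, Map, []) ->
    maps:remove(Pat, Map);
reset(Pat, Map, Rcvrs) ->
    maps:put(Pat, Rcvrs, Map).

%% Discard receivers belonging to a process that is no longer alive.
flush(Pid, Map) ->
    maps:fold(fun(Pat, Rcvrs, M) ->
                      reset(Pat, M, [R || R <- Rcvrs, Pid /= owner(R)])
              end,
              Map,
              Map).

%% A receiver is either [Pid | Term] (a subscriber) or {Pid, Ref} (a
%% waiting gen_server caller), as in head/1 above.
owner([P | _]) -> P;
owner({P, _}) -> P.

For example, queue(Pat, {Pid, Ref}, #{}) followed by flush(Pid, _) when Pid has died gives back the empty map.
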
diff --git a/lib/diameter/src/base/diameter_service.erl b/lib/diameter/src/base/diameter_service.erl
index a976a8b998..31dd92f878 100644
--- a/lib/diameter/src/base/diameter_service.erl
+++ b/lib/diameter/src/base/diameter_service.erl
@@ -112,8 +112,24 @@
use_shared_peers := diameter:remotes(),%% use from
restrict_connections := diameter:restriction(),
incoming_maxlen := diameter:message_length(),
+ strict_arities => diameter:strict_arities(),
strict_mbit := boolean(),
+ decode_format := diameter:decode_format(),
+ avp_dictionaries => nonempty_list(module()),
+ traffic_counters := boolean(),
string_decode := boolean(),
+ capabilities_cb => diameter:evaluable(),
+ pool_size => pos_integer(),
+ capx_timeout => diameter:'Unsigned32'(),
+ strict_capx => boolean(),
+ disconnect_cb => diameter:evaluable(),
+ dpr_timeout => diameter:'Unsigned32'(),
+ dpa_timeout => diameter:'Unsigned32'(),
+ length_errors => exit | handle | discard,
+ connect_timer => diameter:'Unsigned32'(),
+ watchdog_timer => diameter:'Unsigned32'()
+ | {module(), atom(), list()},
+ watchdog_config => [{okay|suspect, non_neg_integer()}],
spawn_opt := list() | {module(), atom(), list()}}}).
%% Record representing an RFC 3539 watchdog process implemented by
@@ -514,6 +530,13 @@ transition({tc_timeout, T}, S) ->
tc_timeout(T, S),
ok;
+transition({nodeup, Node, _}, S) ->
+ nodeup(Node, S),
+ ok;
+
+transition({nodedown, _Node, _}, _) ->
+ ok;
+
transition(Req, S) ->
unexpected(handle_info, [Req], S),
ok.
@@ -679,12 +702,15 @@ i(SvcName) ->
cfg_acc({SvcName, #diameter_service{applications = Apps} = Rec, Opts},
{false, Acc}) ->
lists:foreach(fun init_mod/1, Apps),
+ #{monitor := M}
+ = SvcOpts
+ = service_opts(Opts),
S = #state{service_name = SvcName,
service = Rec#diameter_service{pid = self()},
local = init_peers(),
remote = init_peers(),
- monitor = mref(get_value(monitor, Opts)),
- options = service_options(lists:keydelete(monitor, 1, Opts))},
+ monitor = mref(M),
+ options = maps:remove(monitor, SvcOpts)},
{S, Acc};
cfg_acc({_Ref, Type, _Opts} = T, {S, Acc})
@@ -699,8 +725,29 @@ init_peers() ->
%% Alias,
%% TPid}
-service_options(Opts) ->
- maps:from_list(Opts).
+service_opts(Opts) ->
+ remove([{strict_arities, true},
+ {avp_dictionaries, []}],
+ maps:merge(maps:from_list([{monitor, false} | def_opts()]),
+ maps:from_list(Opts))).
+
+remove(List, Map) ->
+ maps:filter(fun(K,V) -> not lists:member({K,V}, List) end,
+ Map).
+
+def_opts() -> %% defaults on the service map
+ [{share_peers, false},
+ {use_shared_peers, false},
+ {sequence, {0,32}},
+ {restrict_connections, nodes},
+ {incoming_maxlen, 16#FFFFFF},
+ {strict_arities, true},
+ {strict_mbit, true},
+ {decode_format, record},
+ {avp_dictionaries, []},
+ {traffic_counters, true},
+ {string_decode, true},
+ {spawn_opt, []}].
mref(false = No) ->
No;
@@ -709,6 +756,8 @@ mref(P) ->
init_shared(#state{options = #{use_shared_peers := T},
service_name = Svc}) ->
+ T == false orelse net_kernel:monitor_nodes(true, [{node_type, visible},
+ nodedown_reason]),
notify(T, Svc, {service, self()}).
init_mod(#diameter_app{alias = Alias,
@@ -718,16 +767,17 @@ init_mod(#diameter_app{alias = Alias,
start_fsm({Ref, Type, Opts}, S) ->
start(Ref, {Type, Opts}, S).
-get_value(Key, Vs) ->
- {_, V} = lists:keyfind(Key, 1, Vs),
- V.
-
notify(Share, SvcName, T) ->
Nodes = remotes(Share),
[] /= Nodes andalso diameter_peer:notify(Nodes, SvcName, T).
%% Test for the empty list for upgrade reasons: there's no
%% diameter_peer:notify/3 in old code.
+nodeup(Node, #state{options = #{share_peers := SP},
+ service_name = SvcName}) ->
+ lists:member(Node, remotes(SP))
+ andalso diameter_peer:notify([Node], SvcName, {service, self()}).
+
remotes(false) ->
[];
@@ -806,7 +856,7 @@ start(Ref, Type, Opts, State) ->
start(Ref, Type, Opts, N, #state{watchdogT = WatchdogT,
local = {PeerT, _, _},
options = #{string_decode := SD}
- = SvcOpts0,
+ = SvcOpts,
service_name = SvcName,
service = Svc0})
when Type == connect;
@@ -815,12 +865,12 @@ start(Ref, Type, Opts, N, #state{watchdogT = WatchdogT,
= Svc1
= merge_service(Opts, Svc0),
Svc = binary_caps(Svc1, SD),
- SvcOpts = merge_options(Opts, SvcOpts0),
- RecvData = diameter_traffic:make_recvdata([SvcName, PeerT, Apps, SvcOpts]),
- T = {Opts, SvcOpts, RecvData, Svc},
+ {SOpts, TOpts} = merge_opts(SvcOpts, Opts),
+ RecvData = diameter_traffic:make_recvdata([SvcName, PeerT, Apps, SOpts]),
+ T = {TOpts, SOpts, RecvData, Svc},
Rec = #watchdog{type = Type,
ref = Ref,
- options = Opts},
+ options = TOpts},
diameter_lib:fold_n(fun(_,A) ->
[wd(Type, Ref, T, WatchdogT, Rec) | A]
@@ -828,10 +878,14 @@ start(Ref, Type, Opts, N, #state{watchdogT = WatchdogT,
[],
N).
-merge_options(Opts, SvcOpts) ->
- Keys = maps:keys(SvcOpts),
- Map = maps:from_list([KV || {K,_} = KV <- Opts, lists:member(K, Keys)]),
- maps:merge(SvcOpts, Map).
+merge_opts(SvcOpts, Opts) ->
+ Keys = [K || {K,_} <- def_opts()],
+ SO = [T || {K,_} = T <- Opts, lists:member(K, Keys)],
+ TO = Opts -- SO,
+ {maps:merge(maps:with(Keys, SvcOpts), maps:from_list(SO)),
+ TO ++ [T || {K,_} = T <- maps:to_list(SvcOpts),
+ not lists:member(K, Keys),
+ not lists:keymember(K, 1, Opts)]}.
binary_caps(Svc, true) ->
Svc;
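
In the hunks above, service_opts/1 fills in defaults from def_opts/0 (and drops strict_arities/avp_dictionaries again while they still carry their default values), and merge_opts/2 then lets per-transport options override the service options for the def_opts/0 keys, while everything else stays transport-local; the real merge_opts/2 additionally passes SvcOpts entries outside def_opts/0 through to the transport options. A rough sketch of the split, with made-up option values:

%% Hypothetical inputs, only to show the shape of the split.
SvcOpts = #{sequence => {0,32}, string_decode => true, spawn_opt => []},
Opts = [{string_decode, false}, {connect_timer, 5000}],
Keys = [sequence, string_decode, spawn_opt],           %% keys of def_opts()
SO = [T || {K,_} = T <- Opts, lists:member(K, Keys)],  %% service overrides
TO = Opts -- SO,                                       %% transport-only options
{maps:merge(maps:with(Keys, SvcOpts), maps:from_list(SO)), TO}.
%% -> string_decode is overridden on the service map, connect_timer
%%    stays with the transport:
%%    {#{sequence => {0,32}, spawn_opt => [], string_decode => false},
%%     [{connect_timer,5000}]}
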
@@ -1400,9 +1454,15 @@ is_remote(Pid, T) ->
%% # remote_peer_up/4
%% ---------------------------------------------------------------------------
-remote_peer_up(TPid, Aliases, Caps, #state{options = #{use_shared_peers := T}}
+remote_peer_up(TPid, Aliases, Caps, #state{options = #{use_shared_peers := T},
+ remote = {PeerT, _, _}}
= S) ->
- is_remote(TPid, T) andalso rpu(TPid, Aliases, Caps, S).
+ is_remote(TPid, T)
+ andalso not ets:member(PeerT, TPid)
+ andalso rpu(TPid, Aliases, Caps, S).
+
+%% Notification can be duplicated since remote nodes push and the local
+%% node pulls.
rpu(TPid, Aliases, Caps, #state{service = Svc, remote = RT}) ->
#diameter_service{applications = Apps} = Svc,
@@ -1412,6 +1472,7 @@ rpu(TPid, Aliases, Caps, #state{service = Svc, remote = RT}) ->
rpu(_, [] = No, _, _) ->
No;
+
rpu(TPid, Aliases, Caps, {PeerT, _, _} = RT) ->
monitor(process, TPid),
ets:insert(PeerT, #peer{pid = TPid,
diff --git a/lib/diameter/src/base/diameter_traffic.erl b/lib/diameter/src/base/diameter_traffic.erl
index 85378babea..f510f40a17 100644
--- a/lib/diameter/src/base/diameter_traffic.erl
+++ b/lib/diameter/src/base/diameter_traffic.erl
@@ -70,13 +70,17 @@
timeout = 5000 :: 0..16#FFFFFFFF, %% for outgoing requests
detach = false :: boolean()}).
-%% Term passed back to receive_message/6 with every incoming message.
+%% Term passed back to receive_message/5 with every incoming message.
-record(recvdata,
{peerT :: ets:tid(),
service_name :: diameter:service_name(),
apps :: [#diameter_app{}],
sequence :: diameter:sequence(),
- codec :: #{string_decode := boolean(),
+ counters :: boolean(),
+ codec :: #{decode_format := diameter:decode_format(),
+ avp_dictionaries => nonempty_list(module()),
+ string_decode := boolean(),
+ strict_arities => diameter:strict_arities(),
strict_mbit := boolean(),
incoming_maxlen := diameter:message_length()}}).
%% Note that incoming_maxlen is currently handled in diameter_peer_fsm,
@@ -89,6 +93,7 @@
caller :: pid() | undefined, %% calling process
handler :: pid(), %% request process
peer :: undefined | {pid(), #diameter_caps{}},
+ caps :: undefined, %% no longer used
packet :: #diameter_packet{} | undefined}). %% of request
%% ---------------------------------------------------------------------------
@@ -96,13 +101,17 @@
%% ---------------------------------------------------------------------------
make_recvdata([SvcName, PeerT, Apps, SvcOpts | _]) ->
- #{sequence := {_,_} = Mask, spawn_opt := Opts}
+ #{sequence := {_,_} = Mask, spawn_opt := Opts, traffic_counters := B}
= SvcOpts,
{Opts, #recvdata{service_name = SvcName,
peerT = PeerT,
apps = Apps,
sequence = Mask,
- codec = maps:with([string_decode,
+ counters = B,
+ codec = maps:with([decode_format,
+ avp_dictionaries,
+ string_decode,
+ strict_arities,
strict_mbit,
ordered_encode,
incoming_maxlen],
@@ -182,7 +191,7 @@ incr_error(Dir, Id, TPid) ->
%% ---------------------------------------------------------------------------
-spec incr_rc(send|recv, Pkt, TPid, DictT)
- -> {Counter, non_neg_integer()}
+ -> Counter
| Reason
when Pkt :: #diameter_packet{},
TPid :: pid(),
@@ -193,18 +202,26 @@ incr_error(Dir, Id, TPid) ->
| {'Experimental-Result', integer(), integer()},
Reason :: atom().
-incr_rc(Dir, Pkt, TPid, {_, AppDict, _} = DictT) ->
- try
- incr_result(Dir, Pkt, TPid, DictT)
+incr_rc(Dir, Pkt, TPid, {MsgDict, AppDict, Dict0}) ->
+ incr_rc(Dir, Pkt, TPid, MsgDict, AppDict, Dict0);
+
+incr_rc(Dir, Pkt, TPid, Dict0) ->
+ incr_rc(Dir, Pkt, TPid, Dict0, Dict0, Dict0).
+
+%% incr_rc/6
+
+incr_rc(Dir, Pkt, TPid, MsgDict, AppDict, Dict0) ->
+ try get_result(Dir, MsgDict, Dict0, Pkt) of
+ false ->
+ unknown;
+ Avp ->
+ incr_result(Dir, Avp, Pkt, TPid, AppDict)
catch
exit: {E,_} when E == no_result_code;
E == invalid_error_bit ->
incr(TPid, {msg_id(Pkt#diameter_packet.header, AppDict), Dir, E}),
E
- end;
-
-incr_rc(Dir, Pkt, TPid, Dict0) ->
- incr_rc(Dir, Pkt, TPid, {Dict0, Dict0, Dict0}).
+ end.
%% ---------------------------------------------------------------------------
%% receive_message/5
@@ -216,13 +233,13 @@ incr_rc(Dir, Pkt, TPid, Dict0) ->
-> pid() %% request handler
| boolean() %% answer, known request or not
| discard %% request discarded by MFA
- when Route :: {Handler, RequestRef, Seqs}
+ when Route :: {Handler, RequestRef, TPid}
| Ack,
RecvData :: {[SpawnOpt], #recvdata{}},
SpawnOpt :: term(),
Handler :: pid(),
RequestRef :: reference(),
- Seqs :: {0..16#FFFFFFFF, 0..16#FFFFFFFF},
+ TPid :: pid(),
Ack :: boolean().
receive_message(TPid, Route, Pkt, Dict0, RecvData) ->
@@ -303,14 +320,15 @@ recv_request(Ack,
= Pkt,
Dict0,
#recvdata{peerT = PeerT,
- apps = Apps}
+ apps = Apps,
+ counters = Count}
= RecvData) ->
Ack andalso (TPid ! {handler, self()}),
case diameter_service:find_incoming_app(PeerT, TPid, Id, Apps) of
{#diameter_app{id = Aid, dictionary = AppDict} = App, Caps} ->
- incr(recv, Pkt, TPid, AppDict),
+ Count andalso incr(recv, Pkt, TPid, AppDict),
DecPkt = decode(Aid, AppDict, RecvData, Pkt),
- incr_error(recv, DecPkt, TPid, AppDict),
+ Count andalso incr_error(recv, DecPkt, TPid, AppDict),
send_A(recv_R(App, TPid, Dict0, Caps, RecvData, DecPkt),
TPid,
App,
@@ -323,9 +341,7 @@ recv_request(Ack,
%% A request was sent for an application that is not
%% supported.
RC = 3007,
- Es = Pkt#diameter_packet.errors,
- DecPkt = Pkt#diameter_packet{avps = collect_avps(Pkt),
- errors = [RC | Es]},
+ DecPkt = diameter_codec:collect_avps(Pkt),
send_answer(answer_message(RC, Dict0, Caps, DecPkt),
TPid,
Dict0,
@@ -338,17 +354,11 @@ recv_request(Ack,
No
end.
+%% decode/4
+
decode(Id, Dict, #recvdata{codec = Opts}, Pkt) ->
errors(Id, diameter_codec:decode(Id, Dict, Opts, Pkt)).
-collect_avps(Pkt) ->
- case diameter_codec:collect_avps(Pkt) of
- {_Error, Avps} ->
- Avps;
- Avps ->
- Avps
- end.
-
%% send_A/7
send_A([T | Fs], TPid, App, Dict0, RecvData, DecPkt, Caps) ->
@@ -541,6 +551,7 @@ send_A({call, Opts}, TPid, App, Dict0, RecvData, Pkt, Caps, Fs) ->
MsgDict,
AppDict,
Dict0,
+ RecvData#recvdata.counters,
Fs);
RC ->
send_answer(answer_message(RC, Dict0, Caps, Pkt),
@@ -584,14 +595,22 @@ send_answer(Ans, TPid, MsgDict, AppDict, Dict0, RecvData, DecPkt, Fs) ->
TPid,
RecvData#recvdata.codec,
make_answer_packet(Ans, DecPkt, MsgDict, Dict0)),
- send_answer(Pkt, TPid, MsgDict, AppDict, Dict0, Fs).
+ send_answer(Pkt,
+ TPid,
+ MsgDict,
+ AppDict,
+ Dict0,
+ RecvData#recvdata.counters,
+ Fs).
-%% send_answer/6
+%% send_answer/7
-send_answer(Pkt, TPid, MsgDict, AppDict, Dict0, [EvalPktFs | EvalFs]) ->
+send_answer(Pkt, TPid, MsgDict, AppDict, Dict0, Count, [EvalPktFs | EvalFs]) ->
eval_packet(Pkt, EvalPktFs),
- incr(send, Pkt, TPid, AppDict),
- incr_rc(send, Pkt, TPid, {MsgDict, AppDict, Dict0}), %% count outgoing
+ Count andalso begin
+ incr(send, Pkt, TPid, AppDict),
+ incr_rc(send, Pkt, TPid, MsgDict, AppDict, Dict0)
+ end,
send(TPid, z(Pkt), _Route = self()),
lists:foreach(fun diameter_lib:eval/1, EvalFs).
@@ -619,7 +638,7 @@ is_answer_message(#diameter_packet{msg = Msg}, Dict0) ->
is_answer_message([#diameter_header{is_request = R, is_error = E} | _], _) ->
E andalso not R;
-%% Message sent as a tagged avp/value list.
+%% Message sent as a map or tagged avp/value list.
is_answer_message([Name | _], _) ->
Name == 'answer-message';
@@ -867,7 +886,10 @@ reset(Msg, [RC | Avps], Dict) ->
%% set/3
-%% Reply as name and tuple list ...
+%% Reply as name/values list ...
+set([Name|As], Avps, _)
+ when is_map(As) ->
+ [Name | maps:merge(As, maps:from_list(Avps))];
set([_|_] = Ans, Avps, _) ->
Ans ++ Avps; %% Values nearer tail take precedence.
@@ -900,33 +922,44 @@ failed_avp(_, [] = No, _) ->
failed_avp(Msg, [_|_] = Avps, Dict) ->
[failed(Msg, [{'AVP', Avps}], Dict)].
-%% Reply as name and tuple list ...
-failed([MsgName | Values], FailedAvp, Dict) ->
- RecName = Dict:msg2rec(MsgName),
+%% failed/3
+
+failed(Msg, FailedAvp, Dict) ->
+ RecName = msg2rec(Msg, Dict),
try
- Dict:'#info-'(RecName, {index, 'Failed-AVP'}),
+ Dict:'#info-'(RecName, {index, 'Failed-AVP'}), %% assert existence
{'Failed-AVP', [FailedAvp]}
catch
error: _ ->
- Avps = proplists:get_value('AVP', Values, []),
+ Avps = values(Msg, 'AVP', Dict),
A = #diameter_avp{name = 'Failed-AVP',
value = FailedAvp},
{'AVP', [A|Avps]}
+ end.
+
+%% msg2rec/2
+
+%% Message as name/values list ...
+msg2rec([MsgName | _], Dict) ->
+ Dict:msg2rec(MsgName);
+
+%% ... or record.
+msg2rec(Rec, _) ->
+ element(1, Rec).
+
+%% values/2
+
+%% Message as name/values list ...
+values([_ | Avps], F, _) ->
+ if is_map(Avps) ->
+ maps:get(F, Avps, []);
+ is_list(Avps) ->
+ proplists:get_value(F, Avps, [])
end;
%% ... or record.
-failed(Rec, FailedAvp, Dict) ->
- try
- RecName = element(1, Rec),
- Dict:'#info-'(RecName, {index, 'Failed-AVP'}),
- {'Failed-AVP', [FailedAvp]}
- catch
- error: _ ->
- Avps = Dict:'#get-'('AVP', Rec),
- A = #diameter_avp{name = 'Failed-AVP',
- value = FailedAvp},
- {'AVP', [A|Avps]}
- end.
+values(Rec, F, Dict) ->
+ Dict:'#get-'(F, Rec).
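
failed/3 and values/3 above accept an outgoing answer in any of its three representations: a name/values list whose tail is a map or a property list, or a dictionary-generated record read with the exprecs '#get-' accessor. A compact sketch of the same dispatch, with an ordinary record standing in for a generated one:

-module(msg_values_sketch).
-export([values/2]).

%% Stand-in for a dictionary-generated message record.
-record(example_ans, {'Result-Code', 'AVP' = []}).

%% Message as a name/values list with a map tail ...
values([_Name | Avps], Field) when is_map(Avps) ->
    maps:get(Field, Avps, []);
%% ... or a property list tail ...
values([_Name | Avps], Field) when is_list(Avps) ->
    proplists:get_value(Field, Avps, []);
%% ... or as a record, where the generated '#get-' accessor would be
%% used; a plain field match suffices for the sketch.
values(#example_ans{'AVP' = As}, 'AVP') ->
    As.

Both values(['ASA' | #{'AVP' => [Avp]}], 'AVP') and values(#example_ans{'AVP' = [Avp]}, 'AVP') return [Avp].
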
%% 3. Diameter Header
%%
@@ -1003,15 +1036,15 @@ answer_message(RC,
origin_realm = {OR,_}},
#diameter_packet{avps = Avps,
errors = Es}) ->
- {Code, _, Vid} = Dict0:avp_header('Session-Id'),
['answer-message', {'Origin-Host', OH},
{'Origin-Realm', OR},
- {'Result-Code', RC}]
- ++ session_id(Code, Vid, Avps)
- ++ failed_avp(RC, Es).
+ {'Result-Code', RC}
+ | session_id(Dict0, Avps)
+ ++ failed_avp(RC, Es)
+ ++ proxy_info(Dict0, Avps)].
-session_id(Code, Vid, Avps)
- when is_list(Avps) ->
+session_id(Dict0, Avps) ->
+ {Code, _, Vid} = Dict0:avp_header('Session-Id'),
try
#diameter_avp{data = Bin} = find_avp(Code, Vid, Avps),
[{'Session-Id', [Bin]}]
@@ -1029,6 +1062,14 @@ failed_avp(RC, [_ | Es]) ->
failed_avp(_, [] = No) ->
No.
+proxy_info(Dict0, Avps) ->
+ {Code, _, Vid} = Dict0:avp_header('Proxy-Info'),
+ [{'AVP', [A#diameter_avp{value = undefined}
+ || [#diameter_avp{code = C, vendor_id = I} = A | _]
+ <- Avps,
+ C == Code,
+ I == Vid]}].
+
%% find_avp/3
%% Grouped ...
@@ -1102,48 +1143,31 @@ find_avp(Code, VId, [_ | Avps]) ->
%% Message sent as a header/avps list.
incr_result(send = Dir,
- #diameter_packet{msg = [#diameter_header{} = H | _]}
- = Pkt,
+ Avp,
+ #diameter_packet{msg = [#diameter_header{} = H | _]},
TPid,
- DictT) ->
- incr_res(Dir, Pkt#diameter_packet{header = H}, TPid, DictT);
-
-%% Outgoing message as binary: don't count. (Sending binaries is only
-%% partially supported.)
-incr_result(send, #diameter_packet{header = undefined = No}, _, _) ->
- No;
+ AppDict) ->
+ incr_result(Dir, Avp, H, [], TPid, AppDict);
%% Incoming or outgoing. Outgoing with encode errors never gets here
%% since encode fails.
-incr_result(Dir, Pkt, TPid, DictT) ->
- incr_res(Dir, Pkt, TPid, DictT).
-
-incr_res(Dir,
- #diameter_packet{header = #diameter_header{is_error = E}
- = Hdr,
- errors = Es}
- = Pkt,
- TPid,
- DictT) ->
- {MsgDict, AppDict, Dict0} = DictT,
+incr_result(Dir, Avp, Pkt, TPid, AppDict) ->
+ #diameter_packet{header = H, errors = Es}
+ = Pkt,
+ incr_result(Dir, Avp, H, Es, TPid, AppDict).
+
+%% incr_result/6
+incr_result(Dir, Avp, Hdr, Es, TPid, AppDict) ->
Id = msg_id(Hdr, AppDict),
%% Could be {relay, 0}, in which case the R-bit is redundant since
%% only answers are being counted. Let it be however, so that the
%% same tuple is in both send/recv and result code counters.
%% Count incoming decode errors.
- recv /= Dir orelse [] == Es orelse incr_error(Dir, Id, TPid, AppDict),
-
- %% Exit on a missing result code.
- T = rc_counter(MsgDict, Dir, Pkt),
- T == false andalso ?LOGX(no_result_code, {MsgDict, Dir, Hdr}),
- {Ctr, RC, Avp} = T,
-
- %% Or on an inappropriate value.
- is_result(RC, E, Dict0)
- orelse ?LOGX(invalid_error_bit, {MsgDict, Dir, Hdr, Avp}),
+ send == Dir orelse [] == Es orelse incr_error(Dir, Id, TPid, AppDict),
+ Ctr = rcc(Avp),
incr(TPid, {Id, Dir, Ctr}),
Ctr.
@@ -1188,7 +1212,50 @@ is_result(RC, true, _) ->
incr(TPid, Counter) ->
diameter_stats:incr(Counter, TPid, 1).
-%% rc_counter/3
+%% rcc/1
+
+rcc(#diameter_avp{name = 'Result-Code' = Name, value = V}) ->
+ {Name, head(V)};
+
+rcc(#diameter_avp{name = 'Experimental-Result', value = V}) ->
+ head(V).
+
+%% head/1
+
+head([V|_]) ->
+ V;
+head(V) ->
+ V.
+
+%% rcv/1
+
+rcv(#diameter_avp{name = N, value = V}) ->
+ rcv(N, head(V)).
+
+%% rcv/2
+
+rcv('Experimental-Result', {_,_,N}) ->
+ N;
+
+rcv('Result-Code', N) ->
+ N.
+
+%% get_result/4
+
+%% Message sent as binary: no checks or counting.
+get_result(_, _, _, #diameter_packet{header = undefined}) ->
+ false;
+
+get_result(Dir, MsgDict, Dict0, Pkt) ->
+ Avp = get_result(MsgDict, msg(Dir, Pkt)),
+ Hdr = Pkt#diameter_packet.header,
+ %% Exit on a missing result code or inappropriate value.
+ Avp == false
+ andalso ?LOGX(no_result_code, {MsgDict, Dir, Hdr}),
+ E = Hdr#diameter_header.is_error,
+ is_result(rcv(Avp), E, Dict0)
+ orelse ?LOGX(invalid_error_bit, {MsgDict, Dir, Hdr, Avp}),
+ Avp.
%% RFC 3588, 7.6:
%%
@@ -1196,46 +1263,29 @@ incr(TPid, Counter) ->
%% applications MUST include either one Result-Code AVP or one
%% Experimental-Result AVP.
-rc_counter(Dict, Dir, #diameter_packet{header = H,
- avps = As,
- msg = Msg})
+%% msg/2
+
+msg(Dir, #diameter_packet{header = H,
+ avps = As,
+ msg = Msg})
when Dir == recv; %% decoded incoming
Msg == undefined -> %% relayed outgoing
- rc_counter(Dict, [H|As]);
-
-rc_counter(Dict, _, #diameter_packet{msg = Msg}) ->
- rc_counter(Dict, Msg).
-
-rc_counter(Dict, Msg) ->
- rcc(get_result(Dict, Msg)).
-
-rcc(#diameter_avp{name = 'Result-Code' = Name, value = N} = A)
- when is_integer(N) ->
- {{Name, N}, N, A};
-
-rcc(#diameter_avp{name = 'Result-Code' = Name, value = [N|_]} = A)
- when is_integer(N) ->
- {{Name, N}, N, A};
+ [H|As];
-rcc(#diameter_avp{name = 'Experimental-Result', value = {_,_,N} = T} = A)
- when is_integer(N) ->
- {T, N, A};
-
-rcc(#diameter_avp{name = 'Experimental-Result', value = [{_,_,N} = T|_]} = A)
- when is_integer(N) ->
- {T, N, A};
-
-rcc(_) ->
- false.
+msg(_, #diameter_packet{msg = Msg}) ->
+ Msg.
%% get_result/2
get_result(Dict, Msg) ->
try
[throw(A) || N <- ['Result-Code', 'Experimental-Result'],
- #diameter_avp{} = A <- [get_avp(Dict, N, Msg)]]
+ #diameter_avp{} = A <- [get_avp(Dict, N, Msg)],
+ is_integer(catch rcv(A))],
+ false
catch
- #diameter_avp{} = A -> A
+ #diameter_avp{} = A ->
+ A
end.
x(T) ->
@@ -1359,7 +1409,7 @@ make_opts([T | _], _, _, _, _, _) ->
send_request({{TPid, _Caps} = TC, App}
= Transport,
- #{sequence := Mask}
+ #{sequence := Mask, traffic_counters := Count}
= SvcOpts,
Msg0,
CallOpts,
@@ -1375,9 +1425,15 @@ send_request({{TPid, _Caps} = TC, App}
SvcOpts,
ReqPkt),
eval_packet(EncPkt, Fs),
- T = send_R(ReqPkt, EncPkt, Transport, CallOpts, Caller, SvcName),
+ T = send_R(ReqPkt,
+ EncPkt,
+ Transport,
+ CallOpts,
+ Caller,
+ Count,
+ SvcName),
Ans = recv_answer(SvcName, App, CallOpts, T),
- handle_answer(SvcName, SvcOpts, App, Ans);
+ handle_answer(SvcName, Count, SvcOpts, App, Ans);
{discard, Reason} ->
{error, Reason};
discard ->
@@ -1520,6 +1576,7 @@ send_R(ReqPkt,
{{TPid, _Caps} = TC, #diameter_app{dictionary = AppDict}},
#options{timeout = Timeout},
{Pid, Ref},
+ Count,
SvcName) ->
Req = #request{ref = Ref,
caller = Pid,
@@ -1527,7 +1584,7 @@ send_R(ReqPkt,
peer = TC,
packet = ReqPkt},
- incr(send, EncPkt, TPid, AppDict),
+ Count andalso incr(send, EncPkt, TPid, AppDict),
{TRef, MRef} = zend_requezt(TPid, EncPkt, Req, SvcName, Timeout),
Pid ! Ref, %% tell caller a send has been attempted
{TRef, MRef, Req}.
@@ -1559,15 +1616,16 @@ failover(SvcName, App, Req, CallOpts) ->
CallOpts,
SvcName).
-%% handle_answer/4
+%% handle_answer/5
-handle_answer(SvcName, _, App, {error, Req, Reason}) ->
+handle_answer(SvcName, _, _, App, {error, Req, Reason}) ->
#request{packet = Pkt,
peer = {_TPid, _Caps} = TC}
= Req,
cb(App, handle_error, [Reason, msg(Pkt), SvcName, TC]);
handle_answer(SvcName,
+ Count,
SvcOpts,
#diameter_app{id = Id,
dictionary = AppDict,
@@ -1581,43 +1639,50 @@ handle_answer(SvcName,
#request{peer = {TPid, _}}
= Req,
- incr(recv, DecPkt, TPid, AppDict),
-
- AnsPkt = try
- incr_result(recv, DecPkt, TPid, {MsgDict, AppDict, Dict0})
- of
- _ -> DecPkt
- catch
- exit: {no_result_code, _} ->
- %% RFC 6733 requires one of Result-Code or
- %% Experimental-Result, but the decode will have
- %% detected a missing AVP. If both are optional in
- %% the dictionary then this isn't a decode error:
- %% just continue on.
- DecPkt;
- exit: {invalid_error_bit, {_, _, _, Avp}} ->
- #diameter_packet{errors = Es}
- = DecPkt,
- E = {5004, Avp},
- DecPkt#diameter_packet{errors = [E|Es]}
- end,
-
- handle_answer(AnsPkt, SvcName, App, AE, Req).
+ answer(answer(DecPkt, TPid, MsgDict, AppDict, Dict0, Count),
+ SvcName,
+ App,
+ AE,
+ Req).
+
+%% answer/6
+
+answer(DecPkt, TPid, MsgDict, AppDict, Dict0, Count) ->
+ Count andalso incr(recv, DecPkt, TPid, AppDict),
+ try get_result(recv, MsgDict, Dict0, DecPkt) of
+ Avp ->
+ Count andalso false /= Avp
+ andalso incr_result(recv, Avp, DecPkt, TPid, AppDict),
+ DecPkt
+ catch
+ exit: {no_result_code, _} ->
+ %% RFC 6733 requires one of Result-Code or
+ %% Experimental-Result, but the decode will have
+ %% detected a missing AVP. If both are optional in
+ %% the dictionary then this isn't a decode error:
+ %% just continue on.
+ DecPkt;
+ exit: {invalid_error_bit, {_, _, _, Avp}} ->
+ #diameter_packet{errors = Es}
+ = DecPkt,
+ E = {5004, Avp},
+ DecPkt#diameter_packet{errors = [E|Es]}
+ end.
-%% handle_answer/5
+%% answer/5
-handle_answer(#diameter_packet{errors = Es}
- = Pkt,
- SvcName,
- App,
- AE,
- #request{peer = {_TPid, _Caps} = TC,
- packet = P})
+answer(#diameter_packet{errors = Es}
+ = Pkt,
+ SvcName,
+ App,
+ AE,
+ #request{peer = {_TPid, _Caps} = TC,
+ packet = P})
when callback == AE;
[] == Es ->
cb(App, handle_answer, [Pkt, msg(P), SvcName, TC]);
-handle_answer(#diameter_packet{header = H}, SvcName, _, AE, _) ->
+answer(#diameter_packet{header = H}, SvcName, _, AE, _) ->
handle_error(H, SvcName, AE).
%% handle_error/3
@@ -1830,10 +1895,8 @@ get_destination(Dict, Msg) ->
[str(get_avp_value(Dict, D, Msg)) || D <- ['Destination-Realm',
'Destination-Host']].
-%% This is not entirely correct. The avp could have an arity 1, in
-%% which case an empty list is a DiameterIdentity of length 0 rather
-%% than the list of no values we treat it as by mapping to undefined.
-%% This behaviour is documented.
+%% A DiameterIdentity has length at least one, so an empty list is not
+%% a Realm/Host.
str([]) ->
undefined;
str(T) ->
@@ -1841,16 +1904,12 @@ str(T) ->
%% get_avp/3
%%
-%% Find an AVP in a message of one of three forms:
-%%
-%% - a message record (as generated from a .dia spec) or
-%% - a list of an atom message name followed by 2-tuple, avp name/value pairs.
-%% - a list of a #diameter_header{} followed by #diameter_avp{} records,
-%%
-%% In the first two forms a dictionary module is used at encode to
-%% identify the type of the AVP and its arity in the message in
-%% question. The third form allows messages to be sent as is, without
-%% a dictionary, which is needed in the case of relay agents, for one.
+%% Find an AVP in a message in one of the decoded formats, or as a
+%% header/avps list. There are only four AVPs that are extracted here:
+%% Result-Code and Experimental-Result when constructing counter
+%% keys, and Destination-Host/Realm when selecting a next-hop peer.
+%% Experimental-Result is the only one of type Grouped, and is given
+%% special treatment in order to return the value as a record.
%% Messages will be header/avps list as a relay and the only AVP's we
%% look for are in the common dictionary. This is required since the
@@ -1859,37 +1918,58 @@ str(T) ->
get_avp(?RELAY, Name, Msg) ->
get_avp(?BASE, Name, Msg);
-%% Message as a header/avps list.
+%% Message as header/avps list.
get_avp(Dict, Name, [#diameter_header{} | Avps]) ->
try
- {Code, _, VId} = Dict:avp_header(Name),
- find_avp(Code, VId, Avps)
- of
- A ->
- (avp_decode(Dict, Name, ungroup(A)))#diameter_avp{name = Name}
+ {Code, _, Vid} = Dict:avp_header(Name),
+ A = find_avp(Code, Vid, Avps),
+ avp_decode(Dict, Name, ungroup(A))
catch
error: _ ->
undefined
end;
-%% Outgoing message as a name/values list.
+%% Message as name/values list ...
get_avp(_, Name, [_MsgName | Avps]) ->
- case lists:keyfind(Name, 1, Avps) of
+ case find(Name, Avps) of
{_, V} ->
- #diameter_avp{name = Name, value = V};
+ #diameter_avp{name = Name, value = value(Name, V)};
_ ->
undefined
end;
-%% Message is typically a record but not necessarily.
+%% ... or record.
get_avp(Dict, Name, Rec) ->
- try
- #diameter_avp{name = Name, value = Dict:'#get-'(Name, Rec)}
+ try Dict:'#get-'(Name, Rec) of
+ V ->
+ #diameter_avp{name = Name, value = value(Name, V)}
catch
error:_ ->
undefined
end.
+value('Experimental-Result' = N, #{'Vendor-Id' := Vid,
+ 'Experimental-Result-Code' := RC}) ->
+ {N, Vid, RC};
+value('Experimental-Result' = N, [{'Experimental-Result-Code', RC},
+ {'Vendor-Id', Vid}]) ->
+ {N, Vid, RC};
+value('Experimental-Result' = N, [{'Vendor-Id', Vid},
+ {'Experimental-Result-Code', RC}]) ->
+ {N, Vid, RC};
+value(_, V) ->
+ V.
+
+%% find/2
+
+find(Key, Map)
+ when is_map(Map) ->
+ maps:find(Key, Map);
+
+find(Key, List)
+ when is_list(List) ->
+ lists:keyfind(Key, 1, List).
+
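
get_avp/3 now also handles the map decode format: find/2 returns {ok, V} for a map and {Name, V} for a property list, and the caller only matches {_, V}, so both shapes pass through the same clause; a missing key yields error or false respectively, both falling through to undefined. value/2 then normalizes an Experimental-Result given as a map or as a two-element list into the 3-tuple used elsewhere. Two one-line examples of the lookup shapes (values are made up):

%% Both lookup results match the caller's {_, V} pattern.
maps:find('Result-Code', #{'Result-Code' => 2001}).
%% -> {ok,2001}
lists:keyfind('Result-Code', 1, [{'Result-Code', 2001}]).
%% -> {'Result-Code',2001}
%%
%% And the normalization performed by value/2, with illustrative
%% vendor id / result code:
%% #{'Vendor-Id' => 10415, 'Experimental-Result-Code' => 5005}
%% becomes {'Experimental-Result', 10415, 5005}.
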
%% get_avp_value/3
get_avp_value(Dict, Name, Msg) ->
@@ -1909,18 +1989,25 @@ ungroup(Avp) ->
%% avp_decode/3
+%% Ensure Experimental-Result is decoded as record, since this format
+%% is used for counter keys.
+avp_decode(Dict, 'Experimental-Result' = N, #diameter_avp{data = Bin}
+ = Avp)
+ when is_binary(Bin) ->
+ {V,_} = Dict:avp(decode, Bin, N, decode_opts(Dict)),
+ Avp#diameter_avp{name = N, value = V};
+
avp_decode(Dict, Name, #diameter_avp{value = undefined,
data = Bin}
- = Avp) ->
- try Dict:avp(decode, Bin, Name, decode_opts(Dict)) of
- V ->
- Avp#diameter_avp{value = V}
- catch
- error:_ ->
- Avp
- end;
-avp_decode(_, _, #diameter_avp{} = Avp) ->
- Avp.
+ = Avp)
+ when is_binary(Bin) ->
+ V = Dict:avp(decode, Bin, Name, decode_opts(Dict)),
+ Avp#diameter_avp{name = Name, value = V};
+
+avp_decode(_, Name, #diameter_avp{} = Avp) ->
+ Avp#diameter_avp{name = Name}.
+
+%% cb/3
cb(#diameter_app{module = [_|_] = M}, F, A) ->
eval(M, F, A).
@@ -1933,7 +2020,9 @@ choose(false, _, X) -> X.
%% Decode options sufficient for AVP extraction.
decode_opts(Dict) ->
- #{string_decode => false,
+ #{decode_format => record,
+ string_decode => false,
strict_mbit => false,
failed_avp => false,
- dictionary => Dict}.
+ module => Dict,
+ app_dictionary => Dict}.
diff --git a/lib/diameter/src/base/diameter_watchdog.erl b/lib/diameter/src/base/diameter_watchdog.erl
index a63425d92a..43623334a9 100644
--- a/lib/diameter/src/base/diameter_watchdog.erl
+++ b/lib/diameter/src/base/diameter_watchdog.erl
@@ -72,12 +72,11 @@
restrict := boolean(),
suspect := non_neg_integer(), %% OKAY -> SUSPECT
okay := non_neg_integer()}, %% REOPEN -> OKAY
- codec :: #{string_decode := false,
+ codec :: #{decode_format := none,
+ string_decode := false,
strict_mbit := boolean(),
- failed_avp := false,
rfc := 3588 | 6733,
- ordered_encode := false,
- incoming_maxlen := diameter:message_length()},
+ ordered_encode := false},
shutdown = false :: boolean()}).
%% ---------------------------------------------------------------------------
@@ -135,13 +134,6 @@ i({Ack, T, Pid, {Opts,
putr(restart, {T, Opts, Svc, SvcOpts}), %% save seeing it in trace
putr(dwr, dwr(Caps)), %%
Nodes = restrict_nodes(Restrict),
- CodecKeys = [string_decode,
- strict_mbit,
- incoming_maxlen,
- spawn_opt,
- rfc,
- ordered_encode],
-
#watchdog{parent = Pid,
transport = start(T, Opts, SvcOpts, Nodes, Dict0, Svc),
tw = proplists:get_value(watchdog_timer,
@@ -149,14 +141,23 @@ i({Ack, T, Pid, {Opts,
?DEFAULT_TW_INIT),
receive_data = RecvData,
dictionary = Dict0,
- config =
- maps:without(CodecKeys,
- config(SvcOpts#{restrict => restrict(Nodes),
- suspect => 1,
- okay => 3},
- Opts)),
- codec = maps:with(CodecKeys, SvcOpts#{string_decode := false,
- ordered_encode => false})}.
+ config = maps:with([sequence,
+ restrict_connections,
+ restrict,
+ suspect,
+ okay],
+ config(SvcOpts#{restrict => restrict(Nodes),
+ suspect => 1,
+ okay => 3},
+ Opts)),
+ codec = maps:with([decode_format,
+ strict_mbit,
+ string_decode,
+ rfc,
+ ordered_encode],
+ SvcOpts#{decode_format := none,
+ string_decode := false,
+ ordered_encode => false})}.
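
The codec map above is built with maps:with/2 and then updated with both map update operators: := (decode_format, string_decode) requires the key to already be present, so it asserts that the service options contained it, while => (ordered_encode) adds the key whether or not it was there. A throwaway example of the difference:

%% Throwaway example of the two update operators.
M = #{decode_format => record, string_decode => true},
M#{decode_format := none, ordered_encode => false}.
%% -> #{decode_format => none, ordered_encode => false, string_decode => true}
%% M#{ordered_encode := false} would instead raise an error, since :=
%% only updates keys that are already present; that is why
%% ordered_encode is added with => in the watchdog state above.
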
wait(Ref, Pid) ->
receive
diff --git a/lib/diameter/src/compiler/diameter_codegen.erl b/lib/diameter/src/compiler/diameter_codegen.erl
index f56e4a5249..4e6fe32d69 100644
--- a/lib/diameter/src/compiler/diameter_codegen.erl
+++ b/lib/diameter/src/compiler/diameter_codegen.erl
@@ -21,15 +21,14 @@
-module(diameter_codegen).
%%
-%% This module generates erl/hrl files for encode/decode modules
-%% from the orddict parsed from a dictionary file (.dia) by
-%% diameter_dict_util. The generated code is simple (one-liners),
-%% the generated functions being called by code included iin the
-%% generated modules from diameter_gen.hrl. The orddict itself is
-%% returned by dict/0 in the generated module and diameter_dict_util
-%% calls this function when importing dictionaries as a consequence
-%% of @inherits sections. That is, @inherits introduces a dependency
-%% on the beam file of another dictionary.
+%% This module generates erl/hrl files for encode/decode modules from
+%% the orddict parsed from a dictionary file by diameter_dict_util.
+%% The generated code is simple (one-liners), and is called from
+%% diameter_gen. The orddict itself is returned by dict/0 in the
+%% generated module and diameter_dict_util calls this function when
+%% importing dictionaries as a consequence of @inherits sections. That
+%% is, @inherits introduces a dependency on the beam file of another
+%% dictionary.
%%
-export([from_dict/4,
diff --git a/lib/diameter/src/compiler/diameter_dict_util.erl b/lib/diameter/src/compiler/diameter_dict_util.erl
index f9f2b02e94..7b53e51cb6 100644
--- a/lib/diameter/src/compiler/diameter_dict_util.erl
+++ b/lib/diameter/src/compiler/diameter_dict_util.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2010-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2010-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -923,7 +923,7 @@ xa([D|_] = Ds, [[Qual, D, {_, Line, AvpName}] | Avps], Dict, Key, Name) ->
store_new({Key, {Name, AvpName}},
[Line, Qual, D],
Dict,
- [Name, Line],
+ [AvpName, Line],
avp_already_referenced),
Key,
Name);
diff --git a/lib/diameter/src/compiler/diameter_exprecs.erl b/lib/diameter/src/compiler/diameter_exprecs.erl
index 9a0cb6baf2..143dede037 100644
--- a/lib/diameter/src/compiler/diameter_exprecs.erl
+++ b/lib/diameter/src/compiler/diameter_exprecs.erl
@@ -110,9 +110,9 @@
%% parse_transform/2
parse_transform(Forms, _Options) ->
- Rs = [R || {attribute, _, record, R} <- Forms],
- Es = lists:append([E || {attribute, _, export_records, E} <- Forms]),
{H,T} = lists:splitwith(fun is_head/1, Forms),
+ Rs = [R || {attribute, _, record, R} <- H],
+ Es = lists:append([E || {attribute, _, export_records, E} <- H]),
H ++ [a_export(Es) | f_accessors(Es, Rs)] ++ T.
is_head(T) ->
diff --git a/lib/diameter/src/dict/doic_rfc7683.dia b/lib/diameter/src/dict/doic_rfc7683.dia
new file mode 100644
index 0000000000..2b7804115e
--- /dev/null
+++ b/lib/diameter/src/dict/doic_rfc7683.dia
@@ -0,0 +1,50 @@
+;;
+;; %CopyrightBegin%
+;;
+;; Copyright Ericsson AB 2017. All Rights Reserved.
+;;
+;; Licensed under the Apache License, Version 2.0 (the "License");
+;; you may not use this file except in compliance with the License.
+;; You may obtain a copy of the License at
+;;
+;; http://www.apache.org/licenses/LICENSE-2.0
+;;
+;; Unless required by applicable law or agreed to in writing, software
+;; distributed under the License is distributed on an "AS IS" BASIS,
+;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+;; See the License for the specific language governing permissions and
+;; limitations under the License.
+;;
+;; %CopyrightEnd%
+;;
+
+@name diameter_gen_doic_rfc7683
+@prefix diameter_doic
+
+@avp_types
+
+ OC-Supported-Features 621 Grouped -
+ OC-Feature-Vector 622 Unsigned64 -
+ OC-OLR 623 Grouped -
+ OC-Sequence-Number 624 Unsigned64 -
+ OC-Validity-Duration 625 Unsigned32 -
+ OC-Report-Type 626 Enumerated -
+ OC-Reduction-Percentage 627 Unsigned32 -
+
+@enum OC-Report-Type
+
+ HOST_REPORT 0
+ REALM_REPORT 1
+
+@grouped
+
+ OC-Supported-Features ::= < AVP Header: 621 >
+ [ OC-Feature-Vector ]
+ * [ AVP ]
+
+ OC-OLR ::= < AVP Header: 623 >
+ < OC-Sequence-Number >
+ < OC-Report-Type >
+ [ OC-Reduction-Percentage ]
+ [ OC-Validity-Duration ]
+ * [ AVP ]
diff --git a/lib/diameter/src/modules.mk b/lib/diameter/src/modules.mk
index bb3b234d20..bb86de016a 100644
--- a/lib/diameter/src/modules.mk
+++ b/lib/diameter/src/modules.mk
@@ -24,6 +24,7 @@ DICTS = \
base_rfc6733 \
base_accounting \
acct_rfc6733 \
+ doic_rfc7683 \
relay
# The yecc grammar for the dictionary parser.
diff --git a/lib/diameter/src/transport/diameter_sctp.erl b/lib/diameter/src/transport/diameter_sctp.erl
index 6a9f1f940b..4eb3379d59 100644
--- a/lib/diameter/src/transport/diameter_sctp.erl
+++ b/lib/diameter/src/transport/diameter_sctp.erl
@@ -79,7 +79,7 @@
-type option() :: {sender, boolean()}
| sender
| {packet, boolean() | raw}
- | {message_cb, false | diameter:evaluable()}.
+ | {message_cb, false | diameter:eval()}.
-type uint() :: non_neg_integer().
@@ -102,9 +102,10 @@
streams :: {uint(), uint()} %% {InStream, OutStream} counts
| undefined,
os = 0 :: uint(), %% next output stream
+ rotate = 1 :: boolean() | 0 | 1, %% rotate os?
packet = true :: boolean() %% legacy transport_data?
| raw,
- message_cb = false :: false | diameter:evaluable(),
+ message_cb = false :: false | diameter:eval(),
send = false :: pid() | boolean()}). %% sending process
%% Monitor process state.
@@ -112,7 +113,7 @@
{transport :: pid(),
ack = false :: boolean(),
socket :: gen_sctp:sctp_socket(),
- assoc_id :: gen_sctp:assoc_id()}). %% next output stream
+ assoc_id :: gen_sctp:assoc_id()}).
%% Listener process state.
-record(listener,
@@ -120,7 +121,7 @@
socket :: gen_sctp:sctp_socket(),
service :: pid(), %% service process
pending = {0, queue:new()},
- opts :: [[match()] | boolean() | diameter:evaluable()]}).
+ opts :: [[match()] | boolean() | diameter:eval()]}).
%% Field pending implements two queues: the first of transport-to-be
%% processes to which an association has been assigned but for which
%% diameter hasn't yet spawned a transport process, a short-lived
@@ -156,12 +157,7 @@ start(T, Svc, Opts)
= Svc,
diameter_sctp_sup:start(), %% start supervisors on demand
Addrs = Caps#diameter_caps.host_ip_address,
- s(T, Addrs, Pid, lists:map(fun ip/1, Opts)).
-
-ip({ifaddr, A}) ->
- {ip, A};
-ip(T) ->
- T.
+ s(T, Addrs, Pid, Opts).
%% A listener spawns transports either as a consequence of this call
%% when there is not yet an association to assign it, or at comm_up on
@@ -354,23 +350,35 @@ l([], Ref, T) ->
%% open/3
open(Addrs, Opts, PortNr) ->
- {LAs, Os} = addrs(Addrs, Opts),
- {LAs, case gen_sctp:open(gen_opts(portnr(Os, PortNr))) of
- {ok, Sock} ->
- Sock;
- {error, Reason} ->
- x({open, Reason})
- end}.
+ case gen_sctp:open(gen_opts(portnr(addrs(Addrs, Opts), PortNr))) of
+ {ok, Sock} ->
+ {addrs(Sock), Sock};
+ {error, Reason} ->
+ x({open, Reason})
+ end.
addrs(Addrs, Opts) ->
- case proplists:split(Opts, [ip]) of
- {[[]], _} ->
- {Addrs, Opts ++ [{ip, A} || A <- Addrs]};
- {[As], Os} ->
- LAs = [diameter_lib:ipaddr(A) || {ip, A} <- As],
- {LAs, Os ++ [{ip, A} || A <- LAs]}
+ case lists:mapfoldl(fun ipaddr/2, false, Opts) of
+ {Os, true} ->
+ Os;
+ {_, false} ->
+ Opts ++ [{ip, A} || A <- Addrs]
end.
+ipaddr({K,A}, _)
+ when K == ifaddr;
+ K == ip ->
+ {{ip, ipaddr(A)}, true};
+ipaddr(T, B) ->
+ {T, B}.
+
+ipaddr(A)
+ when A == loopback;
+ A == any ->
+ A;
+ipaddr(A) ->
+ diameter_lib:ipaddr(A).
+
portnr(Opts, PortNr) ->
case proplists:get_value(port, Opts) of
undefined ->
@@ -379,6 +387,14 @@ portnr(Opts, PortNr) ->
Opts
end.
+addrs(Sock) ->
+ case inet:socknames(Sock) of
+ {ok, As} ->
+ [A || {A,_} <- As];
+ {error, Reason} ->
+ x({socknames, Reason})
+ end.
+
%% x/1
x(Reason) ->
@@ -565,7 +581,7 @@ transition(Msg, S)
%% Deferred actions from a message_cb.
transition({actions, Dir, Acts}, S) ->
- actions(Acts, Dir, S);
+ setopts(ok, actions(Acts, Dir, S));
%% Request to close the transport connection.
transition({diameter, {close, Pid}}, #transport{parent = Pid}) ->
@@ -677,11 +693,16 @@ send(#diameter_packet{transport_data = {outstream, SId}}
= S) ->
send(SId rem OS, Msg, S);
-%% ... or not: rotate through all streams.
-send(Msg, #transport{streams = {_, OS},
+%% ... or not: rotate when sending on multiple streams ...
+send(Msg, #transport{rotate = true,
+ streams = {_, OS},
os = N}
= S) ->
- send(N, Msg, S#transport{os = (N + 1) rem OS}).
+ send(N, Msg, S#transport{os = (N + 1) rem OS});
+
+%% ... or send on the only stream available.
+send(Msg, S) ->
+ send(0, Msg, S).
%% send/3
@@ -749,7 +770,7 @@ recv({[#sctp_sndrcvinfo{assoc_id = Id}], _Bin}
%% Inbound Diameter message.
recv({[#sctp_sndrcvinfo{}], Bin} = Msg, S)
when is_binary(Bin) ->
- message(recv, Msg, S);
+ message(recv, Msg, recv(S));
recv({_, #sctp_shutdown_event{}}, _) ->
stop;
@@ -769,6 +790,25 @@ recv({_, #sctp_paddr_change{}}, _) ->
recv({_, #sctp_pdapi_event{}}, _) ->
ok.
+%% recv/1
+%%
+%% Start sending unordered after the second reception, so that an
+%% outgoing CER/CEA will arrive at the peer before another request.
+
+recv(#transport{rotate = B} = S)
+ when is_boolean(B) ->
+ S;
+
+recv(#transport{rotate = 0, streams = {_,N}, socket = Sock} = S) ->
+ ok = inet:setopts(Sock, [{sctp_default_send_param,
+ #sctp_sndrcvinfo{flags = [unordered]}}]),
+ S#transport{rotate = 1 < N};
+
+recv(#transport{rotate = N} = S) ->
+ S#transport{rotate = N-1}.
+
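
The rotate field above counts down from its initial value of 1: the first incoming message is handled with ordered delivery still in place (so the CER/CEA exchange stays ordered), the second switches the association to unordered delivery and enables stream rotation only if more than one outgoing stream exists; from then on rotate is a boolean and recv/1 is a no-op. A condensed sketch of that countdown, with the transport record reduced to the two fields involved and the inet:setopts/2 call left as a comment:

-module(sctp_rotate_sketch).
-export([on_recv/1]).

%% Stand-ins for the rotate and streams fields of #transport{}.
-record(s, {rotate = 1 :: boolean() | 0 | 1,
            out_streams = 1 :: pos_integer()}).

%% Called once per received message; returns the updated state.
on_recv(#s{rotate = B} = S) when is_boolean(B) ->
    S;                                  %% decision already taken
on_recv(#s{rotate = 0, out_streams = N} = S) ->
    %% This is the point at which the real transport switches the
    %% socket to unordered delivery with inet:setopts/2.
    S#s{rotate = 1 < N};                %% rotate only over several streams
on_recv(#s{rotate = N} = S) ->
    S#s{rotate = N - 1}.
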
+%% publish/4
+
publish(T, Ref, Id, Sock) ->
true = diameter_reg:add_new({?MODULE, T, {Ref, {Id, Sock}}}),
putr(?INFO_KEY, {gen_sctp, Sock}). %% for info/1
diff --git a/lib/diameter/src/transport/diameter_tcp.erl b/lib/diameter/src/transport/diameter_tcp.erl
index a2f393d5d4..a8639baa11 100644
--- a/lib/diameter/src/transport/diameter_tcp.erl
+++ b/lib/diameter/src/transport/diameter_tcp.erl
@@ -87,8 +87,7 @@
module :: module() | undefined}).
-type length() :: 0..16#FFFFFF. %% message length from Diameter header
--type size() :: non_neg_integer(). %% accumulated binary size
--type frag() :: {length(), size(), binary(), list(binary())}
+-type frag() :: maybe_improper_list(length(), binary())
| binary().
-type connect_option() :: {raddr, inet:ip_address()}
@@ -111,7 +110,7 @@
-type option() :: {port, non_neg_integer()}
| {sender, boolean()}
| sender
- | {message_cb, false | diameter:evaluable()}
+ | {message_cb, false | diameter:eval()}
| {fragment_timer, 0..16#FFFFFFFF}.
%% Accepting/connecting transport process state.
@@ -126,7 +125,7 @@
timeout :: infinity | 0..16#FFFFFFFF, %% fragment timeout
tref = false :: false | reference(), %% fragment timer reference
flush = false :: boolean(), %% flush fragment at timeout?
- message_cb :: false | diameter:evaluable(),
+ message_cb :: false | diameter:eval(),
send :: pid() | false}). %% sending process
%% The usual transport using gen_tcp can be replaced by anything
@@ -143,8 +142,7 @@
-> {ok, pid(), [inet:ip_address()]}
when Ref :: diameter:transport_ref();
({connect, Ref}, #diameter_service{}, [connect_option()])
- -> {ok, pid(), [inet:ip_address()]}
- | {ok, pid()}
+ -> {ok, pid()}
when Ref :: diameter:transport_ref().
start({T, Ref}, Svc, Opts) ->
@@ -259,22 +257,14 @@ i(#monitor{parent = Pid, transport = TPid} = S) ->
i({listen, Ref, {Mod, Opts, Addrs}}) ->
[_] = diameter_config:subscribe(Ref, transport), %% assert existence
- {[LA, LP], Rest} = proplists:split(Opts, [ip, port]),
- LAddrOpt = get_addr(LA, Addrs),
- LPort = get_port(LP),
- {ok, LSock} = Mod:listen(LPort, gen_opts(LAddrOpt, Rest)),
- LAddr = laddr(LAddrOpt, Mod, LSock),
+ {[LP], Rest} = proplists:split(Opts, [port]),
+ {ok, LSock} = Mod:listen(get_port(LP), gen_opts(Addrs, Rest)),
+ {ok, {LAddr, _}} = sockname(Mod, LSock),
true = diameter_reg:add_new({?MODULE, listener, {Ref, {LAddr, LSock}}}),
proc_lib:init_ack({ok, self(), {LAddr, LSock}}),
#listener{socket = LSock,
module = Mod}.
-laddr([], Mod, Sock) ->
- {ok, {Addr, _Port}} = sockname(Mod, Sock),
- Addr;
-laddr([{ip, Addr}], _, _) ->
- Addr.
-
ssl_opts([]) ->
false;
ssl_opts([{ssl_options, true}]) ->
@@ -309,24 +299,16 @@ init(accept = T, Ref, Mod, Pid, Opts, Addrs, SvcPid) ->
Sock;
init(connect = T, Ref, Mod, Pid, Opts, Addrs, _SvcPid) ->
- {[LA, RA, RP], Rest} = proplists:split(Opts, [ip, raddr, rport]),
- LAddrOpt = get_addr(LA, Addrs),
+ {[RA, RP], Rest} = proplists:split(Opts, [raddr, rport]),
RAddr = get_addr(RA),
RPort = get_port(RP),
- proc_lib:init_ack(init_rc(LAddrOpt)),
- Sock = ok(connect(Mod, RAddr, RPort, gen_opts(LAddrOpt, Rest))),
+ proc_lib:init_ack({ok, self()}),
+ Sock = ok(connect(Mod, RAddr, RPort, gen_opts(Addrs, Rest))),
publish(Mod, T, Ref, Sock),
- up(Pid, {RAddr, RPort}, LAddrOpt, Mod, Sock),
+ up(Pid, {RAddr, RPort}, Mod, Sock),
Sock.
-init_rc([{ip, Addr}]) ->
- {ok, self(), [Addr]};
-init_rc([]) ->
- {ok, self()}.
-
-up(Pid, Remote, [{ip, _Addr}], _, _) ->
- diameter_peer:up(Pid, Remote);
-up(Pid, Remote, [], Mod, Sock) ->
+up(Pid, Remote, Mod, Sock) ->
{Addr, _Port} = ok(sockname(Mod, Sock)),
diameter_peer:up(Pid, Remote, [Addr]).
@@ -383,25 +365,41 @@ l([{{?MODULE, listener, {_, AS}}, LPid}], _, _) ->
l([], Ref, T) ->
diameter_tcp_sup:start_child({listen, Ref, T}).
-%% get_addr/1
+%% addrs/2
+%%
+%% Take the first address from the service if several are specified
+%% and no address is configured.
+
+addrs(Addrs, Opts) ->
+ case lists:mapfoldr(fun ipaddr/2, [], Opts) of
+ {Os, [_]} ->
+ Os;
+ {_, []} ->
+ Opts ++ [{ip, A} || [A|_] <- [Addrs]];
+ {_, As} ->
+ ?ERROR({invalid_addrs, As, Addrs})
+ end.
-get_addr(As) ->
- diameter_lib:ipaddr(addr(As, [])).
+ipaddr({K,A}, As)
+ when K == ifaddr;
+ K == ip ->
+ {{ip, ipaddr(A)}, [A | As]};
+ipaddr(T, B) ->
+ {T, B}.
-%% get_addr/2
+ipaddr(A)
+ when A == loopback;
+ A == any ->
+ A;
+ipaddr(A) ->
+ diameter_lib:ipaddr(A).
-get_addr([], []) ->
- [];
-get_addr(As, Def) ->
- [{ip, diameter_lib:ipaddr(addr(As, Def))}].
+%% get_addr/1
-%% Take the first address from the service if several are unspecified.
-addr([], [Addr | _]) ->
- Addr;
-addr([{_, Addr}], _) ->
- Addr;
-addr(As, Addrs) ->
- ?ERROR({invalid_addrs, As, Addrs}).
+get_addr([{_, Addr}]) ->
+ diameter_lib:ipaddr(Addr);
+get_addr(Addrs) ->
+ ?ERROR({invalid_addrs, Addrs}).
%% get_port/1
@@ -414,10 +412,15 @@ get_port(Ps) ->
%% gen_opts/2
-gen_opts(LAddrOpt, Opts) ->
+gen_opts(Addrs, Opts) ->
+ gen_opts(addrs(Addrs, Opts)).
+
+%% gen_opts/1
+
+gen_opts(Opts) ->
{L,_} = proplists:split(Opts, [binary, packet, active]),
[[],[],[]] == L orelse ?ERROR({reserved_options, Opts}),
- [binary, {packet, 0}, {active, false}] ++ LAddrOpt ++ Opts.
+ [binary, {packet, 0}, {active, false} | Opts].
%% ---------------------------------------------------------------------------
%% # ports/1
@@ -599,11 +602,12 @@ t(T,S) ->
%% Incoming packets.
transition({P, Sock, Bin}, #transport{socket = Sock,
- ssl = B}
+ ssl = B,
+ frag = Frag}
= S)
when P == ssl, true == B;
P == tcp ->
- recv(Bin, S#transport{active = false});
+ recv(acc(Frag, Bin), S);
%% Capabilties exchange has decided on whether or not to run over TLS.
transition({diameter, {tls, Ref, Type, B}}, #transport{parent = Pid}
@@ -640,7 +644,7 @@ transition(Msg, S)
%% Deferred actions from a message_cb.
transition({actions, Dir, Acts}, S) ->
- actions(Acts, Dir, S);
+ setopts(actions(Acts, Dir, S));
%% Request to close the transport connection.
transition({diameter, {close, Pid}}, #transport{parent = Pid,
@@ -720,86 +724,77 @@ tls(accept, Sock, Opts) ->
%% using Nagle.
%% Receive packets until a full message is received,
-recv(Bin, #transport{frag = Head} = S) ->
- case rcv(Head, Bin) of
- {Msg, B} -> %% have a complete message ...
- message(recv, Msg, S#transport{frag = B});
- Frag -> %% read more on the socket
- start_fragment_timer(setopts(S#transport{frag = Frag,
- flush = false}))
- end.
-%% rcv/2
+recv({Msg, Rest}, S) -> %% have a complete message ...
+ recv(acc(Rest), message(recv, Msg, S));
+
+recv(Frag, #transport{recv = B,
+ socket = Sock,
+ module = M}
+ = S) -> %% or not
+ B andalso setopts(M, Sock),
+ start_fragment_timer(S#transport{frag = Frag,
+ flush = false,
+ active = B}).
-%% No previous fragment.
-rcv(<<>>, Bin) ->
- rcv(Bin);
+%% acc/2
-%% Not even the first four bytes of the header.
-rcv(Head, Bin)
- when is_binary(Head) ->
- rcv(<<Head/binary, Bin/binary>>);
+%% Know how many bytes to extract.
+acc([Len | Acc], Bin) ->
+ acc1(Len, <<Acc/binary, Bin/binary>>);
-%% Or enough to know how many bytes to extract.
-rcv({Len, N, Head, Acc}, Bin) ->
- rcv(Len, N + size(Bin), Head, [Bin | Acc]).
+%% Or not.
+acc(Head, Bin) ->
+ acc(<<Head/binary, Bin/binary>>).
-%% rcv/4
+%% acc1/3
%% Extract a message for which we have all bytes.
-rcv(Len, N, Head, Acc)
- when Len =< N ->
- recv1(Len, bin(Head, Acc));
+acc1(Len, Bin)
+ when Len =< byte_size(Bin) ->
+ split_binary(Bin, Len);
%% Wait for more packets.
-rcv(Len, N, Head, Acc) ->
- {Len, N, Head, Acc}.
-
-%% rcv/1
-
-%% Nothing left.
-rcv(<<>> = Bin) ->
- Bin;
-
-%% The Message Length isn't even sufficient for a header. Chances are
-%% things will go south from here but if we're lucky then the bytes we
-%% have extend to an intended message boundary and we can recover by
-%% simply receiving them. Make it so.
-rcv(<<_:1/binary, Len:24, _/binary>> = Bin)
- when Len < 20 ->
- {Bin, <<>>};
-
-%% Enough bytes to extract a message.
-rcv(<<_:1/binary, Len:24, _/binary>> = Bin)
- when Len =< size(Bin) ->
- recv1(Len, Bin);
-
-%% Or not: wait for more packets.
-rcv(<<_:1/binary, Len:24, _/binary>> = Head) ->
- {Len, size(Head), Head, []};
+acc1(Len, Bin) ->
+ [Len | Bin].
+
+%% acc/1
+
+%% Don't match on Bin since this results in it being copied at the
+%% next append according to the Efficiency Guide. This is also the
+%% reason that the Len is extracted and maintained when accumulating
+%% messages. The simplest implementation is just to accumulate a
+%% binary and match <<_, Len:24, _/binary>> each time the length is
+%% required, but the performance of this decays quadratically with the
+%% message length, since the binary is then copied with each append of
+%% additional bytes from gen_tcp.
+
+acc(Bin)
+ when 3 < byte_size(Bin) ->
+ {Head, _} = split_binary(Bin, 4),
+ [_,A,B,C] = binary_to_list(Head),
+ Len = (A bsl 16) bor (B bsl 8) bor C,
+ if Len < 20 ->
+ %% Message length isn't sufficient for a Diameter Header.
+ %% Chances are things will go south from here but if we're
+ %% lucky then the bytes we have extend to an intended
+ %% message boundary and we can recover by simply receiving
+ %% them. Make it so.
+ {Bin, <<>>};
+ true ->
+ acc1(Len, Bin)
+ end;
%% Not even 4 bytes yet.
-rcv(Head) ->
- Head.
-
-%% recv1/2
-
-recv1(Len, Bin) ->
- <<Msg:Len/binary, Rest/binary>> = Bin,
- {Msg, Rest}.
-
-%% bin/2
-
-bin(Head, Acc) ->
- list_to_binary([Head | lists:reverse(Acc)]).
+acc(Bin) ->
+ Bin.
%% bin/1
-bin({_, _, Head, Acc}) ->
- bin(Head, Acc);
+bin([_ | Bin]) ->
+ Bin;
-bin(Bin)
- when is_binary(Bin) ->
+bin(Bin) ->
Bin.
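
The frag() value above is now either a plain binary (fewer than four bytes received, or a nonsensical length) or the improper list [Length | Binary], caching the message length so that the growing binary is only appended to, never rematched; bin/1 turns either shape back into the accumulated bytes when the fragment has to be flushed. A small illustration of the two shapes (the bytes are made up):

%% 12 of an announced 28 bytes received so far: the length is kept at
%% the head of an improper list, the bytes accumulate in its tail.
Frag = [28 | <<1, 28:24, 0:64>>],
[Len | Bin] = Frag,
{Len, byte_size(Bin)}.
%% -> {28,12}
%%
%% With fewer than four bytes the fragment is just the binary itself:
%% acc(<<1>>, <<0,0>>) above returns <<1,0,0>>, and the length can
%% only be read once a fourth byte arrives.
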
%% flush/1
@@ -911,14 +906,20 @@ setopts(#transport{socket = Sock,
module = M}
= S)
when B, not A ->
- case setopts(M, Sock, [{active, once}]) of
- ok -> S#transport{active = true};
- X -> x({setopts, Sock, M, X}) %% possibly on peer disconnect
- end;
+ setopts(M, Sock),
+ S#transport{active = true};
setopts(S) ->
S.
+%% setopts/2
+
+setopts(M, Sock) ->
+ case setopts(M, Sock, [{active, once}]) of
+ ok -> ok;
+ X -> x({setopts, Sock, M, X}) %% possibly on peer disconnect
+ end.
+
%% portnr/2
portnr(gen_tcp, Sock) ->
@@ -988,7 +989,7 @@ message(ack, _, #transport{message_cb = false} = S) ->
S;
message(Dir, Msg, #transport{message_cb = CB} = S) ->
- recv(<<>>, actions(cb(CB, Dir, Msg), Dir, S)).
+ setopts(actions(cb(CB, Dir, Msg), Dir, S)).
%% actions/3
diff --git a/lib/diameter/test/diameter_codec_SUITE.erl b/lib/diameter/test/diameter_codec_SUITE.erl
index 9f08f49f9f..17112794e4 100644
--- a/lib/diameter/test/diameter_codec_SUITE.erl
+++ b/lib/diameter/test/diameter_codec_SUITE.erl
@@ -291,7 +291,8 @@ recode(Msg, Dict) ->
recode(#diameter_packet{msg = Msg}, Dict).
opts(Mod) ->
- #{dictionary => Mod,
+ #{app_dictionary => Mod,
+ decode_format => record,
string_decode => false,
strict_mbit => true,
rfc => 6733,
diff --git a/lib/diameter/test/diameter_codec_SUITE_data/diameter_test_unknown.erl b/lib/diameter/test/diameter_codec_SUITE_data/diameter_test_unknown.erl
index 700910878c..c6bba75f09 100644
--- a/lib/diameter/test/diameter_codec_SUITE_data/diameter_test_unknown.erl
+++ b/lib/diameter/test/diameter_codec_SUITE_data/diameter_test_unknown.erl
@@ -77,7 +77,8 @@ dec('BR', #diameter_packet
ok.
opts(Mod) ->
- #{dictionary => Mod,
+ #{app_dictionary => Mod,
+ decode_format => record,
string_decode => true,
strict_mbit => true,
rfc => 6733,
diff --git a/lib/diameter/test/diameter_codec_test.erl b/lib/diameter/test/diameter_codec_test.erl
index b548f85cb8..70e910ffa6 100644
--- a/lib/diameter/test/diameter_codec_test.erl
+++ b/lib/diameter/test/diameter_codec_test.erl
@@ -44,7 +44,8 @@ base() ->
[] = run([[fun base/1, T] || T <- [zero, decode]]).
gen(Mod) ->
- Fs = [{Mod, F, []} || F <- [name, id, vendor_id, vendor_name]],
+ Fs = [{Mod, F, []} || Mod /= diameter_gen_doic_rfc7683,
+ F <- [name, id, vendor_id, vendor_name]],
[] = run(Fs ++ [[fun gen/2, Mod, T] || T <- [messages,
command_codes,
avp_types,
@@ -216,10 +217,11 @@ avp(Mod, encode = X, V, Name, _) ->
opts(Mod) ->
(opts())#{module => Mod,
- dictionary => Mod}.
+ app_dictionary => Mod}.
opts() ->
- #{string_decode => true,
+ #{decode_format => record,
+ string_decode => true,
strict_mbit => true,
rfc => 6733,
failed_avp => false}.
diff --git a/lib/diameter/test/diameter_event_SUITE.erl b/lib/diameter/test/diameter_event_SUITE.erl
index 57d3427037..a291dde6be 100644
--- a/lib/diameter/test/diameter_event_SUITE.erl
+++ b/lib/diameter/test/diameter_event_SUITE.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2013-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2013-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -63,7 +63,8 @@
{'Host-IP-Address', [?ADDR]},
{'Vendor-Id', 12345},
{'Product-Name', "OTP/diameter"},
- {'Acct-Application-Id', [D:id() || D <- Dicts]}
+ {'Acct-Application-Id', [D:id() || D <- Dicts]},
+ {decode_format, map}
| [{application, [{dictionary, D},
{module, #diameter_callback{}}]}
|| D <- Dicts]]).
@@ -111,7 +112,8 @@ up(Config) ->
{Svc, Ref} = connect(Config, [{connect_timer, 5000},
{watchdog_timer, 15000}]),
start = event(Svc),
- {up, Ref, {TPid, Caps}, Cfg, #diameter_packet{}} = event(Svc),
+ {up, Ref, {TPid, Caps}, Cfg, #diameter_packet{msg = M}} = event(Svc),
+ ['CEA' | #{}] = M, %% assert
{watchdog, Ref, _, {initial, okay}, _} = event(Svc),
%% Kill the transport process and see that the connection is
%% reestablished after a watchdog timeout, not after connect_timer
@@ -131,8 +133,9 @@ down(Config) ->
{connect_timer, 5000},
{watchdog_timer, 20000}]),
start = event(Svc),
- {closed, Ref, {'CEA', ?NO_COMMON_APP, _, #diameter_packet{}}, _}
+ {closed, Ref, {'CEA', ?NO_COMMON_APP, _, #diameter_packet{msg = M}}, _}
= event(Svc),
+ ['CEA' | #{}] = M, %% assert
{reconnect, Ref, _} = event(Svc, 4000, 10000).
%% Connect with matching capabilities but have the server delay its
diff --git a/lib/diameter/test/diameter_examples_SUITE.erl b/lib/diameter/test/diameter_examples_SUITE.erl
index eb99f10fe6..ee44ed8dc9 100644
--- a/lib/diameter/test/diameter_examples_SUITE.erl
+++ b/lib/diameter/test/diameter_examples_SUITE.erl
@@ -344,7 +344,7 @@ top(Dir, LibDir) ->
start({server, Prot}) ->
ok = diameter:start(),
ok = server:start(),
- {ok, Ref} = server:listen(Prot),
+ {ok, Ref} = server:listen({Prot, any, 3868}),
[_] = ?util:lport(Prot, Ref),
ok;
@@ -352,7 +352,7 @@ start({client = Svc, Prot}) ->
ok = diameter:start(),
true = diameter:subscribe(Svc),
ok = client:start(),
- {ok, Ref} = client:connect(Prot),
+ {ok, Ref} = client:connect({Prot, loopback, loopback, 3868}),
receive #diameter_event{info = {up, Ref, _, _, _}} -> ok end;
start(Config) ->
diff --git a/lib/diameter/test/diameter_traffic_SUITE.erl b/lib/diameter/test/diameter_traffic_SUITE.erl
index 84b41f14b7..ffb4a508cd 100644
--- a/lib/diameter/test/diameter_traffic_SUITE.erl
+++ b/lib/diameter/test/diameter_traffic_SUITE.erl
@@ -20,6 +20,7 @@
%%
%% Tests of traffic between two Diameter nodes, one client, one server.
+%% The traffic isn't meant to be sensible, just to exercise code.
%%
-module(diameter_traffic_SUITE).
@@ -27,15 +28,18 @@
-export([suite/0,
all/0,
groups/0,
+ init_per_suite/0,
init_per_suite/1,
end_per_suite/1,
+ init_per_group/1,
init_per_group/2,
end_per_group/2,
init_per_testcase/2,
end_per_testcase/2]).
%% testcases
--export([start/1,
+-export([rfc4005/1,
+ start/1,
start_services/1,
add_transports/1,
result_codes/1,
@@ -46,6 +50,7 @@
send_protocol_error/1,
send_experimental_result/1,
send_arbitrary/1,
+ send_proxy_info/1,
send_unknown/1,
send_unknown_short/1,
send_unknown_mandatory/1,
@@ -63,6 +68,7 @@
send_invalid_reject/1,
send_unexpected_mandatory_decode/1,
send_unexpected_mandatory/1,
+ send_too_many/1,
send_long/1,
send_maxlen/1,
send_nopeer/1,
@@ -98,18 +104,20 @@
stop/1]).
%% diameter callbacks
--export([peer_up/3,
- peer_down/3,
- pick_peer/6, pick_peer/7,
- prepare_request/5, prepare_request/6,
- prepare_retransmit/5,
- handle_answer/6, handle_answer/7,
- handle_error/6,
- handle_request/3]).
+-export([peer_up/4,
+ peer_down/4,
+ pick_peer/7, pick_peer/8,
+ prepare_request/6, prepare_request/7,
+ prepare_retransmit/6,
+ handle_answer/7, handle_answer/8,
+ handle_error/7,
+ handle_request/4]).
%% diameter_{tcp,sctp} callbacks
-export([message/3]).
+-include_lib("kernel/include/inet_sctp.hrl").
+
-include("diameter.hrl").
-include("diameter_gen_base_rfc3588.hrl").
-include("diameter_gen_base_accounting.hrl").
@@ -119,13 +127,22 @@
%% ===========================================================================
+%% Fraction of shuffle/parallel groups to randomly skip.
+-define(SKIP, 0.25).
+
+%% Positive number of testcases from which to select (randomly) from
+%% tc(), the list of testcases to run, or [] to run all. The random
+%% selection is to limit the time it takes for the suite to run.
+-define(LIMIT, #{tcp => 42, sctp => 5}).
+
-define(util, diameter_util).
-define(A, list_to_atom).
-define(L, atom_to_list).
+-define(B, iolist_to_binary).
%% Don't use is_record/2 since dictionary hrl's aren't included.
-%% (Since they define conflicting reqcords with the same names.)
+%% (Since they define conflicting records with the same names.)
-define(is_record(Rec, Name), (Name == element(1, Rec))).
-define(ADDR, {127,0,0,1}).
@@ -138,14 +155,14 @@
%% Sequence mask for End-to-End and Hop-by-Hop identifiers.
-define(CLIENT_MASK, {1,26}). %% 1 in top 6 bits
-%% How to construct messages, as record or list.
--define(ENCODINGS, [list, record]).
+%% How to construct outgoing messages.
+-define(ENCODINGS, [list, record, map]).
-%% How to send answers, in a diameter_packet or not.
--define(CONTAINERS, [pkt, msg]).
+%% How to decode incoming messages.
+-define(DECODINGS, [record, none, map, list, record_from_map]).
-%% Which common dictionary to use in the clients.
--define(RFCS, [rfc3588, rfc6733]).
+%% Which dictionary to use in the clients.
+-define(RFCS, [rfc3588, rfc6733, rfc4005]).
%% Whether to decode stringish Diameter types to strings, or leave
%% them as binary.
@@ -163,13 +180,12 @@
-record(group,
{transport,
strings,
+ encoding,
client_service,
- client_encoding,
- client_dict0,
+ client_dict,
client_sender,
server_service,
- server_encoding,
- server_container,
+ server_decoding,
server_sender,
server_throttle}).
@@ -182,34 +198,37 @@
%% A common match when receiving answers in a client.
-define(answer_message(SessionId, ResultCode),
- ['answer-message',
- {'Session-Id', SessionId},
- {'Origin-Host', _},
- {'Origin-Realm', _},
- {'Result-Code', ResultCode}
- | _]).
+ ['answer-message' | #{'Session-Id' := SessionId,
+ 'Origin-Host' := _,
+ 'Origin-Realm' := _,
+ 'Result-Code' := ResultCode}]).
-define(answer_message(ResultCode),
- ?answer_message(_, ResultCode)).
+ ['answer-message' | #{'Origin-Host' := _,
+ 'Origin-Realm' := _,
+ 'Result-Code' := ResultCode}]).
%% Config for diameter:start_service/2.
--define(SERVICE(Name, Decode),
+-define(SERVICE(Name, Grp),
[{'Origin-Host', Name ++ "." ++ ?REALM},
{'Origin-Realm', ?REALM},
{'Host-IP-Address', [?ADDR]},
{'Vendor-Id', 12345},
{'Product-Name', "OTP/diameter"},
- {'Auth-Application-Id', [?DIAMETER_APP_ID_COMMON]},
- {'Acct-Application-Id', [?DIAMETER_APP_ID_ACCOUNTING]},
+ {'Auth-Application-Id', [0]}, %% common messages
+ {'Acct-Application-Id', [3]}, %% base accounting
{restrict_connections, false},
- {string_decode, Decode},
+ {string_decode, Grp#group.strings},
+ {avp_dictionaries, [diameter_gen_doic_rfc7683]},
{incoming_maxlen, 1 bsl 21}
| [{application, [{dictionary, D},
- {module, ?MODULE},
+ {module, [?MODULE, Grp]},
{answer_errors, callback}]}
|| D <- [diameter_gen_base_rfc3588,
diameter_gen_base_accounting,
diameter_gen_base_rfc6733,
- diameter_gen_acct_rfc6733]]]).
+ diameter_gen_acct_rfc6733,
+ nas4005],
+ D /= nas4005 orelse have_nas()]]).
-define(SUCCESS,
?'DIAMETER_BASE_RESULT-CODE_SUCCESS').
@@ -227,6 +246,8 @@
?'DIAMETER_BASE_RESULT-CODE_AVP_UNSUPPORTED').
-define(UNSUPPORTED_VERSION,
?'DIAMETER_BASE_RESULT-CODE_UNSUPPORTED_VERSION').
+-define(TOO_MANY,
+ ?'DIAMETER_BASE_RESULT-CODE_AVP_OCCURS_TOO_MANY_TIMES').
-define(REALM_NOT_SERVED,
?'DIAMETER_BASE_RESULT-CODE_REALM_NOT_SERVED').
-define(UNABLE_TO_DELIVER,
@@ -254,64 +275,75 @@ suite() ->
[{timetrap, {seconds, 10}}].
all() ->
- [start, result_codes, {group, traffic}, empty, stop].
+ [rfc4005, start, result_codes, {group, traffic}, empty, stop].
+%% Redefine this to run one or more groups for debugging purposes.
+-define(GROUPS, []).
+%-define(GROUPS, [[tcp,rfc6733,record,map,false,false,false,false]]).
+
+%% Issues with gen_sctp sporadically cause huge numbers of failed
+%% testcases when running testcases in parallel.
groups() ->
- [{P, [P], Ts} || Ts <- [tc(tc())], P <- [shuffle, parallel]]
+ Names = names(),
+ [{P, [P], Ts} || Ts <- [tc()], P <- [shuffle, parallel]]
++
- [{?util:name([T,R,D,A,C,S,SS,ST,CS]),
- [],
- [{group, if S -> shuffle; not S -> parallel end}]}
- || T <- ?TRANSPORTS,
- R <- ?ENCODINGS,
- D <- ?RFCS,
- A <- ?ENCODINGS,
- C <- ?CONTAINERS,
- S <- ?STRING_DECODES,
- SS <- ?SENDERS,
- ST <- ?CALLBACKS,
- CS <- ?SENDERS]
+ [{?util:name(N), [], [{group, if T == sctp; S -> shuffle;
+ true -> parallel end}]}
+ || [T,_,_,_,S|_] = N <- Names]
++
- [{T, [], groups([[T,R,D,A,C,S,SS,ST,CS]
- || R <- ?ENCODINGS,
- D <- ?RFCS,
- A <- ?ENCODINGS,
- C <- ?CONTAINERS,
- S <- ?STRING_DECODES,
- SS <- ?SENDERS,
- ST <- ?CALLBACKS,
- CS <- ?SENDERS,
- SS orelse CS])} %% avoid deadlock
+ [{T, [], [{group, ?util:name(N)} || N <- names(Names, ?GROUPS),
+ T == hd(N)]}
|| T <- ?TRANSPORTS]
++
[{traffic, [], [{group, T} || T <- ?TRANSPORTS]}].
-%groups(_) -> %% debug
-% Name = [sctp,record,rfc6733,record,pkt,false,false,false,false],
-% [{group, ?util:name(Name)}];
-groups(Names) ->
- [{group, ?util:name(L)} || L <- Names].
+names() ->
+ [[T,R,E,D,S,ST,SS,CS] || T <- ?TRANSPORTS,
+ R <- ?RFCS,
+ E <- ?ENCODINGS,
+ D <- ?DECODINGS,
+ S <- ?STRING_DECODES,
+ ST <- ?CALLBACKS,
+ SS <- ?SENDERS,
+ CS <- ?SENDERS].
+
+names(Names, []) ->
+ [N || N <- Names,
+ [CS,SS|_] <- [lists:reverse(N)],
+ SS orelse CS]; %% avoid deadlock
-%tc([N|_]) -> %% debug
-% [N];
-tc(L) ->
- L.
+names(_, Names) ->
+ Names.
%% --------------------
+init_per_suite() ->
+ [{timetrap, {seconds, 60}}].
+
init_per_suite(Config) ->
- [{sctp, ?util:have_sctp()} | Config].
+ [{rfc4005, compile_and_load()}, {sctp, ?util:have_sctp()} | Config].
end_per_suite(_Config) ->
+ code:delete(nas4005),
+ code:purge(nas4005),
ok.
%% --------------------
+init_per_group(_) ->
+ [{timetrap, {seconds, 30}}].
+
init_per_group(Name, Config)
when Name == shuffle;
Name == parallel ->
- start_services(Config),
- add_transports(Config);
+ case rand:uniform() < ?SKIP of
+ true ->
+ {skip, random};
+ false ->
+ start_services(Config),
+ add_transports(Config),
+ replace({sleep, Name == parallel}, Config)
+ end;
init_per_group(sctp = Name, Config) ->
{_, Sctp} = lists:keyfind(Name, 1, Config),
@@ -322,24 +354,22 @@ init_per_group(sctp = Name, Config) ->
end;
init_per_group(Name, Config) ->
+ Nas = proplists:get_value(rfc4005, Config, false),
case ?util:name(Name) of
- [T,R,D,A,C,S,SS,ST,CS] ->
+ [_,R,_,_,_,_,_,_] when R == rfc4005, true /= Nas ->
+ {skip, rfc4005};
+ [T,R,E,D,S,ST,SS,CS] ->
G = #group{transport = T,
strings = S,
+ encoding = E,
client_service = [$C|?util:unique_string()],
- client_encoding = R,
- client_dict0 = dict0(D),
+ client_dict = appdict(R),
client_sender = CS,
server_service = [$S|?util:unique_string()],
- server_encoding = A,
- server_container = C,
+ server_decoding = D,
server_sender = SS,
server_throttle = ST},
- %% Limit the number of testcase, since the number of
- %% groups is large.
- All = ?util:scramble(tc()),
- TCs = lists:sublist(All, rand:uniform(32)),
- [{group, G}, {runlist, TCs} | Config];
+ replace([{group, G}, {runlist, select(T)}], Config);
_ ->
Config
end.
@@ -353,8 +383,26 @@ end_per_group(Name, Config)
end_per_group(_, _) ->
ok.
+select(T) ->
+ try maps:get(T, ?LIMIT) of
+ N ->
+ lists:sublist(?util:scramble(tc()), max(5, rand:uniform(N)))
+ catch
+ error:_ -> ?LIMIT
+ end.
+
%% --------------------
+%% Work around common_test accumulating Config improperly, causing
+%% testcases to get Config from groups and suites they're not in.
+init_per_testcase(N, Config)
+ when N == rfc4005;
+ N == start;
+ N == result_codes;
+ N == empty;
+ N == stop ->
+ Config;
+
%% Skip testcases that can reasonably fail under SCTP.
init_per_testcase(Name, Config) ->
TCs = proplists:get_value(runlist, Config, []),
@@ -368,12 +416,26 @@ init_per_testcase(Name, Config) ->
_ when not Run ->
{skip, random};
_ ->
+ proplists:get_value(sleep, Config, false)
+ andalso timer:sleep(rand:uniform(200)),
[{testcase, Name} | Config]
end.
end_per_testcase(_, _) ->
ok.
+%% replace/2
+%%
+%% Work around common_test running init functions inappropriately, and
+%% thereby accumulating more config than expected.
+
+replace(Pairs, Config)
+ when is_list(Pairs) ->
+ lists:foldl(fun replace/2, Config, Pairs);
+
+replace({Key, _} = T, Config) ->
+ [T | lists:keydelete(Key, 1, Config)].
+
%% --------------------
%% Testcases to run when services are started and connections
@@ -386,6 +448,7 @@ tc() ->
send_protocol_error,
send_experimental_result,
send_arbitrary,
+ send_proxy_info,
send_unknown,
send_unknown_short,
send_unknown_mandatory,
@@ -403,6 +466,7 @@ tc() ->
send_invalid_reject,
send_unexpected_mandatory_decode,
send_unexpected_mandatory,
+ send_too_many,
send_long,
send_maxlen,
send_nopeer,
@@ -440,16 +504,26 @@ start(_Config) ->
ok = diameter:start().
start_services(Config) ->
- #group{strings = S,
- client_service = CN,
- server_service = SN}
+ #group{client_service = CN,
+ server_service = SN,
+ server_decoding = SD}
+ = Grp
= group(Config),
- ok = diameter:start_service(SN, ?SERVICE(SN, S)),
- ok = diameter:start_service(CN, [{sequence, ?CLIENT_MASK}
- | ?SERVICE(CN, S)]).
+ ok = diameter:start_service(SN, [{traffic_counters, bool()},
+ {decode_format, SD}
+ | ?SERVICE(SN, Grp)]),
+ ok = diameter:start_service(CN, [{traffic_counters, bool()},
+ {sequence, ?CLIENT_MASK},
+ {decode_format, map},
+ {strict_arities, decode}
+ | ?SERVICE(CN, Grp)]).
+
+bool() ->
+ 0.5 =< rand:uniform().
add_transports(Config) ->
#group{transport = T,
+ encoding = E,
client_service = CN,
client_sender = CS,
server_service = SN,
@@ -459,30 +533,54 @@ add_transports(Config) ->
LRef = ?util:listen(SN,
[T,
{sender, SS},
- {message_cb, ST andalso {?MODULE, message, [4]}}
+ {message_cb, ST andalso {?MODULE, message, [0]}}
| [{packet, hd(?util:scramble([false, raw]))}
|| T == sctp andalso CS]],
[{capabilities_cb, fun capx/2},
- {pool_size, 8},
- {applications, apps(rfc3588)}]
+ {pool_size, 8}
+ | server_apps()]
++ [{spawn_opt, {erlang, spawn, []}} || CS]),
Cs = [?util:connect(CN,
- [T, {sender, CS}],
+ [T, {sender, CS} | client_opts(T)],
LRef,
- [{id, Id},
- {capabilities, [{'Origin-State-Id', origin(Id)}]},
- {applications, apps(D)}])
- || A <- ?ENCODINGS,
- C <- ?CONTAINERS,
- D <- ?RFCS,
- Id <- [{A,C}]],
- %% The server uses the client's Origin-State-Id to decide how to
- %% answer.
+ [{id, Id}
+ | client_apps(R, [{'Origin-State-Id', origin(Id)}])])
+ || D <- ?DECODINGS, %% for multiple candidate peers
+ R <- ?RFCS,
+ R /= rfc4005 orelse have_nas(),
+ Id <- [{D,E}]],
?util:write_priv(Config, "transport", [LRef | Cs]).
-apps(D0) ->
- D = dict0(D0),
- [acct(D), D].
+client_opts(tcp) ->
+ [];
+client_opts(sctp) ->
+ [{sctp_initmsg, #sctp_initmsg{num_ostreams = N,
+ max_instreams = 5}}
+ || N <- [rand:uniform(8)],
+ N =< 6].
+
+server_apps() ->
+ B = have_nas(),
+ [{applications, [diameter_gen_base_rfc3588,
+ diameter_gen_base_accounting]
+ ++ [nas4005 || B]},
+ {capabilities, [{'Auth-Application-Id', [0] ++ [1 || B]}, %% common, NAS
+ {'Acct-Application-Id', [3]}]}]. %% accounting
+
+client_apps(D, Caps) ->
+ if D == rfc4005 ->
+ [{applications, [nas4005]},
+ {capabilities, [{'Auth-Application-Id', [1]}, %% NAS
+ {'Acct-Application-Id', []}
+ | Caps]}];
+ true ->
+ D0 = dict0(D),
+ [{applications, [acct(D0), D0]},
+ {capabilities, Caps}]
+ end.
+
+have_nas() ->
+ false /= code:is_loaded(nas4005).
remove_transports(Config) ->
#group{client_service = CN,
@@ -515,9 +613,16 @@ capx(_, #diameter_caps{origin_host = {OH,DH}}) ->
%% ===========================================================================
+%% Fail only this testcase if the RFC 4005 dictionary hasn't been
+%% successfully compiled and loaded.
+rfc4005(Config) ->
+ true = proplists:get_value(rfc4005, Config).
+
%% Ensure that result codes have the expected values.
result_codes(_Config) ->
- {2001, 3001, 3002, 3003, 3004, 3007, 3008, 3009, 5001, 5011, 5014}
+ {2001,
+ 3001, 3002, 3003, 3004, 3007, 3008, 3009,
+ 5001, 5009, 5011, 5014}
= {?SUCCESS,
?COMMAND_UNSUPPORTED,
?UNABLE_TO_DELIVER,
@@ -527,6 +632,7 @@ result_codes(_Config) ->
?INVALID_HDR_BITS,
?INVALID_AVP_BITS,
?AVP_UNSUPPORTED,
+ ?TOO_MANY,
?UNSUPPORTED_VERSION,
?INVALID_AVP_LENGTH}.
@@ -534,8 +640,8 @@ result_codes(_Config) ->
send_ok(Config) ->
Req = ['ACR', {'Accounting-Record-Type', ?EVENT_RECORD},
{'Accounting-Record-Number', 1}],
-
- ['ACA', {'Session-Id', _}, {'Result-Code', ?SUCCESS} | _]
+ ['ACA' | #{'Result-Code' := ?SUCCESS,
+ 'Session-Id' := _}]
= call(Config, Req).
%% Send an accounting ACR that the server answers badly to.
@@ -551,7 +657,8 @@ send_eval(Config) ->
Req = ['ACR', {'Accounting-Record-Type', ?EVENT_RECORD},
{'Accounting-Record-Number', 3}],
- ['ACA', {'Session-Id', _}, {'Result-Code', ?SUCCESS} | _]
+ ['ACA' | #{'Result-Code' := ?SUCCESS,
+ 'Session-Id' := _}]
= call(Config, Req).
%% Send an accounting ACR that the server tries to answer with an
@@ -564,20 +671,87 @@ send_bad_answer(Config) ->
= call(Config, Req).
%% Send an ACR that the server callback answers explicitly with a
-%% protocol error.
+%% protocol error and some AVPs to check the decoding of.
send_protocol_error(Config) ->
Req = ['ACR', {'Accounting-Record-Type', ?EVENT_RECORD},
{'Accounting-Record-Number', 4}],
- ?answer_message(?TOO_BUSY)
- = call(Config, Req).
+ ['answer-message' | #{'Result-Code' := ?TOO_BUSY,
+ 'AVP' := [OLR | _]} = Avps]
+ = call(Config, Req),
+
+ #diameter_avp{name = 'OC-OLR',
+ value = #{'OC-Sequence-Number' := 1,
+ 'OC-Report-Type' := 0, %% HOST_REPORT
+ 'OC-Reduction-Percentage' := [25],
+ 'OC-Validity-Duration' := [60],
+ 'AVP' := [OSF]}}
+ = OLR,
+ #diameter_avp{name = 'OC-Supported-Features',
+ value = #{} = Fs}
+ = OSF,
+ 0 = maps:size(Fs),
+
+ #group{client_dict = D} = group(Config),
+
+ if D == nas4005 ->
+ error = maps:find('Failed-AVP', Avps),
+ #{'AVP' := [_,Failed]}
+ = Avps,
+ #diameter_avp{name = 'Failed-AVP',
+ value = #{'AVP' := [NP,FR,AP]}}
+ = Failed,
+ #diameter_avp{name = 'NAS-Port',
+ value = 44}
+ = NP,
+ #diameter_avp{name = 'Firmware-Revision',
+ value = 12}
+ = FR,
+ #diameter_avp{name = 'Auth-Grace-Period',
+ value = 13}
+ = AP;
+
+ D == diameter_gen_base_rfc3588;
+       D == diameter_gen_base_accounting ->
+ error = maps:find('Failed-AVP', Avps),
+ #{'AVP' := [_,Failed]}
+ = Avps,
+
+ #diameter_avp{name = 'Failed-AVP',
+ value = #{'AVP' := [NP,FR,AP]}}
+ = Failed,
+ #diameter_avp{name = undefined,
+ value = undefined}
+ = NP,
+ #diameter_avp{name = 'Firmware-Revision',
+ value = 12}
+ = FR,
+ #diameter_avp{name = 'Auth-Grace-Period',
+ value = 13}
+ = AP;
+
+ D == diameter_gen_base_rfc6733;
+ D == diameter_gen_acct_rfc6733 ->
+ #{'Failed-AVP' := [#{'AVP' := [NP,FR,AP]}],
+ 'AVP' := [_]}
+ = Avps,
+ #diameter_avp{name = undefined,
+ value = undefined}
+ = NP,
+ #diameter_avp{name = 'Firmware-Revision',
+ value = 12}
+ = FR,
+ #diameter_avp{name = 'Auth-Grace-Period',
+ value = 13}
+ = AP
+ end.
%% Send a 3xxx Experimental-Result in an answer not setting the E-bit
%% and missing a Result-Code.
send_experimental_result(Config) ->
Req = ['ACR', {'Accounting-Record-Type', ?EVENT_RECORD},
{'Accounting-Record-Number', 5}],
- ['ACA', {'Session-Id', _} | _]
+ ['ACA' | #{'Session-Id' := _}]
= call(Config, Req).
%% Send an ASR with an arbitrary non-mandatory AVP and expect success
@@ -585,24 +759,37 @@ send_experimental_result(Config) ->
send_arbitrary(Config) ->
Req = ['ASR', {'AVP', [#diameter_avp{name = 'Product-Name',
value = "XXX"}]}],
- ['ASA', {'Session-Id', _}, {'Result-Code', ?SUCCESS} | Avps]
+ ['ASA' | #{'Session-Id' := _,
+ 'Result-Code' := ?SUCCESS,
+ 'AVP' := [#diameter_avp{name = 'Product-Name',
+ value = V}]}]
= call(Config, Req),
- {'AVP', [#diameter_avp{name = 'Product-Name',
- value = V}]}
- = lists:last(Avps),
"XXX" = string(V, Config).
+%% Send Proxy-Info in an ASR that the peer answers with 3xxx, and
+%% ensure that the AVP is returned.
+send_proxy_info(Config) ->
+ H0 = ?B(?util:unique_string()),
+ S0 = ?B(?util:unique_string()),
+ Req = ['ASR', {'Proxy-Info', #{'Proxy-Host' => H0,
+ 'Proxy-State' => S0}}],
+ ['answer-message' | #{'Result-Code' := 3999,
+ 'Proxy-Info' := [#{'Proxy-Host' := H,
+ 'Proxy-State' := S}]}]
+ = call(Config, Req),
+ [H0, S0] = [?B(X) || X <- [H,S]].
+
%% Send an unknown AVP (to some client) and check that it comes back.
send_unknown(Config) ->
Req = ['ASR', {'AVP', [#diameter_avp{code = 999,
is_mandatory = false,
data = <<17>>}]}],
- ['ASA', {'Session-Id', _}, {'Result-Code', ?SUCCESS} | Avps]
- = call(Config, Req),
- {'AVP', [#diameter_avp{code = 999,
- is_mandatory = false,
- data = <<17>>}]}
- = lists:last(Avps).
+ ['ASA' | #{'Session-Id' := _,
+ 'Result-Code' := ?SUCCESS,
+ 'AVP' := [#diameter_avp{code = 999,
+ is_mandatory = false,
+ data = <<17>>}]}]
+ = call(Config, Req).
%% Ditto, and point the AVP length past the end of the message. Expect
%% 5014.
@@ -613,28 +800,28 @@ send_unknown_short(Config, M, RC) ->
Req = ['ASR', {'AVP', [#diameter_avp{code = 999,
is_mandatory = M,
data = <<17>>}]}],
- ['ASA', {'Session-Id', _}, {'Result-Code', RC} | Avps]
+ ['ASA' | #{'Session-Id' := _,
+ 'Result-Code' := RC,
+ 'Failed-AVP' := [#{'AVP' := [Avp]}]}]
= call(Config, Req),
- [#'diameter_base_Failed-AVP'{'AVP' = As}]
- = proplists:get_value('Failed-AVP', Avps),
- [#diameter_avp{code = 999,
- is_mandatory = M,
- data = <<17, _/binary>>}] %% extra bits from padding
- = As.
+ #diameter_avp{code = 999,
+ is_mandatory = M,
+ data = <<17, _/binary>>} %% extra bits from padding
+ = Avp.
%% Ditto but set the M flag.
send_unknown_mandatory(Config) ->
Req = ['ASR', {'AVP', [#diameter_avp{code = 999,
is_mandatory = true,
data = <<17>>}]}],
- ['ASA', {'Session-Id', _}, {'Result-Code', ?AVP_UNSUPPORTED} | Avps]
+ ['ASA' | #{'Session-Id' := _,
+ 'Result-Code' := ?AVP_UNSUPPORTED,
+ 'Failed-AVP' := [#{'AVP' := [Avp]}]}]
= call(Config, Req),
- [#'diameter_base_Failed-AVP'{'AVP' = As}]
- = proplists:get_value('Failed-AVP', Avps),
- [#diameter_avp{code = 999,
- is_mandatory = true,
- data = <<17>>}]
- = As.
+ #diameter_avp{code = 999,
+ is_mandatory = true,
+ data = <<17>>}
+ = Avp.
%% Ditto, and point the AVP length past the end of the message. Expect
%% 5014 instead of 5001.
@@ -647,15 +834,27 @@ send_unexpected_mandatory_decode(Config) ->
Req = ['ASR', {'AVP', [#diameter_avp{code = 27, %% Session-Timeout
is_mandatory = true,
data = <<12:32>>}]}],
- ['ASA', {'Session-Id', _}, {'Result-Code', ?AVP_UNSUPPORTED} | Avps]
+ ['ASA' | #{'Session-Id' := _,
+ 'Result-Code' := ?AVP_UNSUPPORTED,
+ 'Failed-AVP' := [#{'AVP' := [Avp]}]}]
= call(Config, Req),
- [#'diameter_base_Failed-AVP'{'AVP' = As}]
- = proplists:get_value('Failed-AVP', Avps),
- [#diameter_avp{code = 27,
- is_mandatory = true,
- value = 12,
- data = <<12:32>>}]
- = As.
+ #diameter_avp{code = 27,
+ is_mandatory = true,
+ value = 12,
+ data = <<12:32>>}
+ = Avp.
+
+%% Send two Auth-Application-Id in an ASR and expect 5009.
+send_too_many(Config) ->
+ Req = ['ASR', {'Auth-Application-Id', [?APP_ID, 44]}],
+
+ ['ASA' | #{'Session-Id' := _,
+ 'Result-Code' := ?TOO_MANY,
+ 'Failed-AVP' := [#{'AVP' := [Avp]}]}]
+ = call(Config, Req),
+ #diameter_avp{name = 'Auth-Application-Id',
+ value = 44}
+ = Avp.
%% Send an ASR containing a faulty Grouped AVP (empty Proxy-Host in
%% Proxy-Info) and expect that only the faulty AVP is sent in
@@ -665,16 +864,13 @@ send_unexpected_mandatory_decode(Config) ->
send_grouped_error(Config) ->
Req = ['ASR', {'Proxy-Info', [[{'Proxy-Host', "abcd"},
{'Proxy-State', ""}]]}],
- ['ASA', {'Session-Id', _}, {'Result-Code', ?INVALID_AVP_LENGTH} | Avps]
+ ['ASA' | #{'Session-Id' := _,
+ 'Result-Code' := ?INVALID_AVP_LENGTH,
+ 'Failed-AVP' := [#{'AVP' := [Avp]}]}]
= call(Config, Req),
- [#'diameter_base_Failed-AVP'{'AVP' = As}]
- = proplists:get_value('Failed-AVP', Avps),
- [#diameter_avp{name = 'Proxy-Info',
- value = #'diameter_base_Proxy-Info'
- {'Proxy-Host' = Empty,
- 'Proxy-State' = undefined}}]
- = As,
- <<0>> = iolist_to_binary(Empty).
+ #diameter_avp{name = 'Proxy-Info', value = #{'Proxy-Host' := H}}
+ = Avp,
+ <<0>> = ?B(H).
%% Send an STR that the server ignores.
send_noreply(Config) ->
@@ -702,7 +898,8 @@ send_error_bit(Config) ->
%% Send a bad version and check that we get 5011.
send_unsupported_version(Config) ->
Req = ['STR', {'Termination-Cause', ?LOGOUT}],
- ['STA', {'Session-Id', _}, {'Result-Code', ?UNSUPPORTED_VERSION} | _]
+ ['STA' | #{'Session-Id' := _,
+ 'Result-Code' := ?UNSUPPORTED_VERSION}]
= call(Config, Req).
%% Send a request containing an AVP length > data size.
@@ -722,16 +919,11 @@ send_zero_avp_length(Config) ->
send_invalid_avp_length(Config) ->
Req = ['STR', {'Termination-Cause', ?LOGOUT}],
- ['STA', {'Session-Id', _},
- {'Result-Code', ?INVALID_AVP_LENGTH},
- {'Origin-Host', _},
- {'Origin-Realm', _},
- {'User-Name', _},
- {'Class', _},
- {'Error-Message', _},
- {'Error-Reporting-Host', _},
- {'Failed-AVP', [#'diameter_base_Failed-AVP'{'AVP' = [_]}]}
- | _]
+ ['STA' | #{'Session-Id' := _,
+ 'Result-Code' := ?INVALID_AVP_LENGTH,
+ 'Origin-Host' := _,
+ 'Origin-Realm' := _,
+ 'Failed-AVP' := [#{'AVP' := [_]}]}]
= call(Config, Req).
%% Send a request containing 5xxx errors that the server rejects with
@@ -747,14 +939,16 @@ send_invalid_reject(Config) ->
send_unexpected_mandatory(Config) ->
Req = ['STR', {'Termination-Cause', ?LOGOUT}],
- ['STA', {'Session-Id', _}, {'Result-Code', ?AVP_UNSUPPORTED} | _]
+ ['STA' | #{'Session-Id' := _,
+ 'Result-Code' := ?AVP_UNSUPPORTED}]
= call(Config, Req).
%% Send something long that will be fragmented by TCP.
send_long(Config) ->
Req = ['STR', {'Termination-Cause', ?LOGOUT},
{'User-Name', [binary:copy(<<$X>>, 1 bsl 20)]}],
- ['STA', {'Session-Id', _}, {'Result-Code', ?SUCCESS} | _]
+ ['STA' | #{'Session-Id' := _,
+ 'Result-Code' := ?SUCCESS}]
= call(Config, Req).
%% Send something longer than the configured incoming_maxlen.
@@ -797,7 +991,8 @@ send_any_2(Config) ->
send_all_1(Config) ->
Req = ['STR', {'Termination-Cause', ?LOGOUT}],
Realm = lists:foldr(fun(C,A) -> [C,A] end, [], ?REALM),
- ['STA', {'Session-Id', _}, {'Result-Code', ?SUCCESS} | _]
+ ['STA' | #{'Session-Id' := _,
+ 'Result-Code' := ?SUCCESS}]
= call(Config, Req, [{filter, {all, [{host, any},
{realm, Realm}]}}]).
send_all_2(Config) ->
@@ -826,13 +1021,13 @@ send_detach(Config) ->
Req = ['STR', {'Termination-Cause', ?LOGOUT}],
Ref = make_ref(),
ok = call(Config, Req, [{extra, [{self(), Ref}]}, detach]),
- Ans = receive {Ref, T} -> T end,
- ['STA', {'Session-Id', _}, {'Result-Code', ?SUCCESS} | _]
- = Ans.
+ ['STA' | #{'Session-Id' := _,
+ 'Result-Code' := ?SUCCESS}]
+ = receive {Ref, T} -> T end.
%% Send a request which can't be encoded and expect {error, encode}.
send_encode_error(Config) ->
- {error, encode} = call(Config, ['STR']). %% No Termination-Cause
+ {error, encode} = call(Config, ['STR', {'Termination-Cause', huh}]).
%% Send with filtering and expect success.
send_destination_1(Config) ->
@@ -840,25 +1035,27 @@ send_destination_1(Config) ->
= group(Config),
Req = ['STR', {'Termination-Cause', ?LOGOUT},
{'Destination-Host', [?HOST(SN, ?REALM)]}],
- ['STA', {'Session-Id', _}, {'Result-Code', ?SUCCESS} | _]
+ ['STA' | #{'Session-Id' := _,
+ 'Result-Code' := ?SUCCESS}]
= call(Config, Req, [{filter, {all, [host, realm]}}]).
send_destination_2(Config) ->
Req = ['STR', {'Termination-Cause', ?LOGOUT}],
- ['STA', {'Session-Id', _}, {'Result-Code', ?SUCCESS} | _]
+ ['STA' | #{'Session-Id' := _,
+ 'Result-Code' := ?SUCCESS}]
= call(Config, Req, [{filter, {all, [host, realm]}}]).
%% Send with filtering on and expect failure when specifying an
%% unknown host or realm.
send_destination_3(Config) ->
Req = ['STR', {'Termination-Cause', ?LOGOUT},
- {'Destination-Realm', "unknown.org"}],
+ {'Destination-Realm', <<"unknown.org">>}],
{error, no_connection}
= call(Config, Req, [{filter, {all, [host, realm]}}]).
send_destination_4(Config) ->
#group{server_service = SN}
= group(Config),
Req = ['STR', {'Termination-Cause', ?LOGOUT},
- {'Destination-Host', [?HOST(SN, "unknown.org")]}],
+ {'Destination-Host', [?HOST(SN, ["unknown.org"])]}],
{error, no_connection}
= call(Config, Req, [{filter, {all, [host, realm]}}]).
@@ -866,7 +1063,7 @@ send_destination_4(Config) ->
%% an unknown host or realm.
send_destination_5(Config) ->
Req = ['STR', {'Termination-Cause', ?LOGOUT},
- {'Destination-Realm', "unknown.org"}],
+ {'Destination-Realm', [<<"unknown.org">>]}],
?answer_message(?REALM_NOT_SERVED)
= call(Config, Req).
send_destination_6(Config) ->
@@ -908,7 +1105,8 @@ send_bad_filter(Config, F) ->
%% Specify multiple filter options and expect them to be conjunctive.
send_multiple_filters_1(Config) ->
Fun = fun(#diameter_caps{}) -> true end,
- ['STA', {'Session-Id', _}, {'Result-Code', ?SUCCESS} | _]
+ ['STA' | #{'Session-Id' := _,
+ 'Result-Code' := ?SUCCESS}]
= send_multiple_filters(Config, [host, {eval, Fun}]).
send_multiple_filters_2(Config) ->
E = {erlang, is_tuple, []},
@@ -919,7 +1117,8 @@ send_multiple_filters_3(Config) ->
E2 = {erlang, is_tuple, []},
E3 = {erlang, is_record, [diameter_caps]},
E4 = [{erlang, is_record, []}, diameter_caps],
- ['STA', {'Session-Id', _}, {'Result-Code', ?SUCCESS} | _]
+ ['STA' | #{'Session-Id' := _,
+ 'Result-Code' := ?SUCCESS}]
= send_multiple_filters(Config, [{eval, E} || E <- [E1,E2,E3,E4]]).
send_multiple_filters(Config, Fs) ->
@@ -930,7 +1129,8 @@ send_multiple_filters(Config, Fs) ->
%% only the return value from the prepare_request callback being
%% significant.
send_anything(Config) ->
- ['STA', {'Session-Id', _}, {'Result-Code', ?SUCCESS} | _]
+ ['STA' | #{'Session-Id' := _,
+ 'Result-Code' := ?SUCCESS}]
= call(Config, anything).
%% ===========================================================================
@@ -954,58 +1154,137 @@ call(Config, Req) ->
call(Config, Req, Opts) ->
Name = proplists:get_value(testcase, Config),
- #group{client_service = CN,
- client_encoding = ReqEncoding,
- client_dict0 = Dict0}
- = Group
+ #group{encoding = Enc,
+ client_service = CN,
+ client_dict = Dict0}
= group(Config),
diameter:call(CN,
dict(Req, Dict0),
- msg(Req, ReqEncoding, Dict0),
- [{extra, [{Name, Group}, diameter_lib:now()]} | Opts]).
+ msg(Req, Enc, Dict0),
+ [{extra, [Name, diameter_lib:now()]} | Opts]).
-origin({A,C}) ->
- 2*codec(A) + container(C);
+origin({D,E}) ->
+ 4*decode(D) + encode(E);
origin(N) ->
- {codec(N band 2), container(N rem 2)}.
-
-%% Map booleans, but the readable atoms are part of (constructed)
-%% group names, so it's good that they're readable.
-
-codec(record) -> 0;
-codec(list) -> 1;
-codec(0) -> record;
-codec(_) -> list.
-
-container(pkt) -> 0;
-container(msg) -> 1;
-container(0) -> pkt;
-container(_) -> msg.
+ {decode(N bsr 2), encode(N rem 4)}.
+
+%% Map atoms. The atoms are part of (constructed) group names, so it's
+%% good that they're readable.
+
+decode(record) -> 0;
+decode(list) -> 1;
+decode(map) -> 2;
+decode(none) -> 3;
+decode(record_from_map) -> 4;
+decode(0) -> record;
+decode(1) -> list;
+decode(2) -> map;
+decode(3) -> none;
+decode(4) -> record_from_map.
+
+encode(record) -> 0;
+encode(list) -> 1;
+encode(map) -> 2;
+encode(0) -> record;
+encode(1) -> list;
+encode(2) -> map.
msg([H|_] = Msg, record = E, diameter_gen_base_rfc3588)
when H == 'ACR';
H == 'ACA' ->
msg(Msg, E, diameter_gen_base_accounting);
+
msg([H|_] = Msg, record = E, diameter_gen_base_rfc6733)
when H == 'ACR';
H == 'ACA' ->
msg(Msg, E, diameter_gen_acct_rfc6733);
+
msg([H|T], record, Dict) ->
Dict:'#new-'(Dict:msg2rec(H), T);
+
+msg([H|As], map, _)
+ when is_list(As) ->
+ [H | maps:from_list(As)];
+
msg(Msg, _, _) ->
Msg.
+to_map(#diameter_packet{msg = [_MsgName | Avps] = Msg},
+ #group{server_decoding = map})
+ when is_map(Avps) ->
+ Msg;
+
+to_map(#diameter_packet{msg = [MsgName | Avps]},
+ #group{server_decoding = list}) ->
+ [MsgName | maps:from_list(Avps)];
+
+to_map(#diameter_packet{header = H, msg = Rec},
+ #group{server_decoding = D})
+ when D == record;
+ D == record_from_map ->
+ rec_to_map(Rec, dict(H));
+
+%% No record decode: do it ourselves.
+to_map(#diameter_packet{header = H,
+ msg = Name,
+ bin = Bin},
+ #group{server_decoding = none,
+ strings = B}) ->
+ Opts = #{decode_format => map,
+ string_decode => B,
+ avp_dictionaries => [diameter_gen_doic_rfc7683],
+ strict_mbit => true,
+ rfc => 6733},
+ #diameter_packet{msg = [MsgName | _Map] = Msg}
+ = diameter_codec:decode(dict(H), Opts, Bin),
+ {MsgName, _} = {Name, Msg}, %% assert
+ Msg.
+
+dict(#diameter_header{application_id = Id,
+ cmd_code = Code}) ->
+ if Id == 1 ->
+ nas4005;
+ Code == 271 ->
+ diameter_gen_base_accounting;
+ true ->
+ diameter_gen_base_rfc3588
+ end.
+
+rec_to_map(Rec, Dict) ->
+ [R | Vs] = Dict:'#get-'(Rec),
+ [Dict:rec2msg(R) | maps:from_list([T || {_,V} = T <- Vs,
+ V /= undefined,
+ V /= []])].
+
+appdict(rfc4005) ->
+ nas4005;
+appdict(D) ->
+ dict0(D).
+
dict0(D) ->
?A("diameter_gen_base_" ++ ?L(D)).
-dict(Msg, Dict0)
- when 'ACR' == hd(Msg);
- 'ACA' == hd(Msg);
- ?is_record(Msg, diameter_base_accounting_ACR);
- ?is_record(Msg, diameter_base_accounting_ACA) ->
+dict(Msg, Dict) ->
+ d(name(Msg), Dict).
+
+d(N, nas4005 = D) ->
+ if N == {list, 'answer-message'};
+ N == {map, 'answer-message'};
+ N == {record, 'diameter_base_answer-message'} ->
+ diameter_gen_base_rfc3588;
+ true ->
+ D
+ end;
+d(N, Dict0)
+ when N == {list, 'ACR'};
+ N == {list, 'ACA'};
+ N == {map, 'ACR'};
+ N == {map, 'ACA'};
+ N == {record, diameter_base_accounting_ACR};
+ N == {record, diameter_base_accounting_ACA} ->
acct(Dict0);
-dict(_, Dict0) ->
+d(_, Dict0) ->
Dict0.
acct(diameter_gen_base_rfc3588) ->
@@ -1014,53 +1293,60 @@ acct(diameter_gen_base_rfc6733) ->
diameter_gen_acct_rfc6733.
%% Set only values that aren't already.
-set(_, [H|T], Vs) ->
- [H | Vs ++ T];
-set(#group{client_dict0 = Dict0} = _Group, Rec, Vs) ->
+
+set(_, [N | As], Vs) ->
+ [N | if is_map(As) ->
+ maps:merge(maps:from_list(Vs), As);
+ is_list(As) ->
+ Vs ++ As
+ end];
+
+set(#group{client_dict = Dict0} = _Group, Rec, Vs) ->
Dict = dict(Rec, Dict0),
lists:foldl(fun({F,_} = FV, A) ->
- set(Dict, Dict:'#get-'(F, A), FV, A)
+ reset(Dict, Dict:'#get-'(F, A), FV, A)
end,
Rec,
Vs).
-set(Dict, E, FV, Rec)
+reset(Dict, E, FV, Rec)
when E == undefined;
E == [] ->
Dict:'#set-'(FV, Rec);
-set(_, _, _, Rec) ->
+
+reset(_, _, _, Rec) ->
Rec.
%% ===========================================================================
%% diameter callbacks
-%% peer_up/3
+%% peer_up/4
-peer_up(_SvcName, _Peer, State) ->
+peer_up(_SvcName, _Peer, State, _Group) ->
State.
%% peer_down/3
-peer_down(_SvcName, _Peer, State) ->
+peer_down(_SvcName, _Peer, State, _Group) ->
State.
-%% pick_peer/6-7
+%% pick_peer/7-8
-pick_peer(Peers, _, [$C|_], _State, {Name, Group}, _)
+pick_peer(Peers, _, [$C|_], _State, Group, Name, _)
when Name /= send_detach ->
find(Group, Peers).
-pick_peer(_Peers, _, [$C|_], _State, {send_nopeer, _}, _, ?EXTRA) ->
+pick_peer(_Peers, _, [$C|_], _State, _Group, send_nopeer, _, ?EXTRA) ->
false;
-pick_peer(Peers, _, [$C|_], _State, {send_detach, Group}, _, {_,_}) ->
+pick_peer(Peers, _, [$C|_], _State, Group, send_detach, _, {_,_}) ->
find(Group, Peers).
-find(#group{client_service = CN,
- server_encoding = A,
- server_container = C},
+find(#group{encoding = E,
+ client_service = CN,
+ server_decoding = D},
[_|_] = Peers) ->
- Id = {A,C},
+ Id = {D,E},
[P] = [P || P <- Peers, id(Id, P, CN)],
{ok, P}.
@@ -1069,15 +1355,15 @@ id(Id, {Pid, _Caps}, SvcName) ->
= diameter:service_info(SvcName, Pid),
lists:member({id, Id}, Opts).
-%% prepare_request/5-6
+%% prepare_request/6-7
-prepare_request(_Pkt, [$C|_], {_Ref, _Caps}, {send_discard, _}, _) ->
+prepare_request(_Pkt, [$C|_], {_Ref, _Caps}, _, send_discard, _) ->
{discard, unprepared};
-prepare_request(Pkt, [$C|_], {_Ref, Caps}, {Name, Group}, _) ->
+prepare_request(Pkt, [$C|_], {_Ref, Caps}, Group, Name, _) ->
{send, prepare(Pkt, Caps, Name, Group)}.
-prepare_request(Pkt, [$C|_], {_Ref, Caps}, {send_detach, Group}, _, _) ->
+prepare_request(Pkt, [$C|_], {_Ref, Caps}, Group, send_detach, _, _) ->
{eval_packet, {send, prepare(Pkt, Caps, Group)}, [fun log/2, detach]}.
log(#diameter_packet{bin = Bin} = P, T)
@@ -1086,7 +1372,7 @@ log(#diameter_packet{bin = Bin} = P, T)
%% prepare/4
-prepare(Pkt, Caps, N, #group{client_dict0 = Dict0} = Group)
+prepare(Pkt, Caps, N, #group{client_dict = Dict0} = Group)
when N == send_unknown_short_mandatory;
N == send_unknown_short ->
Req = prepare(Pkt, Caps, Group),
@@ -1106,7 +1392,7 @@ prepare(Pkt, Caps, N, #group{client_dict0 = Dict0} = Group)
<<H:Offset/binary, Len:24, T/binary>> = Bin,
E#diameter_packet{bin = <<H/binary, (Len+9):24, T/binary>>};
-prepare(Pkt, Caps, N, #group{client_dict0 = Dict0} = Group)
+prepare(Pkt, Caps, N, #group{client_dict = Dict0} = Group)
when N == send_long_avp_length;
N == send_short_avp_length;
N == send_zero_avp_length ->
@@ -1132,7 +1418,7 @@ prepare(Pkt, Caps, N, #group{client_dict0 = Dict0} = Group)
T/binary,
Hdr/binary, AL:24, Data/binary>>};
-prepare(Pkt, Caps, N, #group{client_dict0 = Dict0} = Group)
+prepare(Pkt, Caps, N, #group{client_dict = Dict0} = Group)
when N == send_invalid_avp_length;
N == send_invalid_reject ->
Req = prepare(Pkt, Caps, Group),
@@ -1147,7 +1433,7 @@ prepare(Pkt, Caps, N, #group{client_dict0 = Dict0} = Group)
<<V, L:24, H/binary>> = H0, %% assert
E#diameter_packet{bin = <<V, (L+4):24, H/binary, 16:24, 0:32, T/binary>>};
-prepare(Pkt, Caps, send_unexpected_mandatory, #group{client_dict0 = Dict0}
+prepare(Pkt, Caps, send_unexpected_mandatory, #group{client_dict = Dict0}
= Group) ->
Req = prepare(Pkt, Caps, Group),
#diameter_packet{bin = <<V, Len:24, T/binary>>}
@@ -1157,7 +1443,7 @@ prepare(Pkt, Caps, send_unexpected_mandatory, #group{client_dict0 = Dict0}
Avp = <<Code:32, Flags, 8:24>>,
E#diameter_packet{bin = <<V, (Len+8):24, T/binary, Avp/binary>>};
-prepare(Pkt, Caps, send_grouped_error, #group{client_dict0 = Dict0}
+prepare(Pkt, Caps, send_grouped_error, #group{client_dict = Dict0}
= Group) ->
Req = prepare(Pkt, Caps, Group),
#diameter_packet{bin = Bin}
@@ -1189,14 +1475,14 @@ prepare(Pkt, Caps, send_grouped_error, #group{client_dict0 = Dict0}
Payload/binary,
T/binary>>};
-prepare(Pkt, Caps, send_unsupported, #group{client_dict0 = Dict0} = Group) ->
+prepare(Pkt, Caps, send_unsupported, #group{client_dict = Dict0} = Group) ->
Req = prepare(Pkt, Caps, Group),
#diameter_packet{bin = <<H:5/binary, _CmdCode:3/binary, T/binary>>}
= E
= diameter_codec:encode(Dict0, Pkt#diameter_packet{msg = Req}),
E#diameter_packet{bin = <<H/binary, 42:24, T/binary>>};
-prepare(Pkt, Caps, send_unsupported_app, #group{client_dict0 = Dict0}
+prepare(Pkt, Caps, send_unsupported_app, #group{client_dict = Dict0}
= Group) ->
Req = prepare(Pkt, Caps, Group),
#diameter_packet{bin = <<H:8/binary, _ApplId:4/binary, T/binary>>}
@@ -1223,93 +1509,120 @@ prepare(Pkt, Caps, _Name, Group) ->
%% prepare/3
-prepare(#diameter_packet{msg = Req}, Caps, Group)
- when ?is_record(Req, diameter_base_accounting_ACR);
- 'ACR' == hd(Req) ->
+prepare(#diameter_packet{msg = Req} = Pkt, Caps, Group) ->
+ set(name(Req), Pkt, Caps, Group).
+
+%% set/4
+
+set(N, #diameter_packet{msg = Req}, Caps, Group)
+ when N == {record, diameter_base_accounting_ACR};
+ N == {record, nas_ACR};
+ N == {map, 'ACR'};
+ N == {list, 'ACR'} ->
#diameter_caps{origin_host = {OH, _},
origin_realm = {OR, DR}}
= Caps,
- set(Group, Req, [{'Session-Id', diameter:session_id(OH)},
- {'Origin-Host', OH},
- {'Origin-Realm', OR},
- {'Destination-Realm', DR}]);
+ set(Group, Req, [{'Session-Id', [diameter:session_id(OH)]},
+ {'Origin-Host', [OH]},
+ {'Origin-Realm', [OR]},
+ {'Destination-Realm', [DR]}]);
-prepare(#diameter_packet{msg = Req}, Caps, Group)
- when ?is_record(Req, diameter_base_ASR);
- 'ASR' == hd(Req) ->
+set(N, #diameter_packet{msg = Req}, Caps, Group)
+ when N == {record, diameter_base_ASR};
+ N == {record, nas_ASR};
+ N == {map, 'ASR'};
+ N == {list, 'ASR'} ->
#diameter_caps{origin_host = {OH, DH},
origin_realm = {OR, DR}}
= Caps,
- set(Group, Req, [{'Session-Id', diameter:session_id(OH)},
- {'Origin-Host', OH},
- {'Origin-Realm', OR},
- {'Destination-Host', DH},
- {'Destination-Realm', DR},
+ set(Group, Req, [{'Session-Id', [diameter:session_id(OH)]},
+ {'Origin-Host', [OH]},
+ {'Origin-Realm', [OR]},
+ {'Destination-Host', [DH]},
+ {'Destination-Realm', [DR]},
{'Auth-Application-Id', ?APP_ID}]);
-prepare(#diameter_packet{msg = Req}, Caps, Group)
- when ?is_record(Req, diameter_base_STR);
- 'STR' == hd(Req) ->
+set(N, #diameter_packet{msg = Req}, Caps, Group)
+ when N == {record, diameter_base_STR};
+ N == {record, nas_STR};
+ N == {map, 'STR'};
+ N == {list, 'STR'} ->
#diameter_caps{origin_host = {OH, _},
origin_realm = {OR, DR}}
= Caps,
- set(Group, Req, [{'Session-Id', diameter:session_id(OH)},
- {'Origin-Host', OH},
- {'Origin-Realm', OR},
- {'Destination-Realm', DR},
+ set(Group, Req, [{'Session-Id', [diameter:session_id(OH)]},
+ {'Origin-Host', [OH]},
+ {'Origin-Realm', [OR]},
+ {'Destination-Realm', [DR]},
{'Auth-Application-Id', ?APP_ID}]);
-prepare(#diameter_packet{msg = Req}, Caps, Group)
- when ?is_record(Req, diameter_base_RAR);
- 'RAR' == hd(Req) ->
+set(N, #diameter_packet{msg = Req}, Caps, Group)
+ when N == {record, diameter_base_RAR};
+ N == {record, nas_RAR};
+ N == {map, 'RAR'};
+ N == {list, 'RAR'} ->
#diameter_caps{origin_host = {OH, DH},
origin_realm = {OR, DR}}
= Caps,
- set(Group, Req, [{'Session-Id', diameter:session_id(OH)},
- {'Origin-Host', OH},
- {'Origin-Realm', OR},
- {'Destination-Host', DH},
- {'Destination-Realm', DR},
+ set(Group, Req, [{'Session-Id', [diameter:session_id(OH)]},
+ {'Origin-Host', [OH]},
+ {'Origin-Realm', [OR]},
+ {'Destination-Host', [DH]},
+ {'Destination-Realm', [DR]},
{'Auth-Application-Id', ?APP_ID}]).
-%% prepare_retransmit/5
+%% name/1
+
+name([H|#{}]) ->
+ {map, H};
+
+name([H|_]) ->
+ {list, H};
-prepare_retransmit(_Pkt, false, _Peer, _Name, _Group) ->
+name(Rec) ->
+ try
+ {record, element(1, Rec)}
+ catch
+ error: badarg ->
+ false
+ end.
+
+%% prepare_retransmit/6
+
+prepare_retransmit(_Pkt, false, _Peer, _Group, _Name, _) ->
discard.
-%% handle_answer/6-7
+%% handle_answer/7-8
-handle_answer(Pkt, Req, [$C|_], Peer, {Name, Group}, _) ->
+handle_answer(Pkt, Req, [$C|_], Peer, Group, Name, _) ->
answer(Pkt, Req, Peer, Name, Group).
-handle_answer(Pkt, Req, [$C|_], Peer, {send_detach = Name, Group}, _, X) ->
+handle_answer(Pkt, Req, [$C|_], Peer, Group, send_detach = Name, _, X) ->
{Pid, Ref} = X,
Pid ! {Ref, answer(Pkt, Req, Peer, Name, Group)}.
-answer(Pkt, Req, _Peer, Name, #group{client_dict0 = Dict0}) ->
+answer(Pkt, Req, _Peer, Name, #group{client_dict = Dict0}) ->
#diameter_packet{header = H, msg = Ans, errors = Es} = Pkt,
ApplId = app(Req, Name, Dict0),
#diameter_header{application_id = ApplId} = H, %% assert
- Dict = dict(Ans, Dict0),
- [R | Vs] = Dict:'#get-'(answer(Ans, Es, Name)),
- [Dict:rec2msg(R) | Vs].
+ answer(Ans, Es, Name).
%% Missing Result-Code and inappropriate Experimental-Result-Code.
-answer(Rec, Es, send_experimental_result) ->
+answer(Ans, Es, send_experimental_result) ->
[{5004, #diameter_avp{name = 'Experimental-Result'}},
{5005, #diameter_avp{name = 'Result-Code'}}]
= Es,
- Rec;
+ Ans;
%% An inappropriate E-bit results in a decode error ...
-answer(Rec, Es, send_bad_answer) ->
+answer(Ans, Es, send_bad_answer) ->
[{5004, #diameter_avp{name = 'Result-Code'}} | _] = Es,
- Rec;
+ Ans;
%% ... while other errors are reflected in Failed-AVP.
-answer(Rec, [], _) ->
- Rec.
+answer(Ans, [], _) ->
+ Ans.
app(_, send_unsupported_app, _) ->
?BAD_APP;
@@ -1317,25 +1630,29 @@ app(Req, _, Dict0) ->
Dict = dict(Req, Dict0),
Dict:id().
-%% handle_error/6
+%% handle_error/7
-handle_error(timeout = Reason, _Req, [$C|_], _Peer, _, Time) ->
+handle_error(timeout = Reason, _Req, [$C|_], _Peer, _, _, Time) ->
Now = diameter_lib:now(),
{Reason, {diameter_lib:timestamp(Time),
diameter_lib:timestamp(Now),
diameter_lib:micro_diff(Now, Time)}};
-handle_error(Reason, _Req, [$C|_], _Peer, _, _Time) ->
+handle_error(Reason, _Req, [$C|_], _Peer, _, _, _Time) ->
{error, Reason}.
-%% handle_request/3
+%% handle_request/4
%% Note that diameter will set Result-Code and Failed-AVPs if
%% #diameter_packet.errors is non-null.
-handle_request(#diameter_packet{header = H, msg = M, avps = As},
+handle_request(#diameter_packet{header = H, avps = As}
+ = Pkt,
_,
- {_Ref, Caps}) ->
+ {_Ref, Caps},
+ #group{encoding = E,
+ server_decoding = D}
+ = Grp) ->
#diameter_header{end_to_end_id = EI,
hop_by_hop_id = HI}
= H,
@@ -1343,24 +1660,62 @@ handle_request(#diameter_packet{header = H, msg = M, avps = As},
V = EI bsr B, %% assert
V = HI bsr B, %%
#diameter_caps{origin_state_id = {_,[Id]}} = Caps,
- answer(origin(Id), request(M, [H|As], Caps)).
+ {D,E} = T = origin(Id), %% assert
+ wrap(T, H, request(to_map(Pkt, Grp), [H|As], Caps)).
+
+wrap(Id, H, {Tag, Action, Post}) ->
+ {Tag, wrap(Id, H, Action), Post};
-answer(T, {Tag, Action, Post}) ->
- {Tag, answer(T, Action), Post};
-answer(_, {reply, [#diameter_header{} | _]} = T) ->
+wrap(_, _, {reply, [#diameter_header{} | _]} = T) ->
T;
-answer({A,C}, {reply, Ans}) ->
- answer(C, {reply, msg(Ans, A, diameter_gen_base_rfc3588)});
-answer(pkt, {reply, Ans})
- when not is_record(Ans, diameter_packet) ->
- {reply, #diameter_packet{msg = Ans}};
-answer(_, T) ->
+
+wrap({_,E}, H, {reply, Ans}) ->
+ Msg = base_to_nas(msg(Ans, E, diameter_gen_base_rfc3588), H),
+ {reply, wrap(Msg)};
+
+wrap(_, _, T) ->
T.
+%% Randomly wrap the answer in a diameter_packet.
+
+wrap(#diameter_packet{} = Pkt) ->
+ Pkt;
+
+wrap(Msg) ->
+ case rand:uniform(2) of
+ 1 -> #diameter_packet{msg = Msg};
+ 2 -> Msg
+ end.
+
+%% base_to_nas/2
+
+base_to_nas(#diameter_packet{msg = Msg} = Pkt, H) ->
+ Pkt#diameter_packet{msg = base_to_nas(Msg, H)};
+
+base_to_nas(Rec, #diameter_header{application_id = 1})
+ when is_tuple(Rec), not ?is_record(Rec, 'diameter_base_answer-message') ->
+ D = case element(1, Rec) of
+ diameter_base_accounting_ACA ->
+ diameter_gen_base_accounting;
+ _ ->
+ diameter_gen_base_rfc3588
+ end,
+ [R | Values] = D:'#get-'(Rec),
+ "diameter_base_" ++ N = ?L(R),
+ Name = ?A("nas_" ++ if N == "accounting_ACA" ->
+ "ACA";
+ true ->
+ N
+ end),
+ nas4005:'#new-'([Name | Values]);
+
+base_to_nas(Msg, _) ->
+ Msg.
+
%% request/3
%% send_experimental_result
-request(#diameter_base_accounting_ACR{'Accounting-Record-Number' = 5},
+request(['ACR' | #{'Accounting-Record-Number' := 5}],
[Hdr | Avps],
#diameter_caps{origin_host = {OH, _},
origin_realm = {OR, _}}) ->
@@ -1393,14 +1748,14 @@ request(Msg, _Avps, Caps) ->
%% request/2
%% send_nok
-request(#diameter_base_accounting_ACR{'Accounting-Record-Number' = 0},
+request(['ACR' | #{'Accounting-Record-Number' := 0}],
_) ->
{eval_packet, {protocol_error, ?INVALID_AVP_BITS}, [fun log/2, invalid]};
%% send_bad_answer
-request(#diameter_base_accounting_ACR{'Session-Id' = SId,
- 'Accounting-Record-Type' = RT,
- 'Accounting-Record-Number' = 2 = RN},
+request(['ACR' | #{'Session-Id' := SId,
+ 'Accounting-Record-Type' := RT,
+ 'Accounting-Record-Number' := 2 = RN}],
#diameter_caps{origin_host = {OH, _},
origin_realm = {OR, _}}) ->
Ans = ['ACA', {'Result-Code', ?SUCCESS},
@@ -1414,9 +1769,9 @@ request(#diameter_base_accounting_ACR{'Session-Id' = SId,
msg = Ans}};
%% send_eval
-request(#diameter_base_accounting_ACR{'Session-Id' = SId,
- 'Accounting-Record-Type' = RT,
- 'Accounting-Record-Number' = 3 = RN},
+request(['ACR' | #{'Session-Id' := SId,
+ 'Accounting-Record-Type' := RT,
+ 'Accounting-Record-Number' := 3 = RN}],
#diameter_caps{origin_host = {OH, _},
origin_realm = {OR, _}}) ->
Ans = ['ACA', {'Result-Code', ?SUCCESS},
@@ -1428,9 +1783,9 @@ request(#diameter_base_accounting_ACR{'Session-Id' = SId,
{eval, {reply, Ans}, {erlang, now, []}};
%% send_ok
-request(#diameter_base_accounting_ACR{'Session-Id' = SId,
- 'Accounting-Record-Type' = RT,
- 'Accounting-Record-Number' = 1 = RN},
+request(['ACR' | #{'Session-Id' := SId,
+ 'Accounting-Record-Type' := RT,
+ 'Accounting-Record-Number' := 1 = RN}],
#diameter_caps{origin_host = {OH, _},
origin_realm = {OR, _}}) ->
{reply, ['ACA', {'Result-Code', ?SUCCESS},
@@ -1441,48 +1796,69 @@ request(#diameter_base_accounting_ACR{'Session-Id' = SId,
{'Accounting-Record-Number', RN}]};
%% send_protocol_error
-request(#diameter_base_accounting_ACR{'Accounting-Record-Number' = 4},
+request(['ACR' | #{'Accounting-Record-Number' := 4}],
#diameter_caps{origin_host = {OH, _},
origin_realm = {OR, _}}) ->
+ %% Include a DOIC AVP that will be encoded/decoded because of
+ %% avp_dictionaries config.
+ OLR = #{'OC-Sequence-Number' => 1,
+ 'OC-Report-Type' => 0, %% HOST_REPORT
+ 'OC-Reduction-Percentage' => [25],
+ 'OC-Validity-Duration' => [60],
+ 'AVP' => [{'OC-Supported-Features', []}]},
+ %% Include a NAS Failed-AVP AVP that will only be decoded under
+ %% that application. Encode as 'AVP' since RFC 3588 doesn't list
+ %% Failed-AVP in the answer-message grammar while RFC 6733 does.
+ NP = #diameter_avp{data = {nas4005, 'NAS-Port', 44}},
+ FR = #diameter_avp{name = 'Firmware-Revision', value = 12}, %% M=0
+ AP = #diameter_avp{name = 'Auth-Grace-Period', value = 13}, %% M=1
+ Failed = #diameter_avp{data = {diameter_gen_base_rfc3588,
+ 'Failed-AVP',
+ [{'AVP', [NP,FR,AP]}]}},
Ans = ['answer-message', {'Result-Code', ?TOO_BUSY},
{'Origin-Host', OH},
- {'Origin-Realm', OR}],
+ {'Origin-Realm', OR},
+ {'AVP', [{'OC-OLR', OLR}, Failed]}],
{reply, Ans};
-request(#diameter_base_ASR{'Session-Id' = SId,
- 'AVP' = Avps},
+%% send_proxy_info
+request(['ASR' | #{'Proxy-Info' := _}],
+ _) ->
+ {protocol_error, 3999};
+
+request(['ASR' | #{'Session-Id' := SId} = Avps],
#diameter_caps{origin_host = {OH, _},
origin_realm = {OR, _}}) ->
{reply, ['ASA', {'Result-Code', ?SUCCESS},
{'Session-Id', SId},
{'Origin-Host', OH},
{'Origin-Realm', OR},
- {'AVP', Avps}]};
+ {'AVP', maps:get('AVP', Avps, [])}]};
%% send_invalid_reject
-request(#diameter_base_STR{'Termination-Cause' = ?USER_MOVED},
+request(['STR' | #{'Termination-Cause' := ?USER_MOVED}],
_Caps) ->
{protocol_error, ?TOO_BUSY};
%% send_noreply
-request(#diameter_base_STR{'Termination-Cause' = T},
+request(['STR' | #{'Termination-Cause' := T}],
_Caps)
when T /= ?LOGOUT ->
discard;
%% send_destination_5
-request(#diameter_base_STR{'Destination-Realm' = R},
+request(['STR' | #{'Destination-Realm' := R}],
#diameter_caps{origin_realm = {OR, _}})
when R /= undefined, R /= OR ->
{protocol_error, ?REALM_NOT_SERVED};
%% send_destination_6
-request(#diameter_base_STR{'Destination-Host' = [H]},
+request(['STR' | #{'Destination-Host' := [H]}],
#diameter_caps{origin_host = {OH, _}})
when H /= OH ->
{protocol_error, ?UNABLE_TO_DELIVER};
-request(#diameter_base_STR{'Session-Id' = SId},
+request(['STR' | #{'Session-Id' := SId}],
#diameter_caps{origin_host = {OH, _},
origin_realm = {OR, _}}) ->
{reply, ['STA', {'Result-Code', ?SUCCESS},
@@ -1491,7 +1867,7 @@ request(#diameter_base_STR{'Session-Id' = SId},
{'Origin-Realm', OR}]};
%% send_error/send_timeout
-request(#diameter_base_RAR{}, _Caps) ->
+request(['RAR' | #{}], _Caps) ->
receive after 2000 -> {protocol_error, ?TOO_BUSY} end.
%% message/3
@@ -1505,8 +1881,8 @@ message(Dir, #diameter_packet{bin = Bin}, N) ->
message(Dir, Bin, N);
%% incoming request
-message(recv, <<_:32, 1, _/bits>> = Bin, N) ->
- [Bin, 1 < N, fun ?MODULE:message/3, N-1];
+message(recv, <<_:32, 1:1, _/bits>> = Bin, N) ->
+ [Bin, N < 16, fun ?MODULE:message/3, N+1];
%% incoming answer
message(recv, Bin, _) ->
@@ -1517,9 +1893,35 @@ message(send, Bin, _) ->
[Bin];
%% sent request
-message(ack, <<_:32, 1, _/bits>>, _) ->
+message(ack, <<_:32, 1:1, _/bits>>, _) ->
[];
%% sent answer or discarded request
message(ack, _, N) ->
- [0 =< N, fun ?MODULE:message/3, N+1].
+ [N =< 16, fun ?MODULE:message/3, N-1].
+
+%% ------------------------------------------------------------------------
+
+compile_and_load() ->
+ try
+ Path = hd([P || H <- [[here(), ".."], [code:lib_dir(diameter)]],
+ P <- [filename:join(H ++ ["examples",
+ "dict",
+ "rfc4005_nas.dia"])],
+ {ok, _} <- [file:read_file_info(P)]]),
+ {ok, [Forms]}
+ = diameter_make:codec(Path, [return,
+ forms,
+ {name, "nas4005"},
+ {prefix, "nas"},
+ {inherits, "common/diameter_gen_base_rfc3588"}]),
+ {ok, nas4005, Bin, []} = compile:forms(Forms, [debug_info, return]),
+ {module, nas4005} = code:load_binary(nas4005, "nas4005", Bin),
+ true
+ catch
+ E:R ->
+ {E, R, erlang:get_stacktrace()}
+ end.
+
+here() ->
+ filename:dirname(code:which(?MODULE)).
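The Origin-State-Id that tells the server how a client was configured packs the decode and encode choices into one integer: origin({D,E}) is 4*decode(D) + encode(E), so the low two bits carry the encoding and the remaining bits the decoding, which works because encode/1 never exceeds 2. A small sketch of the round trip, with pack/unpack as stand-ins for origin/1:

    %% decode(record_from_map) = 4, encode(map) = 2, so the id is 18,
    %% and 18 recovers {4, 2} = {record_from_map, map}.
    pack({D, E}) -> 4*D + E.
    unpack(N)    -> {N bsr 2, N rem 4}.
    %% unpack(pack({4, 2})) =:= {4, 2}.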
diff --git a/lib/diameter/test/diameter_transport_SUITE.erl b/lib/diameter/test/diameter_transport_SUITE.erl
index 9d981d0a2b..284d2b9566 100644
--- a/lib/diameter/test/diameter_transport_SUITE.erl
+++ b/lib/diameter/test/diameter_transport_SUITE.erl
@@ -349,35 +349,40 @@ rand_bytes(N) ->
%% start_connect/3
start_connect(Prot, PortNr, Ref) ->
- {ok, TPid, [?ADDR]} = start_connect(Prot,
- {connect, Ref},
- ?SVC([]),
- [{raddr, ?ADDR},
- {rport, PortNr},
- {ip, ?ADDR},
- {port, 0}]),
- ?RECV(?TMSG({TPid, connected, _})),
+ {ok, TPid} = start_connect(Prot,
+ {connect, Ref},
+ ?SVC([]),
+ [{raddr, ?ADDR},
+ {rport, PortNr},
+ {ip, ?ADDR},
+ {port, 0}]),
+ connected(Prot, TPid),
TPid.
+connected(sctp, TPid) ->
+ ?RECV(?TMSG({TPid, connected, _}));
+connected(tcp, TPid) ->
+ ?RECV(?TMSG({TPid, connected, _, [?ADDR]})).
+
start_connect(sctp, T, Svc, Opts) ->
- diameter_sctp:start(T, Svc, [{sctp_initmsg, ?SCTP_INIT} | Opts]);
+ {ok, TPid, [?ADDR]}
+ = diameter_sctp:start(T, Svc, [{sctp_initmsg, ?SCTP_INIT} | Opts]),
+ {ok, TPid};
start_connect(tcp, T, Svc, Opts) ->
diameter_tcp:start(T, Svc, Opts).
%% start_accept/2
start_accept(Prot, Ref) ->
- {Mod, Opts} = tmod(Prot),
- {ok, TPid, [?ADDR]} = Mod:start({accept, Ref},
- ?SVC([?ADDR]),
- [{port, 0} | Opts]),
+ {ok, TPid, [?ADDR]}
+ = start_accept(Prot, {accept, Ref}, ?SVC([?ADDR]), [{port, 0}]),
?RECV(?TMSG({TPid, connected})),
TPid.
-tmod(sctp) ->
- {diameter_sctp, [{sctp_initmsg, ?SCTP_INIT}]};
-tmod(tcp) ->
- {diameter_tcp, []}.
+start_accept(sctp, T, Svc, Opts) ->
+ diameter_sctp:start(T, Svc, [{sctp_initmsg, ?SCTP_INIT} | Opts]);
+start_accept(tcp, T, Svc, Opts) ->
+ diameter_tcp:start(T, Svc, Opts).
%% ===========================================================================
diff --git a/lib/diameter/test/diameter_util.erl b/lib/diameter/test/diameter_util.erl
index 03f79096ac..d249b0e4fa 100644
--- a/lib/diameter/test/diameter_util.erl
+++ b/lib/diameter/test/diameter_util.erl
@@ -32,7 +32,8 @@
foldl/3,
scramble/1,
unique_string/0,
- have_sctp/0]).
+ have_sctp/0,
+ eprof/1]).
%% diameter-specific
-export([lport/2,
@@ -48,6 +49,16 @@
-define(L, atom_to_list).
+%% ---------------------------------------------------------------------------
+
+eprof(start) ->
+ eprof:start(),
+ eprof:start_profiling([self()]);
+
+eprof(stop) ->
+ eprof:stop_profiling(),
+ eprof:analyze(),
+ eprof:stop().
%% ---------------------------------------------------------------------------
%% name/2
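The new eprof/1 helper starts and stops profiling of the calling process; a hedged usage sketch, where call/2 stands for whatever exchange the calling testcase wants profiled:

    %% Profile a single exchange; eprof:analyze/0 prints its report when
    %% profiling stops.
    profiled(Config, Req) ->
        diameter_util:eprof(start),
        try call(Config, Req) after diameter_util:eprof(stop) end.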
diff --git a/lib/edoc/src/edoc_doclet.erl b/lib/edoc/src/edoc_doclet.erl
index 6e17ec0af0..0e084e619e 100644
--- a/lib/edoc/src/edoc_doclet.erl
+++ b/lib/edoc/src/edoc_doclet.erl
@@ -198,7 +198,7 @@ source({M, Name, Path}, Dir, Suffix, Env, Set, Private, Hidden,
{Set, Error}
end;
R ->
- report("skipping source file '~ts': ~P.", [File, R, 15]),
+ report("skipping source file '~ts': ~tP.", [File, R, 15]),
{Set, true}
end.
diff --git a/lib/edoc/src/edoc_lib.erl b/lib/edoc/src/edoc_lib.erl
index ebdb0f79f6..d00a283794 100644
--- a/lib/edoc/src/edoc_lib.erl
+++ b/lib/edoc/src/edoc_lib.erl
@@ -541,13 +541,13 @@ uri_get_http_1(Result, URI) ->
Reason = inet:format_error(R),
{error, http_errmsg(Reason, URI)};
{ok, R} ->
- Reason = io_lib:format("bad return value ~P", [R, 5]),
+ Reason = io_lib:format("bad return value ~tP", [R, 5]),
{error, http_errmsg(Reason, URI)};
{'EXIT', R} ->
- Reason = io_lib:format("crashed with reason ~w", [R]),
+ Reason = io_lib:format("crashed with reason ~tw", [R]),
{error, http_errmsg(Reason, URI)};
R ->
- Reason = io_lib:format("uncaught throw: ~w", [R]),
+ Reason = io_lib:format("uncaught throw: ~tw", [R]),
{error, http_errmsg(Reason, URI)}
end.
@@ -603,7 +603,7 @@ filename([]) ->
filename(N) when is_atom(N) ->
atom_to_list(N);
filename(N) ->
- report("bad filename: `~P'.", [N, 25]),
+ report("bad filename: `~tP'.", [N, 25]),
exit(error).
%% @private
@@ -1000,7 +1000,7 @@ run_plugin(Name, Key, Default, Fun, Opts) when is_atom(Name) ->
{ok, Value} ->
Value;
R ->
- report("error in ~ts '~w': ~P.", [Name, Module, R, 20]),
+ report("error in ~ts '~w': ~tP.", [Name, Module, R, 20]),
exit(error)
end.
@@ -1009,7 +1009,7 @@ get_plugin(Key, Default, Opts) ->
M when is_atom(M) ->
M;
Other ->
- report("bad value for option '~w': ~P.", [Key, Other, 10]),
+ report("bad value for option '~w': ~tP.", [Key, Other, 10]),
exit(error)
end.
diff --git a/lib/edoc/src/edoc_run.erl b/lib/edoc/src/edoc_run.erl
index c88c6cfd78..50aba0a930 100644
--- a/lib/edoc/src/edoc_run.erl
+++ b/lib/edoc/src/edoc_run.erl
@@ -150,7 +150,7 @@ file(Args) ->
-spec invalid_args(string(), args()) -> no_return().
invalid_args(Where, Args) ->
- report("invalid arguments to ~ts: ~w.", [Where, Args]),
+ report("invalid arguments to ~ts: ~tw.", [Where, Args]),
shutdown_error().
run(F) ->
@@ -159,10 +159,10 @@ run(F) ->
{ok, _} ->
shutdown_ok();
{'EXIT', E} ->
- report("edoc terminated abnormally: ~P.", [E, 10]),
+ report("edoc terminated abnormally: ~tP.", [E, 10]),
shutdown_error();
Thrown ->
- report("internal error: throw without catch in edoc: ~P.",
+ report("internal error: throw without catch in edoc: ~tP.",
[Thrown, 15]),
shutdown_error()
end.
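The switch from ~P/~w to ~tP/~tw adds the Unicode translation modifier, so reported terms containing non-Latin-1 text can be printed as text rather than escaped, subject to the printable-range setting and the output device. A minimal sketch of the difference, where report_term is a hypothetical helper and not part of edoc:

    %% ~P takes the term and a depth; the t modifier makes it Unicode-aware.
    report_term(T) ->
        io:format("escaped:  ~P~n", [T, 10]),
        io:format("readable: ~tP~n", [T, 10]).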
diff --git a/lib/edoc/src/edoc_types.erl b/lib/edoc/src/edoc_types.erl
index ccc3169767..510f9513b2 100644
--- a/lib/edoc/src/edoc_types.erl
+++ b/lib/edoc/src/edoc_types.erl
@@ -107,7 +107,7 @@ to_xml(#t_paren{type = T}, Env) ->
to_xml(#t_nonempty_list{type = T}, Env) ->
{nonempty_list, [wrap_utype(T, Env)]};
to_xml(#t_atom{val = V}, _Env) ->
- {atom, [{value, io_lib:write(V)}], []};
+ {atom, [{value, atom_to_list(V)}], []};
to_xml(#t_integer{val = V}, _Env) ->
{integer, [{value, integer_to_list(V)}], []};
to_xml(#t_integer_range{from = From, to = To}, _Env) ->
diff --git a/lib/erl_docgen/priv/xsl/db_html.xsl b/lib/erl_docgen/priv/xsl/db_html.xsl
index d863c056e9..a5e277aece 100644
--- a/lib/erl_docgen/priv/xsl/db_html.xsl
+++ b/lib/erl_docgen/priv/xsl/db_html.xsl
@@ -1803,7 +1803,7 @@
<!-- Modulesummary -->
<xsl:template match="modulesummary">
<xsl:param name="partnum"/>
- <h3><a name="module-sumary" href="#module-sumary">Module Summary</a></h3>
+ <h3><a name="module-summary" href="#module-summary">Module Summary</a></h3>
<div class="REFBODY module-summary-body">
<xsl:apply-templates>
<xsl:with-param name="partnum" select="$partnum"/>
@@ -1826,7 +1826,7 @@
<!-- Libsummary -->
<xsl:template match="libsummary">
<xsl:param name="partnum"/>
- <h3><a name="library-sumary" href="#library-sumary">Library Summary</a></h3>
+ <h3><a name="library-summary" href="#library-summary">Library Summary</a></h3>
<div class="REFBODY library-summary-body">
<xsl:apply-templates>
<xsl:with-param name="partnum" select="$partnum"/>
diff --git a/lib/erl_docgen/src/docgen_otp_specs.erl b/lib/erl_docgen/src/docgen_otp_specs.erl
index 6c41147e27..126229ecc9 100644
--- a/lib/erl_docgen/src/docgen_otp_specs.erl
+++ b/lib/erl_docgen/src/docgen_otp_specs.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2016. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -406,7 +406,7 @@ t_var(E) ->
[get_attrval(name, E)].
t_atom(E) ->
- [get_attrval(value, E)].
+ [io_lib:write(list_to_atom(get_attrval(value, E)))].
t_integer(E) ->
[get_attrval(value, E)].
@@ -578,7 +578,7 @@ ot_var(E) ->
{var,0,list_to_atom(get_attrval(name, E))}.
ot_atom(E) ->
- {ok, [{atom,A,Name}], _} = erl_scan:string(get_attrval(value, E), 0),
+ {ok, [{atom,A,Name}], _} = erl_scan:string(lists:flatten(t_atom(E)), 0),
{atom,erl_anno:line(A),Name}.
ot_integer(E) ->
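
The two hunks above move the quoting of atom values: edoc_types now stores the raw atom text (atom_to_list/1) in the XML, and docgen_otp_specs re-quotes it with io_lib:write/1 when rendering. A rough shell illustration of the two representations (not taken from the patch):

    1> atom_to_list('foo bar').
    "foo bar"
    2> lists:flatten(io_lib:write('foo bar')).
    "'foo bar'"
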
diff --git a/lib/et/src/et_collector.erl b/lib/et/src/et_collector.erl
index aba90b0be1..b0f016a8ea 100644
--- a/lib/et/src/et_collector.erl
+++ b/lib/et/src/et_collector.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2000-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2000-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -750,7 +750,7 @@ next_iterate(TH, Prev = first, Limit, Fun, Acc) ->
'$end_of_table' ->
Acc;
{'EXIT', _} = Error ->
- io:format("~p(~p): First ~p~n", [?MODULE, ?LINE, Error]),
+ io:format("~p(~p): First ~tp~n", [?MODULE, ?LINE, Error]),
iterate(TH#table_handle.collector_pid, Prev, Limit, Fun, Acc);
First ->
lookup_and_apply(TH, Prev, First, Limit, -1, Fun, Acc)
@@ -761,7 +761,7 @@ next_iterate(TH, Prev = last, Limit, Fun, Acc) ->
'$end_of_table' ->
Acc;
{'EXIT', _} = Error ->
- io:format("~p(~p): Last ~p~n", [?MODULE, ?LINE, Error]),
+ io:format("~p(~p): Last ~tp~n", [?MODULE, ?LINE, Error]),
iterate(TH#table_handle.collector_pid, Prev, Limit, Fun, Acc);
Last ->
lookup_and_apply(TH, Prev, Last, Limit, -1, Fun, Acc)
@@ -773,7 +773,7 @@ next_iterate(TH, Prev, Limit, Fun, Acc) ->
'$end_of_table' ->
Acc;
{'EXIT', _} = Error ->
- io:format("~p(~p): Next ~p -> ~p~n", [?MODULE, ?LINE, Key, Error]),
+ io:format("~p(~p): Next ~tp -> ~tp~n", [?MODULE, ?LINE, Key, Error]),
iterate(TH#table_handle.collector_pid, Prev, Limit, Fun, Acc);
Next ->
lookup_and_apply(TH, Prev, Next, Limit, -1, Fun, Acc)
@@ -785,7 +785,7 @@ prev_iterate(TH, Prev = first, Limit, Fun, Acc) ->
'$end_of_table' ->
Acc;
{'EXIT', _} = Error ->
- io:format("~p(~p): First ~p~n", [?MODULE, ?LINE, Error]),
+ io:format("~p(~p): First ~tp~n", [?MODULE, ?LINE, Error]),
iterate(TH#table_handle.collector_pid, Prev, Limit, Fun, Acc);
First ->
lookup_and_apply(TH, Prev, First, Limit, 1, Fun, Acc)
@@ -796,7 +796,7 @@ prev_iterate(TH, Prev = last, Limit, Fun, Acc) ->
'$end_of_table' ->
Acc;
{'EXIT', _} = Error ->
- io:format("~p(~p): Last ~p~n", [?MODULE, ?LINE, Error]),
+ io:format("~p(~p): Last ~tp~n", [?MODULE, ?LINE, Error]),
iterate(TH#table_handle.collector_pid, Prev, Limit, Fun, Acc);
Last ->
lookup_and_apply(TH, Prev, Last, Limit, 1, Fun, Acc)
@@ -808,7 +808,7 @@ prev_iterate(TH, Prev, Limit, Fun, Acc) ->
'$end_of_table' ->
Acc;
{'EXIT', _} = Error ->
- io:format("~p(~p): Prev ~p -> ~p~n", [?MODULE, ?LINE, Key, Error]),
+ io:format("~p(~p): Prev ~tp -> ~tp~n", [?MODULE, ?LINE, Key, Error]),
iterate(TH#table_handle.collector_pid, Prev, Limit, Fun, Acc);
Next ->
lookup_and_apply(TH, Prev, Next, Limit, 1, Fun, Acc)
@@ -1049,7 +1049,7 @@ handle_call(stop, _From, S) ->
end,
{stop, shutdown, ok, S};
handle_call(Request, From, S) ->
- ok = error_logger:format("~p(~p): handle_call(~p, ~p, ~p)~n",
+ ok = error_logger:format("~p(~p): handle_call(~tp, ~tp, ~tp)~n",
[?MODULE, self(), Request, From, S]),
reply({error, {bad_request, Request}}, S).
@@ -1061,7 +1061,7 @@ handle_call(Request, From, S) ->
%%----------------------------------------------------------------------
handle_cast(Msg, S) ->
- ok = error_logger:format("~p(~p): handle_cast(~p, ~p)~n",
+ ok = error_logger:format("~p(~p): handle_cast(~tp, ~tp)~n",
[?MODULE, self(), Msg, S]),
noreply(S).
@@ -1083,18 +1083,18 @@ handle_info({nodeup, Node}, S) ->
S2 = listen_on_trace_port(Node, Port, S),
noreply(S2);
{error, Reason} when Reason =:= already_started->
- ok = error_logger:format("~p(~p): producer ignored(~p:~p):~n ~p~n",
+ ok = error_logger:format("~p(~p): producer ignored(~p:~p):~n ~tp~n",
[?MODULE, self(), Node, Port, Reason]),
S2 = S#state{trace_port = Port + 1},
noreply(S2);
{badrpc, Reason} ->
- ok = error_logger:format("~p(~p): producer ignored(~p:~p):~n ~p~n",
+ ok = error_logger:format("~p(~p): producer ignored(~p:~p):~n ~tp~n",
[?MODULE, self(), Node, Port, Reason]),
S2 = S#state{trace_port = Port + 1},
noreply(S2);
{error, Reason} ->
self() ! {nodeup, Node},
- ok = error_logger:format("~p(~p): producer retry(~p:~p):~n ~p~n",
+ ok = error_logger:format("~p(~p): producer retry(~p:~p):~n ~tp~n",
[?MODULE, self(), Node, Port, Reason]),
S2 = S#state{trace_port = Port + 1},
noreply(S2)
@@ -1125,12 +1125,12 @@ handle_info(Info = {'EXIT', Pid, Reason}, S) ->
opt_unlink(S#state.parent_pid),
{stop, Reason, S};
false ->
- ok = error_logger:format("~p(~p): handle_info(~p, ~p)~n",
+ ok = error_logger:format("~p(~p): handle_info(~tp, ~tp)~n",
[?MODULE, self(), Info, S]),
noreply(S)
end;
handle_info(Info, S) ->
- ok = error_logger:format("~p(~p): handle_info(~p, ~p)~n",
+ ok = error_logger:format("~p(~p): handle_info(~tp, ~tp)~n",
[?MODULE, self(), Info, S]),
noreply(S).
@@ -1143,12 +1143,12 @@ listen_on_trace_port(Node, Port, S) ->
S#state{trace_nodes = [Node | S#state.trace_nodes],
trace_port = Port + 1};
{'EXIT', Reason} when Reason =:= already_started->
- ok = error_logger:format("~p(~p): consumer ignored(~p:~p): ~p~n",
+ ok = error_logger:format("~p(~p): consumer ignored(~p:~p): ~tp~n",
[?MODULE, self(), Node, Port, Reason]),
S#state{trace_port = Port + 1};
{'EXIT', Reason} ->
self() ! {nodeup, Node},
- ok = error_logger:format("~p(~p): consumer retry(~p:~p):~n ~p~n",
+ ok = error_logger:format("~p(~p): consumer retry(~p:~p):~n ~tp~n",
[?MODULE, self(), Node, Port, Reason]),
S#state{trace_port = Port + 1}
end.
@@ -1247,7 +1247,7 @@ file_open(F) ->
{ok, _} ->
{ok, Fd};
{repaired, _, _, BadBytes} ->
- ok = error_logger:format("~p: Skipped ~p bad bytes in file: ~p~n",
+ ok = error_logger:format("~p: Skipped ~p bad bytes in file: ~tp~n",
[?MODULE, BadBytes, F#file.name]),
{ok, Fd};
{error,Reason} ->
diff --git a/lib/et/src/et_selector.erl b/lib/et/src/et_selector.erl
index a0297c21d1..35db07cd99 100644
--- a/lib/et/src/et_selector.erl
+++ b/lib/et/src/et_selector.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2001-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2001-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -208,7 +208,7 @@ parse_event(Mod, Trace) ->
{to, undefined},
{drop, NumberOfDroppedItems}]}};
_ ->
- error_logger:format("~p(~p): Ignoring unknown trace type -> ~p~n~n",
+ error_logger:format("~p(~p): Ignoring unknown trace type -> ~tp~n~n",
[?MODULE, ?LINE, Trace]),
false
end.
@@ -258,7 +258,7 @@ parse_seq_event(Trace, ParsedTS, ReportedTS, Label, Info) ->
{serial, Serial},
{user_info, UserInfo}]}};
_ ->
- error_logger:format("~p(~p): Ignoring unknown trace type -> ~p~n~n",
+ error_logger:format("~p(~p): Ignoring unknown trace type -> ~tp~n~n",
[?MODULE, ?LINE, Trace]),
false
end.
@@ -590,7 +590,7 @@ parse_event(Mod, Trace, ParsedTS, ReportedTS, From, Label, Contents) ->
{to, From},
{gc_items, GcKeyValueList}]}};
_ ->
- error_logger:format("~p(~p): Ignoring unknown trace type -> ~p~n~n",
+ error_logger:format("~p(~p): Ignoring unknown trace type -> ~tp~n~n",
[?MODULE, ?LINE, Trace]),
false
end.
diff --git a/lib/et/src/et_wx_contents_viewer.erl b/lib/et/src/et_wx_contents_viewer.erl
index 247dd4c7ba..bca517317e 100644
--- a/lib/et/src/et_wx_contents_viewer.erl
+++ b/lib/et/src/et_wx_contents_viewer.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2000-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2000-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -213,7 +213,7 @@ init([S]) when is_record(S, state) ->
%%----------------------------------------------------------------------
handle_call(Request, From, S) ->
- ok = error_logger:format("~p(~p): handle_call(~p, ~p, ~p)~n",
+ ok = error_logger:format("~p(~p): handle_call(~tp, ~tp, ~tp)~n",
[?MODULE, self(), Request, From, S]),
Reply = {error, {bad_request, Request}},
{reply, Reply, S}.
@@ -226,7 +226,7 @@ handle_call(Request, From, S) ->
%%----------------------------------------------------------------------
handle_cast(Msg, S) ->
- ok = error_logger:format("~p(~p): handle_cast(~p, ~p)~n",
+ ok = error_logger:format("~p(~p): handle_cast(~tp, ~tp)~n",
[?MODULE, self(), Msg, S]),
{noreply, S}.
@@ -272,10 +272,11 @@ handle_event(#wx{id = Id,
end,
FileName = lists:flatten(["et_contents_viewer_", now_to_string(TimeStamp), ".txt"]),
Style = ?wxFD_SAVE bor ?wxFD_OVERWRITE_PROMPT,
- Msg = "Select a file to the events to",
+ Msg = "Select a file to save events to",
case select_file(S#state.frame, Msg, filename:absname(FileName), Style) of
{ok, FileName2} ->
- Bin = list_to_binary(event_to_string(Event, S#state.event_order)),
+ EventString = event_to_string(Event, S#state.event_order),
+ Bin = unicode:characters_to_binary(EventString),
ok = file:write_file(FileName2, Bin);
cancel ->
ok
@@ -381,7 +382,7 @@ handle_event(#wx{event = #wxSize{size = {W, H}}}, S) ->
S2 = S#state{width = W, height = H},
{noreply, S2};
handle_event(Wx = #wx{}, S) ->
- io:format("~p got an unexpected event: ~p\n", [self(), Wx]),
+ io:format("~p got an unexpected event: ~tp\n", [self(), Wx]),
{noreply, S}.
%%----------------------------------------------------------------------
@@ -405,7 +406,7 @@ handle_info({'EXIT', Pid, Reason}, S) ->
{noreply, S}
end;
handle_info(Info, S) ->
- ok = error_logger:format("~p(~p): handle_info(~p, ~p)~n",
+ ok = error_logger:format("~p(~p): handle_info(~tp, ~tp)~n",
[?MODULE, self(), Info, S]),
{noreply, S}.
@@ -606,8 +607,8 @@ do_config_editor(Editor, Event, _Colour, TsKey) ->
%%%----------------------------------------------------------------------
term_to_string(Term) ->
- case catch io_lib:format("~s", [Term]) of
- {'EXIT', _} -> io_lib:format("~p", [Term]);
+ case catch io_lib:format("~ts", [Term]) of
+ {'EXIT', _} -> io_lib:format("~tp", [Term]);
GoodString -> GoodString
end.
@@ -659,7 +660,7 @@ pad_string(Int, MinLen, Char, Dir) when is_integer(Int) ->
pad_string(Atom, MinLen, Char, Dir) when is_atom(Atom) ->
pad_string(atom_to_list(Atom), MinLen, Char, Dir);
pad_string(String, MinLen, Char, Dir) when is_integer(MinLen), MinLen >= 0 ->
- Len = length(String),
+ Len = string:length(String),
case {Len >= MinLen, Dir} of
{true, _} ->
String;
diff --git a/lib/et/src/et_wx_viewer.erl b/lib/et/src/et_wx_viewer.erl
index 9613299e6b..4dd44e7a4c 100644
--- a/lib/et/src/et_wx_viewer.erl
+++ b/lib/et/src/et_wx_viewer.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2000-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2000-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -352,7 +352,7 @@ handle_call({open_event, N}, _From, S) when is_integer(N), N > 0->
Reply = do_open_event(S, N),
reply(Reply, S);
handle_call(Request, From, S) ->
- ok = error_logger:format("~p(~p): handle_call(~p, ~p, ~p)~n",
+ ok = error_logger:format("~p(~p): handle_call(~tp, ~tp, ~tp)~n",
[?MODULE, self(), Request, From, S]),
Reply = {error, {bad_request, Request}},
reply(Reply, S).
@@ -365,7 +365,7 @@ handle_call(Request, From, S) ->
%%----------------------------------------------------------------------
handle_cast(Msg, S) ->
- ok = error_logger:format("~p(~p): handle_cast(~p, ~p)~n",
+ ok = error_logger:format("~p(~p): handle_cast(~tp, ~tp)~n",
[?MODULE, self(), Msg, S]),
noreply(S).
@@ -803,7 +803,7 @@ handle_info(timeout, S) ->
handle_info({'EXIT', Pid, Reason}, S) ->
if
Pid =:= S#state.collector_pid ->
- io:format("collector died: ~p\n\n", [Reason]),
+ io:format("collector died: ~tp\n\n", [Reason]),
wxFrame:destroy(S#state.frame),
{stop, Reason, S};
Pid =:= S#state.parent_pid ->
@@ -853,10 +853,10 @@ handle_info(#wx{event = #wxPaint{}}, S) ->
S2 = refresh_main_window(S),
noreply(S2);
handle_info(#wx{event = #wxMouse{type = T, x=X,y=Y}}, S) ->
- io:format("~p ~p\n", [T, {X,Y}]),
+ io:format("~tp ~tp\n", [T, {X,Y}]),
noreply(S);
handle_info(Info, S) ->
- ok = error_logger:format("~p(~p): handle_info(~p, ~p)~n",
+ ok = error_logger:format("~p(~p): handle_info(~tp, ~tp)~n",
[?MODULE, self(), Info, S]),
noreply(S).
@@ -1162,7 +1162,7 @@ open_viewer(Scale, FilterName, Actors, S) ->
%% unlink(ViewerPid),
ok;
{error, Reason} ->
- ok = error_logger:format("~p: Failed to start a new window: ~p~n",
+ ok = error_logger:format("~p: Failed to start a new window: ~tp~n",
[?MODULE, Reason])
end.
@@ -1393,7 +1393,7 @@ create_filter_menu(S=#state{filter_menu = {Menu,Data}}, ActiveFilterName, Filter
wxMenu:delete(Menu,I)
catch
_:Reason ->
- io:format("Could not delete item: ~p, because ~p.\n", [I, Reason])
+ io:format("Could not delete item: ~tp, because ~tp.\n", [I, Reason])
end
end,
Data),
@@ -1872,7 +1872,7 @@ create_contents_window(Event, {S, Res}) ->
{ok, Pid} ->
{S, [{ok, Pid} | Res]};
{error, Reason} ->
- ok = error_logger:format("~p(~p): create_contents_window(~p) ->~n ~p~n",
+ ok = error_logger:format("~p(~p): create_contents_window(~tp) ->~n ~tp~n",
[?MODULE, self(), Options, Reason]),
{S, [{error, Reason} | Res]};
Stuff ->
@@ -2069,15 +2069,15 @@ create_actor(Name) ->
#actor{name = Name, string = String, include = false, exclude = false}.
name_to_string(Name) ->
- case catch io_lib:format("~s", [Name]) of
- {'EXIT', _} -> lists:flatten(io_lib:format("~w", [Name]));
+ case catch io_lib:format("~ts", [Name]) of
+ {'EXIT', _} -> lists:flatten(io_lib:format("~tw", [Name]));
GoodString -> lists:flatten(GoodString)
end.
pad_string(Atom, MinLen) when is_atom(Atom) ->
pad_string(atom_to_list(Atom), MinLen);
pad_string(String, MinLen) when is_integer(MinLen), MinLen >= 0 ->
- Len = length(String),
+ Len = string:length(String),
case Len >= MinLen of
true ->
String;
diff --git a/lib/eunit/src/eunit_lib.erl b/lib/eunit/src/eunit_lib.erl
index aa2cffc66d..e340f50a3c 100644
--- a/lib/eunit/src/eunit_lib.erl
+++ b/lib/eunit/src/eunit_lib.erl
@@ -107,7 +107,7 @@ format_stacktrace(Trace) ->
format_stacktrace(Trace, "in function", "in call from").
format_stacktrace([{M,F,A,L}|Fs], Pre, Pre1) when is_integer(A) ->
- [io_lib:fwrite("~ts ~w:~w/~w~ts\n",
+ [io_lib:fwrite("~ts ~w:~tw/~w~ts\n",
[Pre, M, F, A, format_stacktrace_location(L)])
| format_stacktrace(Fs, Pre1, Pre1)];
format_stacktrace([{M,F,As,L}|Fs], Pre, Pre1) when is_list(As) ->
@@ -121,9 +121,9 @@ format_stacktrace([{M,F,As,L}|Fs], Pre, Pre1) when is_list(As) ->
io_lib:fwrite("~ts ~ts ~ts",
[format_arg(A1),F,format_arg(A2)]);
false ->
- io_lib:fwrite("~w(~ts)", [F,format_arglist(As)])
+ io_lib:fwrite("~tw(~ts)", [F,format_arglist(As)])
end,
- [io_lib:fwrite("~ts ~w:~w/~w~ts\n called as ~ts\n",
+ [io_lib:fwrite("~ts ~w:~tw/~w~ts\n called as ~ts\n",
[Pre,M,F,A,format_stacktrace_location(L),C])
| format_stacktrace(Fs,Pre1,Pre1)];
format_stacktrace([{M,F,As}|Fs], Pre, Pre1) ->
@@ -162,15 +162,15 @@ is_op(_M, _F, _A) ->
format_error({bad_test, Term}) ->
error_msg("bad test descriptor", "~tP", [Term, 15]);
format_error({bad_generator, {{M,F,A}, Term}}) ->
- error_msg(io_lib:format("result from generator ~w:~w/~w is not a test",
+ error_msg(io_lib:format("result from generator ~w:~tw/~w is not a test",
[M,F,A]),
"~tP", [Term, 15]);
format_error({generator_failed, {{M,F,A}, Exception}}) ->
- error_msg(io_lib:format("test generator ~w:~w/~w failed",[M,F,A]),
+ error_msg(io_lib:format("test generator ~w:~tw/~w failed",[M,F,A]),
"~ts", [format_exception(Exception)]);
format_error({no_such_function, {M,F,A}})
when is_atom(M), is_atom(F), is_integer(A) ->
- error_msg(io_lib:format("no such function: ~w:~w/~w", [M,F,A]),
+ error_msg(io_lib:format("no such function: ~w:~tw/~w", [M,F,A]),
"", []);
format_error({module_not_found, M}) ->
error_msg("test module not found", "~tp", [M]);
@@ -185,7 +185,7 @@ format_error({cleanup_failed, Exception}) ->
error_msg("context cleanup failed", "~ts",
[format_exception(Exception)]);
format_error({{bad_instantiator, {{M,F,A}, Term}}, _DummyException}) ->
- error_msg(io_lib:format("result from instantiator ~w:~w/~w is not a test",
+ error_msg(io_lib:format("result from instantiator ~w:~tw/~w is not a test",
[M,F,A]),
"~tP", [Term, 15]);
format_error({instantiation_failed, Exception}) ->
@@ -384,11 +384,9 @@ fun_parent(F) ->
{arity, A} = erlang:fun_info(F, arity),
{M, N, A};
{type, local} ->
- [$-|S] = atom_to_list(N),
- C1 = string:chr(S, $/),
- C2 = string:chr(S, $-),
- {M, list_to_atom(string:sub_string(S, 1, C1 - 1)),
- list_to_integer(string:sub_string(S, C1 + 1, C2 - 1))}
+ [$-|S] = atom_to_list(N),
+ [S2, T] = string:split(S, "/", trailing),
+ {M, list_to_atom(S2), element(1, string:to_integer(T))}
end.
-ifdef(TEST).
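
The rewritten fun_parent/1 clause above parses the compiler-generated name of a local fun with string:split/3 and string:to_integer/1 instead of the removed string:chr/2 and string:sub_string/3 calls. A sketch of that parsing, assuming a representative generated name:

    %% Local funs get names of the form '-parent/1-fun-0-' (assumed here);
    %% the parent function name and arity are recovered like this:
    [$- | S] = atom_to_list('-parent/1-fun-0-'),
    [Name, T] = string:split(S, "/", trailing),   %% ["parent", "1-fun-0-"]
    {Arity, _Rest} = string:to_integer(T),        %% {1, "-fun-0-"}
    {list_to_atom(Name), Arity}.                  %% {parent, 1}
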
diff --git a/lib/eunit/src/eunit_tty.erl b/lib/eunit/src/eunit_tty.erl
index 77a7cf1fd5..2c9a598628 100644
--- a/lib/eunit/src/eunit_tty.erl
+++ b/lib/eunit/src/eunit_tty.erl
@@ -235,7 +235,7 @@ print_test_error({skipped, Reason}, _) ->
format_skipped({module_not_found, M}) ->
io_lib:fwrite("missing module: ~w", [M]);
format_skipped({no_such_function, {M,F,A}}) ->
- io_lib:fwrite("no such function: ~w:~w/~w", [M,F,A]).
+ io_lib:fwrite("no such function: ~w:~tw/~w", [M,F,A]).
print_test_cancel(Reason) ->
fwrite(format_cancel(Reason)).
diff --git a/lib/hipe/cerl/cerl_cconv.erl b/lib/hipe/cerl/cerl_cconv.erl
index 122e6ef039..2cd0e261d5 100644
--- a/lib/hipe/cerl/cerl_cconv.erl
+++ b/lib/hipe/cerl/cerl_cconv.erl
@@ -258,7 +258,7 @@ bind_module_defs([], Env, S) ->
check_function_name(Name, S) ->
case s__is_function_name(Name, S) of
true ->
- error_msg("multiple definitions of function `~w'.", [Name]),
+ error_msg("multiple definitions of function `~tw'.", [Name]),
exit(error);
false ->
ok
diff --git a/lib/hipe/cerl/erl_types.erl b/lib/hipe/cerl/erl_types.erl
index 0883a69918..abb6c259f6 100644
--- a/lib/hipe/cerl/erl_types.erl
+++ b/lib/hipe/cerl/erl_types.erl
@@ -74,6 +74,7 @@
t_form_to_string/1,
t_from_form/6,
t_from_form_without_remote/3,
+ t_from_form_check_remote/4,
t_check_record_fields/6,
t_from_range/2,
t_from_range_unsafe/2,
@@ -1629,8 +1630,8 @@ lift_list_to_pos_empty(?list(Content, Termination, _)) ->
%% * The keys in Pairs are singleton types.
%% * The values of Pairs must not be unit, and may only be none if the
%% mandatoriness tag is 'optional'.
-%% * Optional must contain no pair {K,V} s.t. K is a subtype of DefaultKey and
-%% V is equal to DefaultKey.
+%% * There is no pair {K, 'optional', V} in Pairs s.t.
+%% K is a subtype of DefaultKey and V is equal to DefaultValue.
%% * DefaultKey must be the empty type iff DefaultValue is the empty type.
%% * DefaultKey must not be a singleton type.
%% * For every key K in Pairs, DefaultKey - K must not be representable; i.e.
@@ -4471,7 +4472,7 @@ t_from_form(Form, ExpTypes, Site, RecDict, VarTab, Cache) ->
%% Replace external types with with none().
-spec t_from_form_without_remote(parse_form(), site(), type_table()) ->
- {erl_type(), cache()}.
+ erl_type().
t_from_form_without_remote(Form, Site, TypeTable) ->
Module = site_module(Site),
@@ -4480,38 +4481,57 @@ t_from_form_without_remote(Form, Site, TypeTable) ->
VarTab = var_table__new(),
Cache0 = cache__new(),
Cache = Cache0#cache{mod_recs = {mrecs, ModRecs}},
- t_from_form1(Form, ExpTypes, Site, undefined, VarTab, Cache).
-
-%% REC_TYPE_LIMIT is used for limiting the depth of recursive types.
-%% EXPAND_LIMIT is used for limiting the size of types by
-%% limiting the number of elements of lists within one type form.
-%% EXPAND_DEPTH is used in conjunction with EXPAND_LIMIT to make the
-%% types balanced (unions will otherwise collapse to any()) by limiting
-%% the depth the same way as t_limit/2 does.
+ {Type, _} = t_from_form1(Form, ExpTypes, Site, undefined, VarTab, Cache),
+ Type.
-type expand_limit() :: integer().
-type expand_depth() :: integer().
--record(from_form, {site :: site(),
+-record(from_form, {site :: site() | {'check', mta()},
xtypes :: sets:set(mfa()) | 'replace_by_none',
mrecs :: 'undefined' | mod_type_table(),
vtab :: var_table(),
tnames :: type_names()}).
+-spec t_from_form_check_remote(parse_form(), sets:set(mfa()), mta(),
+ mod_type_table()) -> 'ok'.
+t_from_form_check_remote(Form, ExpTypes, MTA, RecDict) ->
+ State = #from_form{site = {check, MTA},
+ xtypes = ExpTypes,
+ mrecs = RecDict,
+ vtab = var_table__new(),
+ tnames = []},
+ D = (1 bsl 25), % unlimited
+ L = (1 bsl 25),
+ Cache0 = cache__new(),
+ _ = t_from_form2(Form, State, D, L, Cache0),
+ ok.
+
+%% REC_TYPE_LIMIT is used for limiting the depth of recursive types.
+%% EXPAND_LIMIT is used for limiting the size of types by
+%% limiting the number of elements of lists within one type form.
+%% EXPAND_DEPTH is used in conjunction with EXPAND_LIMIT to make the
+%% types balanced (unions will otherwise collapse to any()) by limiting
+%% the depth the same way as t_limit/2 does.
+
-spec t_from_form1(parse_form(), sets:set(mfa()) | 'replace_by_none',
site(), 'undefined' | mod_type_table(), var_table(),
cache()) -> {erl_type(), cache()}.
t_from_form1(Form, ET, Site, MR, V, C) ->
TypeNames = initial_typenames(Site),
+ D = ?EXPAND_DEPTH,
+ L = ?EXPAND_LIMIT,
State = #from_form{site = Site,
xtypes = ET,
mrecs = MR,
vtab = V,
tnames = TypeNames},
- L = ?EXPAND_LIMIT,
- {T0, L0, C0} = from_form(Form, State, ?EXPAND_DEPTH, L, C),
+ t_from_form2(Form, State, D, L, C).
+
+t_from_form2(Form, State, D, L, C) ->
+ {T0, L0, C0} = from_form(Form, State, D, L, C),
if
L0 =< 0 ->
{T1, _, C1} = from_form(Form, State, 1, L, C0),
@@ -4655,7 +4675,8 @@ from_form({type, _L, map, List}, S, D0, L, C) ->
end
end(List, L, C),
try
- {Pairs, DefK, DefV} = map_from_form(Pairs1, [], [], [], ?none, ?none),
+ Pairs2 = singleton_elements(Pairs1),
+ {Pairs, DefK, DefV} = map_from_form(Pairs2, [], [], [], ?none, ?none),
{t_map(Pairs, DefK, DefV), L5, C5}
catch none -> {t_none(), L5, C5}
end;
@@ -4767,14 +4788,18 @@ type_from_form(Name, Args, S, D, L, C) ->
case can_unfold_more(TypeName, TypeNames) of
true ->
{R, C1} = lookup_module_types(Module, MR, C),
- type_from_form1(Name, Args, ArgsLen, R, TypeName, TypeNames,
+ type_from_form1(Name, Args, ArgsLen, R, TypeName, TypeNames, Site,
S, D, L, C1);
false ->
{t_any(), L, C}
end.
-type_from_form1(Name, Args, ArgsLen, R, TypeName, TypeNames, S, D, L, C) ->
+type_from_form1(Name, Args, ArgsLen, R, TypeName, TypeNames, Site,
+ S, D, L, C) ->
case lookup_type(Name, ArgsLen, R) of
+ {_, {_, _}} when element(1, Site) =:= check ->
+ {_ArgTypes, L1, C1} = list_from_form(Args, S, D, L, C),
+ {t_any(), L1, C1};
{Tag, {{Module, _FileName, Form, ArgNames}, Type}} ->
NewTypeNames = [TypeName|TypeNames],
S1 = S#from_form{tnames = NewTypeNames},
@@ -4813,7 +4838,7 @@ type_from_form1(Name, Args, ArgsLen, R, TypeName, TypeNames, S, D, L, C) ->
end.
remote_from_form(RemMod, Name, Args, S, D, L, C) ->
- #from_form{xtypes = ET, mrecs = MR, tnames = TypeNames} = S,
+ #from_form{site = Site, xtypes = ET, mrecs = MR, tnames = TypeNames} = S,
if
ET =:= replace_by_none ->
{t_none(), L, C};
@@ -4831,7 +4856,7 @@ remote_from_form(RemMod, Name, Args, S, D, L, C) ->
case can_unfold_more(RemType, TypeNames) of
true ->
remote_from_form1(RemMod, Name, Args, ArgsLen, RemDict,
- RemType, TypeNames, S, D, L, C1);
+ RemType, TypeNames, Site, S, D, L, C1);
false ->
{t_any(), L, C1}
end;
@@ -4843,14 +4868,16 @@ remote_from_form(RemMod, Name, Args, S, D, L, C) ->
end.
remote_from_form1(RemMod, Name, Args, ArgsLen, RemDict, RemType, TypeNames,
- S, D, L, C) ->
+ Site, S, D, L, C) ->
case lookup_type(Name, ArgsLen, RemDict) of
+ {_, {_, _}} when element(1, Site) =:= check ->
+ {_ArgTypes, L1, C1} = list_from_form(Args, S, D, L, C),
+ {t_any(), L1, C1};
{Tag, {{Mod, _FileLine, Form, ArgNames}, Type}} ->
NewTypeNames = [RemType|TypeNames],
S1 = S#from_form{tnames = NewTypeNames},
{ArgTypes, L1, C1} = list_from_form(Args, S1, D, L, C),
CKey = cache_key(RemMod, Name, ArgTypes, TypeNames, D),
- %% case error of
case cache_find(CKey, C) of
{CachedType, DeltaL} ->
{CachedType, L - DeltaL, C};
@@ -4914,6 +4941,8 @@ record_from_form({atom, _, Name}, ModFields, S, D0, L0, C) ->
M = site_module(Site),
{R, C1} = lookup_module_types(M, MR, C),
case lookup_record(Name, R) of
+ {ok, _} when element(1, Site) =:= check ->
+ {t_any(), L0, C1};
{ok, DeclFields} ->
NewTypeNames = [RecordType|TypeNames],
Site1 = {record, {M, Name, length(DeclFields)}},
@@ -4998,6 +5027,30 @@ list_from_form([H|Tail], S, D, L, C) ->
{T1, L2, C2} = list_from_form(Tail, S, D, L1, C1),
{[H1|T1], L2, C2}.
+%% Separates singleton types in keys (see is_singleton_type/1).
+singleton_elements([]) ->
+ [];
+singleton_elements([{K,?mand,V}=Pair|Pairs]) ->
+ case is_singleton_type(K) of
+ true ->
+ [Pair|singleton_elements(Pairs)];
+ false ->
+ singleton_elements([{K,?opt,V}|Pairs])
+ end;
+singleton_elements([{Key0,MNess,Val}|Pairs]) ->
+ [{Key,MNess,Val} || Key <- separate_key(Key0)] ++ singleton_elements(Pairs).
+
+%% To be in sync with is_singleton_type/1.
+%% Does not separate tuples and maps as doing that has potential
+%% to be very expensive.
+separate_key(?atom(Atoms)) when Atoms =/= ?any ->
+ [t_atom(A) || A <- Atoms];
+separate_key(?number(_, _) = T) ->
+ t_elements(T);
+separate_key(?union(List)) ->
+ lists:append([separate_key(K) || K <- List, not t_is_none(K)]);
+separate_key(Key) -> [Key].
+
%% Sorts, combines non-singleton pairs, and applies precendence and
%% mandatoriness rules.
map_from_form([], ShdwPs, MKs, Pairs, DefK, DefV) ->
@@ -5447,7 +5500,8 @@ t_is_singleton(Type) ->
t_is_singleton(Type, Opaques) ->
do_opaque(Type, Opaques, fun is_singleton_type/1).
-%% Incomplete; not all representable singleton types are included.
+%% To be in sync with separate_key/1.
+%% Used to also recognize maps and tuples.
is_singleton_type(?nil) -> true;
is_singleton_type(?atom(?any)) -> false;
is_singleton_type(?atom(Set)) ->
@@ -5455,13 +5509,6 @@ is_singleton_type(?atom(Set)) ->
is_singleton_type(?int_range(V, V)) -> true;
is_singleton_type(?int_set(Set)) ->
ordsets:size(Set) =:= 1;
-is_singleton_type(?tuple(Types, Arity, _)) when is_integer(Arity) ->
- lists:all(fun is_singleton_type/1, Types);
-is_singleton_type(?tuple_set([{Arity, [OnlyTuple]}])) when is_integer(Arity) ->
- is_singleton_type(OnlyTuple);
-is_singleton_type(?map(Pairs, ?none, ?none)) ->
- lists:all(fun({_,MNess,V}) -> MNess =:= ?mand andalso is_singleton_type(V)
- end, Pairs);
is_singleton_type(_) ->
false.
diff --git a/lib/hipe/test/opt_verify_SUITE.erl b/lib/hipe/test/opt_verify_SUITE.erl
index 86083fa02b..a323c10503 100644
--- a/lib/hipe/test/opt_verify_SUITE.erl
+++ b/lib/hipe/test/opt_verify_SUITE.erl
@@ -44,7 +44,7 @@ call_elim(Config) ->
Icode5 = call_elim_test_file(Config, F3, icode_call_elim),
0 = substring_count(binary:bin_to_list(Icode5), "is_key"),
Icode6 = call_elim_test_file(Config, F3, no_icode_call_elim),
- 3 = substring_count(binary:bin_to_list(Icode6), "is_key"),
+ 2 = substring_count(binary:bin_to_list(Icode6), "is_key"),
ok.
call_elim_test_file(Config, FileName, Option) ->
diff --git a/lib/hipe/test/opt_verify_SUITE_data/call_elim_test_branches_opt_poss.erl b/lib/hipe/test/opt_verify_SUITE_data/call_elim_test_branches_opt_poss.erl
index c8ddfa1e75..12875f41af 100644
--- a/lib/hipe/test/opt_verify_SUITE_data/call_elim_test_branches_opt_poss.erl
+++ b/lib/hipe/test/opt_verify_SUITE_data/call_elim_test_branches_opt_poss.erl
@@ -6,17 +6,11 @@ test(A) ->
if A > 0 ->
true = has_a_field(#{a=>true}),
true = has_a_field(#{b=>1, a=>"2"}),
- true = has_a_field(#{a=>5, c=>4}),
- true = has_tuple_field(#{{ab, 1}=><<"qq">>, 1 =>0}),
- true = has_tuple_field(#{up =>down, {ab, 1}=>[]}),
- true = has_tuple_field(#{{ab, 1}=>42});
+ true = has_a_field(#{a=>5, c=>4});
A =< 0 ->
true = has_a_field(#{a=>q, 'A' =>nej}),
true = has_a_field(#{a=>"hej", false=>true}),
- true = has_a_field(#{a=>3}),
- true = has_tuple_field(#{{ab, 1}=>q, 'A' =>nej}),
- true = has_tuple_field(#{{ab, 1}=>"hej", false=>true}),
- true = has_tuple_field(#{{ab, 1}=>3})
+ true = has_a_field(#{a=>3})
end,
true = has_nil_field(#{[] =>3, b =>"seven"}),
true = has_nil_field(#{"seventeen"=>17, []=>nil}),
diff --git a/lib/inets/doc/src/httpc.xml b/lib/inets/doc/src/httpc.xml
index 66ec6cabd8..29e4b22632 100644
--- a/lib/inets/doc/src/httpc.xml
+++ b/lib/inets/doc/src/httpc.xml
@@ -408,7 +408,7 @@
<c>{self, once}</c>, the first message has an extra
element, that is, <c>{http, {RequestId, stream_start, Headers, Pid}}</c>.
This is the process id to be used as an argument to
- <c>http:stream_next/1</c> to trigger the next message to be sent to
+ <c>httpc:stream_next/1</c> to trigger the next message to be sent to
the calling process.</p>
<p>Notice that chunked encoding can add
headers so that there are more headers in the <c>stream_end</c>
diff --git a/lib/inets/doc/src/httpd.xml b/lib/inets/doc/src/httpd.xml
index d74635fc01..edf8731a82 100644
--- a/lib/inets/doc/src/httpd.xml
+++ b/lib/inets/doc/src/httpd.xml
@@ -279,7 +279,18 @@
requests defined by <c>max_keep_alive_requests</c>, the server
closes the connection. The server closes it even if there are
queued request. Default is no limit.</p>
- </item>
+ </item>
+
+
+ <tag><marker id="max_client_body_chunk"></marker>{max_client_body_chunk, integer()}</tag>
+ <item>
+     <p>Enforces chunking of HTTP PUT or POST body data delivered
+     to the mod_esi callback. Note that this is not supported for mod_cgi.
+     Default is no limit, i.e. the whole body is delivered as one entity, which can
+     be very memory consuming. See <seealso marker="mod_esi">mod_esi(3)</seealso>.
+ </p>
+ </item>
+
</taglist>
<marker id="props_admin"></marker>
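
A minimal sketch of enabling the new option when starting an httpd instance; the port, paths and script alias below are placeholders, not taken from the patch:

    inets:start(),
    {ok, _Pid} =
        inets:start(httpd,
                    [{port, 8080},
                     {server_name, "chunk_demo"},
                     {server_root, "/tmp"},
                     {document_root, "/tmp"},
                     {erl_script_alias, {"/esi", [httpd_example]}},
                     %% deliver request bodies to mod_esi in chunks of at most 4096 bytes
                     {max_client_body_chunk, 4096}]).
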
diff --git a/lib/inets/doc/src/mod_esi.xml b/lib/inets/doc/src/mod_esi.xml
index 46cc796c8a..a8393c9248 100644
--- a/lib/inets/doc/src/mod_esi.xml
+++ b/lib/inets/doc/src/mod_esi.xml
@@ -121,35 +121,60 @@
<funcs>
<func>
- <name>Module:Function(SessionID, Env, Input)-> _ </name>
+ <name>Module:Function(SessionID, Env, Input)-> {continue, State} | _ </name>
<fsummary>Creates a dynamic web page and returns it chunk by chunk
to the server process by calling <c>mod_esi:deliver/2</c>.</fsummary>
<type>
<v>SessionID = term()</v>
<v>Env = env()</v>
- <v>Input = string()</v>
+ <v>Input = string() | chunked_data()</v>
+ <v>chunked_data() = {first, Data::binary()} |
+ {continue, Data::binary(), State::term()} |
+ {last, Data::binary(), State::term()} </v>
+ <v>State = term()</v>
</type>
<desc>
<p><c>Module</c> must be found in the code path and export
<c>Function</c> with an arity of three. An <c>erlScriptAlias</c> must
also be set up in the configuration file for the web server.</p>
- <p>If the HTTP request is a 'post' request and a body is sent,
- <c>content_length</c> is the length of the posted
- data. If 'get' is used, <c>query_string</c> is the data after
- <em>?</em> in the URL.</p>
- <p><c>ParsedHeader</c> is the HTTP request as a key-value tuple
- list. The keys in <c>ParsedHeader</c> are in lower case.</p>
- <p><c>SessionID</c> is an identifier
- the server uses when <c>deliver/2</c> is called. Do not
- assume anything about the datatype.</p>
- <p>Use this callback function to generate dynamic web
- content dynamically. When a part of the page is generated, send the
- data back to the client through <c>deliver/2</c>. Notice
- that the first chunk of data sent to the client must at
- least contain all HTTP header fields that the response
- will generate. If the first chunk does not contain the
- <em>end of HTTP header</em>, that is, <c>"\r\n\r\n",</c>
- the server assumes that no HTTP header fields will be generated.</p>
+
+      <p><c>mod_esi:deliver/2</c> shall be used to generate the response
+      to the client, and <c>SessionID</c> is an identifier that shall be used
+      when calling this function; do not assume anything about
+      the datatype. This function may be called
+      several times to chunk the response data. Notice that the
+ first chunk of data sent to the client must at least contain
+ all HTTP header fields that the response will generate. If the
+ first chunk does not contain the <em>end of HTTP header</em>,
+ that is, <c>"\r\n\r\n",</c> the server assumes that no HTTP
+ header fields will be generated.</p>
+
+      <p><c>Env</c> is the environment data of the request, see the description above.</p>
+
+      <p><c>Input</c> is the query data of a GET request or the body of
+      a PUT or POST request. The default behavior (for legacy reasons)
+      is that the whole body is gathered and
+      converted to a string. But if the httpd configuration parameter
+      <seealso
+      marker="httpd#max_client_body_chunk">max_client_body_chunk</seealso>
+      is set, the body is delivered as binary chunks
+      instead. The maximum size of the chunks is either <seealso
+      marker="httpd#max_client_body_chunk">max_client_body_chunk</seealso>
+      or decided by the client, if it uses HTTP chunked encoding
+      to send the body. When the chunking
+      mechanism is used, this callback must return <c>{continue, State::term()}</c>
+      for all calls where <c>Input</c> is <c>{first,
+      Data::binary()}</c> or <c>{continue, Data::binary(),
+      State::term()}</c>. When <c>Input</c> is <c>{last,
+      Data::binary(), State::term()}</c>, the return value is ignored.</p>
+      <note><p>If the body is
+      small, all data may be delivered in a single chunk, and the
+      callback is then called with <c>{last, Data::binary(), undefined}</c>
+      without first being called with <c>{first,
+      Data::binary()}</c>.</p></note><p>The input <c>State</c> is
+      the last returned <c>State</c>; in it the callback can keep
+      any data that it needs to track between chunks.
+      </p>
</desc>
</func>
@@ -159,14 +184,13 @@
This function is deprecated and is only kept for backwards compatibility.</fsummary>
<type>
<v>Env = env()</v>
- <v>Input = string()</v>
+ <v>Input = string() </v>
<v>Response = string()</v>
</type>
<desc>
<p>This callback format consumes much memory, as the
whole response must be generated before it is sent to the
- user. This function is deprecated and is only kept for backwards
- compatibility.
+ user. This callback format is deprecated.
For new development, use <c>Module:Function/3</c>.</p>
</desc>
</func>
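
A minimal sketch of a callback that follows the chunked-input contract described above; the module and function names are illustrative, and the in-tree example added by this patch is httpd_example:post_chunked/3:

    -module(my_esi).                 %% illustrative name, not part of the patch
    -export([upload/3]).

    %% Chunked delivery (max_client_body_chunk set): keep a byte count in State.
    upload(_SessionID, _Env, {first, Chunk}) ->
        {continue, byte_size(Chunk)};
    upload(_SessionID, _Env, {continue, Chunk, Sofar}) ->
        {continue, Sofar + byte_size(Chunk)};
    upload(SessionID, _Env, {last, Chunk, Sofar}) ->
        Prev = case Sofar of undefined -> 0; N -> N end,
        Total = Prev + byte_size(Chunk),
        mod_esi:deliver(SessionID, "Content-Type: text/plain\r\n\r\n"),
        mod_esi:deliver(SessionID, "received " ++ integer_to_list(Total) ++ " bytes");
    %% Legacy delivery (option not set): the whole body arrives as one string.
    upload(SessionID, _Env, Input) when is_list(Input) ->
        mod_esi:deliver(SessionID, "Content-Type: text/plain\r\n\r\n"),
        mod_esi:deliver(SessionID, "received " ++ integer_to_list(length(Input)) ++ " bytes").
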
diff --git a/lib/inets/doc/src/notes.xml b/lib/inets/doc/src/notes.xml
index 2f4f20347a..c85600d0be 100644
--- a/lib/inets/doc/src/notes.xml
+++ b/lib/inets/doc/src/notes.xml
@@ -33,7 +33,41 @@
<file>notes.xml</file>
</header>
- <section><title>Inets 6.4</title>
+ <section><title>Inets 6.4.1</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+      http_uri is now aligned with RFC 3986 and no longer converts "+"
+      to space when decoding URIs.</p>
+ <p>
+ Own Id: OTP-14573</p>
+ </item>
+ </list>
+ </section>
+
+
+ <section><title>Improvements and New Features</title>
+ <list>
+ <item>
+ <p>
+      Added the new option max_client_body_chunk to the httpd server to
+      allow chunked delivery of PUT and POST data to the mod_esi
+      callback. Note that a new mod_esi callback implementation is
+      required.</p>
+      <p>
+      Also corrected the value provided by the server_name environment
+      variable.</p>
+ <p>
+ Own Id: OTP-14450</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
+<section><title>Inets 6.4</title>
<section><title>Fixed Bugs and Malfunctions</title>
<list>
diff --git a/lib/inets/src/http_lib/http_uri.erl b/lib/inets/src/http_lib/http_uri.erl
index c4be5abd7c..7f1ca02014 100644
--- a/lib/inets/src/http_lib/http_uri.erl
+++ b/lib/inets/src/http_lib/http_uri.erl
@@ -117,8 +117,6 @@ decode(String) when is_list(String) ->
decode(String) when is_binary(String) ->
do_decode_binary(String).
-do_decode([$+|Rest]) ->
- [$ |do_decode(Rest)];
do_decode([$%,Hex1,Hex2|Rest]) ->
[hex2dec(Hex1)*16+hex2dec(Hex2)|do_decode(Rest)];
do_decode([First|Rest]) ->
@@ -126,8 +124,6 @@ do_decode([First|Rest]) ->
do_decode([]) ->
[].
-do_decode_binary(<<$+, Rest/bits>>) ->
- <<$ , (do_decode_binary(Rest))/binary>>;
do_decode_binary(<<$%, Hex:2/binary, Rest/bits>>) ->
<<(binary_to_integer(Hex, 16)), (do_decode_binary(Rest))/binary>>;
do_decode_binary(<<First:1/binary, Rest/bits>>) ->
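
With the two clauses above removed, http_uri:decode/1 only handles percent-escapes; a "+" passes through unchanged, as RFC 3986 requires, so callers that rely on the form-encoding convention of "+" meaning space must now handle it themselves. Expected behaviour, written as a sketch rather than as test output:

    "a+b c"     = http_uri:decode("a+b%20c"),
    <<"a+b c">> = http_uri:decode(<<"a+b%20c">>).
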
diff --git a/lib/inets/src/http_server/httpd_example.erl b/lib/inets/src/http_server/httpd_example.erl
index c893b10dca..45b6deba97 100644
--- a/lib/inets/src/http_server/httpd_example.erl
+++ b/lib/inets/src/http_server/httpd_example.erl
@@ -22,7 +22,7 @@
-export([print/1]).
-export([get/2, put/2, post/2, yahoo/2, test1/2, get_bin/2, peer/2]).
--export([newformat/3]).
+-export([newformat/3, post_chunked/3]).
%% These are used by the inets test-suite
-export([delay/1, chunk_timeout/3]).
@@ -131,15 +131,31 @@ footer() ->
"</BODY>
</HTML>\n".
-
-newformat(SessionID, _Env, _Input)->
+post_chunked(_SessionID, _Env, {first, _Body} = _Bodychunk) ->
+ {continue, {state, 1}};
+post_chunked(_SessionID, _Env, {continue, _Body, {state, N}} = _Bodychunk) ->
+ {continue, {state, N+1}};
+post_chunked(SessionID, _Env, {last, _Body, {state, N}} = _Bodychunk) ->
+ mod_esi:deliver(SessionID, "Content-Type:text/html\r\n\r\n"),
+ mod_esi:deliver(SessionID, top("Received chunked body")),
+    mod_esi:deliver(SessionID, "Received " ++ integer_to_list(N) ++ " chunks"),
+ mod_esi:deliver(SessionID, footer());
+post_chunked(SessionID, _Env, {last, _Body, undefined} = _Bodychunk) ->
+ mod_esi:deliver(SessionID, "Content-Type:text/html\r\n\r\n"),
+ mod_esi:deliver(SessionID, top("Received chunked body")),
+ mod_esi:deliver(SessionID, "Received 1 chunk"),
+ mod_esi:deliver(SessionID, footer());
+post_chunked(_, _, _Body) ->
+ exit(body_not_chunked).
+
+newformat(SessionID,_,_) ->
mod_esi:deliver(SessionID, "Content-Type:text/html\r\n\r\n"),
mod_esi:deliver(SessionID, top("new esi format test")),
mod_esi:deliver(SessionID, "This new format is nice<BR>"),
mod_esi:deliver(SessionID, "This new format is nice<BR>"),
mod_esi:deliver(SessionID, "This new format is nice<BR>"),
mod_esi:deliver(SessionID, footer()).
-
+
%% ------------------------------------------------------
delay(Time) when is_integer(Time) ->
diff --git a/lib/inets/src/http_server/httpd_request.erl b/lib/inets/src/http_server/httpd_request.erl
index 749f58c197..0eaf073255 100644
--- a/lib/inets/src/http_server/httpd_request.erl
+++ b/lib/inets/src/http_server/httpd_request.erl
@@ -36,7 +36,7 @@
%% little at a time on a socket.
-export([
parse_method/1, parse_uri/1, parse_version/1, parse_headers/1,
- whole_body/1
+ whole_body/1, body_chunk_first/3, body_chunk/3, add_chunk/1
]).
@@ -76,13 +76,12 @@ body_data(Headers, Body) ->
ContentLength = list_to_integer(Headers#http_request_h.'content-length'),
case size(Body) - ContentLength of
0 ->
- {binary_to_list(Body), <<>>};
+ {Body, <<>>};
_ ->
<<BodyThisReq:ContentLength/binary, Next/binary>> = Body,
- {binary_to_list(BodyThisReq), Next}
+ {BodyThisReq, Next}
end.
-
%%-------------------------------------------------------------------------
%% validate(Method, Uri, Version) -> ok | {error, {bad_request, Reason} |
%% {error, {not_supported, {Method, Uri, Version}}
@@ -292,10 +291,46 @@ parse_headers(<<Octet, Rest/binary>>, Header, Headers, Current,
parse_headers(Rest, [Octet | Header], Headers, Current + 1, Max,
Options, Result).
+body_chunk_first(Body, 0 = Length, _) ->
+ whole_body(Body, Length);
+body_chunk_first(Body, Length, MaxChunk) ->
+ case body_chunk(Body, Length, MaxChunk) of
+ {ok, {last, NewBody}} ->
+ {ok, NewBody};
+ Other ->
+ Other
+ end.
+%% Used to chunk POST/PUT data that is not HTTP chunk decoded
+add_chunk([<<>>, Body, Length, MaxChunk]) ->
+ body_chunk(Body, Length, MaxChunk);
+add_chunk([More, Body, Length, MaxChunk]) ->
+ body_chunk(<<Body/binary, More/binary>>, Length, MaxChunk).
+
+body_chunk(<<>> = Body, Length, MaxChunk) ->
+ {ok, {continue, ?MODULE, add_chunk, [Body, Length, MaxChunk]}};
+body_chunk(Body, Length, nolimit) ->
+ whole_body(Body, Length);
+
+body_chunk(Body, Length, MaxChunk) when Length > MaxChunk ->
+ case size(Body) >= MaxChunk of
+ true ->
+ <<Chunk:MaxChunk/binary, Rest/binary>> = Body,
+ {ok, {{continue, Chunk}, ?MODULE, add_chunk, [Rest, Length - MaxChunk, MaxChunk]}};
+ false ->
+ {ok, {continue, ?MODULE, add_chunk, [Body, Length, MaxChunk]}}
+ end;
+body_chunk(Body, Length, MaxChunk) ->
+ case size(Body) of
+ Length ->
+ {ok, {last, Body}};
+ _ ->
+ {ok, {continue, ?MODULE, add_chunk, [Body, Length, MaxChunk]}}
+ end.
+
whole_body(Body, Length) ->
case size(Body) of
N when N < Length, Length > 0 ->
- {?MODULE, whole_body, [Body, Length]};
+ {?MODULE, add_chunk, [Body, Length, nolimit]};
N when N >= Length, Length >= 0 ->
%% When a client uses pipelining trailing data
%% may be part of the next request!
@@ -443,6 +478,3 @@ check_header({"content-length", Value}, Maxsizes) ->
end;
check_header(_, _) ->
ok.
-
-
-
diff --git a/lib/inets/src/http_server/httpd_request_handler.erl b/lib/inets/src/http_server/httpd_request_handler.erl
index 538d52b98d..bd4fdd3832 100644
--- a/lib/inets/src/http_server/httpd_request_handler.erl
+++ b/lib/inets/src/http_server/httpd_request_handler.erl
@@ -49,7 +49,8 @@
headers, %% #http_request_h{}
body, %% binary()
data, %% The total data received in bits, checked after 10s
- byte_limit %% Bit limit per second before kick out
+ byte_limit, %% Bit limit per second before kick out
+ chunk
}).
%%====================================================================
@@ -124,7 +125,8 @@ continue_init(Manager, ConfigDB, SocketType, Socket, TimeOut) ->
NrOfRequest = max_keep_alive_request(ConfigDB),
MaxContentLen = max_content_length(ConfigDB),
Customize = customize(ConfigDB),
-
+ MaxChunk = max_client_body_chunk(ConfigDB),
+
{_, Status} = httpd_manager:new_connection(Manager),
MFA = {httpd_request, parse, [[{max_uri, MaxURISize}, {max_header, MaxHeaderSize},
@@ -139,7 +141,8 @@ continue_init(Manager, ConfigDB, SocketType, Socket, TimeOut) ->
status = Status,
timeout = TimeOut,
max_keep_alive_request = NrOfRequest,
- mfa = MFA},
+ mfa = MFA,
+ chunk = chunk_start(MaxChunk)},
http_transport:setopts(SocketType, Socket,
[binary, {packet, 0}, {active, once}]),
@@ -194,6 +197,7 @@ handle_cast(Msg, #state{mod = ModData} = State) ->
%%--------------------------------------------------------------------
handle_info({Proto, Socket, Data},
#state{mfa = {Module, Function, Args},
+ chunk = {ChunkState, _},
mod = #mod{socket_type = SockType,
socket = Socket} = ModData} = State)
when (((Proto =:= tcp) orelse
@@ -207,7 +211,8 @@ handle_info({Proto, Socket, Data},
_ ->
State#state.data + byte_size(Data)
end,
- case PROCESSED of
+
+ case PROCESSED of
{ok, Result} ->
NewState = case NewDataSize of
undefined ->
@@ -215,7 +220,7 @@ handle_info({Proto, Socket, Data},
_ ->
set_new_data_size(cancel_request_timeout(State), NewDataSize)
end,
- handle_http_msg(Result, NewState);
+ handle_msg(Result, NewState);
{error, {size_error, MaxSize, ErrCode, ErrStr}, Version} ->
NewModData = ModData#mod{http_version = Version},
httpd_response:send_status(NewModData, ErrCode, ErrStr),
@@ -224,7 +229,10 @@ handle_info({Proto, Socket, Data},
error_log(Reason, NewModData),
{stop, normal, State#state{response_sent = true,
mod = NewModData}};
-
+
+ {http_chunk = Module, Function, Args} when ChunkState =/= undefined ->
+ NewState = handle_chunk(Module, Function, Args, State),
+ {noreply, NewState};
NewMFA ->
http_transport:setopts(SockType, Socket, [{active, once}]),
case NewDataSize of
@@ -349,6 +357,34 @@ await_socket_ownership_transfer(AcceptTimeout) ->
exit(accept_socket_timeout)
end.
+
+%%% Internal chunking of client body
+handle_msg({{continue, Chunk}, Module, Function, Args}, #state{chunk = {_, CbState}} = State) ->
+ handle_internal_chunk(State#state{chunk = {continue, CbState},
+ body = Chunk}, Module, Function, Args);
+handle_msg({continue, Module, Function, Args}, #state{mod = ModData} = State) ->
+ http_transport:setopts(ModData#mod.socket_type,
+ ModData#mod.socket,
+ [{active, once}]),
+ {noreply, State#state{mfa = {Module, Function, Args}}};
+handle_msg({last, Body}, #state{headers = Headers, chunk = {_, CbState}} = State) ->
+ NewHeaders = Headers#http_request_h{'content-length' = integer_to_list(size(Body))},
+ handle_response(State#state{chunk = {last, CbState},
+ headers = NewHeaders,
+ body = Body});
+%%% Last data chunked by client
+handle_msg({ChunkedHeaders, Body}, #state{headers = Headers , chunk = {ChunkState, CbState}} = State) when ChunkState =/= undefined ->
+ NewHeaders = http_chunk:handle_headers(Headers, ChunkedHeaders),
+ handle_response(State#state{chunk = {last, CbState},
+ headers = NewHeaders,
+ body = Body});
+handle_msg({ChunkedHeaders, Body}, #state{headers = Headers , chunk = {undefined, _}} = State) ->
+ NewHeaders = http_chunk:handle_headers(Headers, ChunkedHeaders),
+ handle_response(State#state{headers = NewHeaders,
+ body = Body});
+handle_msg(Result, State) ->
+ handle_http_msg(Result, State).
+
handle_http_msg({_, _, Version, {_, _}, _},
#state{status = busy, mod = ModData} = State) ->
handle_manager_busy(State#state{mod =
@@ -405,10 +441,6 @@ handle_http_msg({Method, Uri, Version, {RecordHeaders, Headers}, Body},
error_log(Reason, ModData),
{stop, normal, State#state{response_sent = true}}
end;
-handle_http_msg({ChunkedHeaders, Body},
- State = #state{headers = Headers}) ->
- NewHeaders = http_chunk:handle_headers(Headers, ChunkedHeaders),
- handle_response(State#state{headers = NewHeaders, body = Body});
handle_http_msg(Body, State) ->
handle_response(State#state{body = Body}).
@@ -443,22 +475,25 @@ handle_body(#state{mod = #mod{config_db = ConfigDB}} = State) ->
end.
-handle_body(#state{headers = Headers, body = Body, mod = ModData} = State,
+handle_body(#state{headers = Headers, body = Body,
+ chunk = {ChunkState, CbState}, mod = #mod{config_db = ConfigDB} = ModData} = State,
MaxHeaderSize, MaxBodySize) ->
+ MaxChunk = max_client_body_chunk(ConfigDB),
case Headers#http_request_h.'transfer-encoding' of
"chunked" ->
try http_chunk:decode(Body, MaxBodySize, MaxHeaderSize) of
- {Module, Function, Args} ->
+ {Module, Function, Args} ->
http_transport:setopts(ModData#mod.socket_type,
ModData#mod.socket,
[{active, once}]),
{noreply, State#state{mfa =
- {Module, Function, Args}}};
- {ok, {ChunkedHeaders, NewBody}} ->
- NewHeaders =
- http_chunk:handle_headers(Headers, ChunkedHeaders),
- handle_response(State#state{headers = NewHeaders,
- body = NewBody})
+ {Module, Function, Args},
+ chunk = chunk_start(MaxChunk)}};
+ {ok, {ChunkedHeaders, NewBody}} ->
+ NewHeaders = http_chunk:handle_headers(Headers, ChunkedHeaders),
+ handle_response(State#state{headers = NewHeaders,
+ body = NewBody,
+ chunk = chunk_finish(ChunkState, CbState, MaxChunk)})
catch
throw:Error ->
httpd_response:send_status(ModData, 400,
@@ -476,21 +511,25 @@ handle_body(#state{headers = Headers, body = Body, mod = ModData} = State,
error_log(Reason, ModData),
{stop, normal, State#state{response_sent = true}};
_ ->
- Length = list_to_integer(Headers#http_request_h.'content-length'),
+ Length = list_to_integer(Headers#http_request_h.'content-length'),
+ MaxChunk = max_client_body_chunk(ConfigDB),
case ((Length =< MaxBodySize) or (MaxBodySize == nolimit)) of
true ->
- case httpd_request:whole_body(Body, Length) of
- {Module, Function, Args} ->
- http_transport:setopts(ModData#mod.socket_type,
+ case httpd_request:body_chunk_first(Body, Length, MaxChunk) of
+ {ok, {continue, Module, Function, Args}} ->
+ http_transport:setopts(ModData#mod.socket_type,
ModData#mod.socket,
[{active, once}]),
{noreply, State#state{mfa =
{Module, Function, Args}}};
-
- {ok, NewBody} ->
- handle_response(
- State#state{headers = Headers,
- body = NewBody})
+ {ok, {{continue, Chunk}, Module, Function, Args}} ->
+ handle_internal_chunk(State#state{chunk = chunk_start(MaxChunk),
+ body = Chunk}, Module, Function, Args);
+ {ok, NewBody} ->
+ handle_response(State#state{chunk = chunk_finish(ChunkState,
+ CbState, MaxChunk),
+ headers = Headers,
+ body = NewBody})
end;
false ->
httpd_response:send_status(ModData, 413, "Body too long"),
@@ -550,15 +589,61 @@ expect(Headers, _, ConfigDB) ->
end
end.
+handle_chunk(http_chunk = Module, decode_data = Function,
+ [ChunkSize, TotalChunk, {MaxBodySize, BodySoFar, _AccLength, MaxHeaderSize}],
+ #state{chunk = {_, CbState},
+ mod = #mod{socket_type = SockType,
+ socket = Socket} = ModData} = State) ->
+ {continue, NewCbState} = httpd_response:handle_continuation(ModData#mod{entity_body =
+ {continue, BodySoFar, CbState}}),
+ http_transport:setopts(SockType, Socket, [{active, once}]),
+ State#state{chunk = {continue, NewCbState}, mfa = {Module, Function, [ChunkSize, TotalChunk, {MaxBodySize, <<>>, 0, MaxHeaderSize}]}};
+
+handle_chunk(http_chunk = Module, decode_size = Function,
+ [Data, HexList, _AccSize, {MaxBodySize, BodySoFar, _AccLength, MaxHeaderSize}],
+ #state{chunk = {_, CbState},
+ mod = #mod{socket_type = SockType,
+ socket = Socket} = ModData} = State) ->
+ {continue, NewCbState} = httpd_response:handle_continuation(ModData#mod{entity_body = {continue, BodySoFar, CbState}}),
+ http_transport:setopts(SockType, Socket, [{active, once}]),
+ State#state{chunk = {continue, NewCbState}, mfa = {Module, Function, [Data, HexList, 0, {MaxBodySize, <<>>, 0, MaxHeaderSize}]}};
+handle_chunk(Module, Function, Args, #state{mod = #mod{socket_type = SockType,
+ socket = Socket}} = State) ->
+ http_transport:setopts(SockType, Socket, [{active, once}]),
+ State#state{mfa = {Module, Function, Args}}.
+
+handle_internal_chunk(#state{chunk = {ChunkState, CbState}, body = Chunk,
+ mod = #mod{socket_type = SockType,
+ socket = Socket} = ModData} = State, Module, Function, Args)->
+ Bodychunk = body_chunk(ChunkState, CbState, Chunk),
+ {continue, NewCbState} = httpd_response:handle_continuation(ModData#mod{entity_body = Bodychunk}),
+ case Args of
+ [<<>> | _] ->
+ http_transport:setopts(SockType, Socket, [{active, once}]),
+ {noreply, State#state{chunk = {continue, NewCbState}, mfa = {Module, Function, Args}}};
+ _ ->
+ handle_info({dummy, Socket, <<>>}, State#state{chunk = {continue, NewCbState},
+ mfa = {Module, Function, Args}})
+ end.
+
+handle_response(#state{body = Body,
+ headers = Headers,
+ mod = ModData,
+ chunk = {last, CbState},
+ max_keep_alive_request = Max} = State) when Max > 0 ->
+ {NewBody, Data} = httpd_request:body_data(Headers, Body),
+ ok = httpd_response:generate_and_send_response(
+ ModData#mod{entity_body = {last, NewBody, CbState}}),
+ handle_next_request(State#state{response_sent = true}, Data);
handle_response(#state{body = Body,
mod = ModData,
headers = Headers,
max_keep_alive_request = Max} = State) when Max > 0 ->
{NewBody, Data} = httpd_request:body_data(Headers, Body),
+ %% Backwards compatible, may cause memory explosion
ok = httpd_response:generate_and_send_response(
- ModData#mod{entity_body = NewBody}),
+ ModData#mod{entity_body = binary_to_list(NewBody)}),
handle_next_request(State#state{response_sent = true}, Data);
-
handle_response(#state{body = Body,
headers = Headers,
mod = ModData} = State) ->
@@ -578,6 +663,7 @@ handle_next_request(#state{mod = #mod{connection = true} = ModData,
MaxURISize = max_uri_size(ModData#mod.config_db),
MaxContentLen = max_content_length(ModData#mod.config_db),
Customize = customize(ModData#mod.config_db),
+ MaxChunk = max_client_body_chunk(ModData#mod.config_db),
MFA = {httpd_request, parse, [[{max_uri, MaxURISize}, {max_header, MaxHeaderSize},
{max_version, ?HTTP_MAX_VERSION_STRING},
@@ -590,6 +676,7 @@ handle_next_request(#state{mod = #mod{connection = true} = ModData,
max_keep_alive_request = decrease(Max),
headers = undefined,
body = undefined,
+ chunk = chunk_start(MaxChunk),
response_sent = false},
NewState = activate_request_timeout(TmpState),
@@ -647,6 +734,9 @@ error_log(ReasonString, #mod{config_db = ConfigDB}) ->
max_header_size(ConfigDB) ->
httpd_util:lookup(ConfigDB, max_header_size, ?HTTP_MAX_HEADER_SIZE).
+max_client_body_chunk(ConfigDB) ->
+ httpd_util:lookup(ConfigDB, max_client_body_chunk, nolimit).
+
max_uri_size(ConfigDB) ->
httpd_util:lookup(ConfigDB, max_uri_size, ?HTTP_MAX_URI_SIZE).
@@ -661,3 +751,17 @@ max_content_length(ConfigDB) ->
customize(ConfigDB) ->
httpd_util:lookup(ConfigDB, customize, httpd_custom).
+
+chunk_start(nolimit) ->
+ {undefined, undefined};
+chunk_start(_) ->
+ {first, undefined}.
+chunk_finish(_, _, nolimit) ->
+ {undefined, undefined};
+chunk_finish(_, CbState, _) ->
+ {last, CbState}.
+
+body_chunk(first, _, Chunk) ->
+ {first, Chunk};
+body_chunk(ChunkState, CbState, Chunk) ->
+ {ChunkState, Chunk, CbState}.
diff --git a/lib/inets/src/http_server/httpd_response.erl b/lib/inets/src/http_server/httpd_response.erl
index effa273e92..6b9053fda6 100644
--- a/lib/inets/src/http_server/httpd_response.erl
+++ b/lib/inets/src/http_server/httpd_response.erl
@@ -21,7 +21,7 @@
-module(httpd_response).
-export([generate_and_send_response/1, send_status/3, send_header/3,
send_body/3, send_chunk/3, send_final_chunk/2, send_final_chunk/3,
- split_header/2, is_disable_chunked_send/1, cache_headers/2]).
+ split_header/2, is_disable_chunked_send/1, cache_headers/2, handle_continuation/1]).
-export([map_status_code/2]).
-include_lib("inets/src/inets_app/inets_internal.hrl").
@@ -31,6 +31,9 @@
-define(VMODULE,"RESPONSE").
+handle_continuation(Mod) ->
+ generate_and_send_response(Mod).
+
%% If peername does not exist the client already discarded the
%% request so we do not need to send a reply.
generate_and_send_response(#mod{init_data =
@@ -39,6 +42,8 @@ generate_and_send_response(#mod{init_data =
generate_and_send_response(#mod{config_db = ConfigDB} = ModData) ->
Modules = httpd_util:lookup(ConfigDB, modules, ?DEFAULT_MODS),
case traverse_modules(ModData, Modules) of
+ {continue, _} = Continue ->
+ Continue;
done ->
ok;
{proceed, Data} ->
@@ -69,17 +74,15 @@ generate_and_send_response(#mod{config_db = ConfigDB} = ModData) ->
traverse_modules(ModData,[]) ->
{proceed,ModData#mod.data};
traverse_modules(ModData,[Module|Rest]) ->
- ?hdrd("traverse modules", [{callback_module, Module}]),
try apply(Module, do, [ModData]) of
+ {continue, _} = Continue ->
+ Continue;
done ->
- ?hdrt("traverse modules - done", []),
- done;
+ done;
{break, NewData} ->
- ?hdrt("traverse modules - break", [{new_data, NewData}]),
- {proceed, NewData};
+ {proceed, NewData};
{proceed, NewData} ->
- ?hdrt("traverse modules - proceed", [{new_data, NewData}]),
- traverse_modules(ModData#mod{data = NewData}, Rest)
+ traverse_modules(ModData#mod{data = NewData}, Rest)
catch
T:E ->
String =
@@ -104,15 +107,10 @@ send_status(#mod{socket_type = SocketType,
socket = Socket,
config_db = ConfigDB} = ModData, StatusCode, PhraseArgs) ->
- ?hdrd("send status", [{status_code, StatusCode},
- {phrase_args, PhraseArgs}]),
-
ReasonPhrase = httpd_util:reason_phrase(StatusCode),
Message = httpd_util:message(StatusCode, PhraseArgs, ConfigDB),
Body = get_body(ReasonPhrase, Message),
- ?hdrt("send status - header", [{reason_phrase, ReasonPhrase},
- {message, Message}]),
send_header(ModData, StatusCode,
[{content_type, "text/html"},
{content_length, integer_to_list(length(Body))}]),
diff --git a/lib/inets/src/http_server/httpd_script_env.erl b/lib/inets/src/http_server/httpd_script_env.erl
index e15613273e..055f08fdb0 100644
--- a/lib/inets/src/http_server/httpd_script_env.erl
+++ b/lib/inets/src/http_server/httpd_script_env.erl
@@ -74,9 +74,13 @@ which_peercert(#mod{socket_type = {Type, _}, socket = Socket}) when Type == essl
which_peercert(_) -> %% Not an ssl connection
undefined.
+
which_resolve(#mod{init_data = #init_data{resolve = Resolve}}) ->
Resolve.
+which_name(#mod{config_db = ConfigDB}) ->
+ httpd_util:lookup(ConfigDB, server_name).
+
which_method(#mod{method = Method}) ->
Method.
@@ -85,7 +89,8 @@ which_request_uri(#mod{request_uri = RUri}) ->
create_basic_elements(esi, ModData) ->
[{server_software, which_server(ModData)},
- {server_name, which_resolve(ModData)},
+ {server_name, which_name(ModData)},
+ {host_name, which_resolve(ModData)},
{gateway_interface, ?GATEWAY_INTERFACE},
{server_protocol, ?SERVER_PROTOCOL},
{server_port, which_port(ModData)},
@@ -96,7 +101,8 @@ create_basic_elements(esi, ModData) ->
create_basic_elements(cgi, ModData) ->
[{"SERVER_SOFTWARE", which_server(ModData)},
- {"SERVER_NAME", which_resolve(ModData)},
+ {"SERVER_NAME", which_name(ModData)},
+ {"HOST_NAME", which_resolve(ModData)},
{"GATEWAY_INTERFACE", ?GATEWAY_INTERFACE},
{"SERVER_PROTOCOL", ?SERVER_PROTOCOL},
{"SERVER_PORT", integer_to_list(which_port(ModData))},
diff --git a/lib/inets/src/http_server/mod_disk_log.erl b/lib/inets/src/http_server/mod_disk_log.erl
index 3be5f2dd74..2023546f01 100644
--- a/lib/inets/src/http_server/mod_disk_log.erl
+++ b/lib/inets/src/http_server/mod_disk_log.erl
@@ -363,17 +363,21 @@ create_disk_log(Filename, MaxBytes, MaxFiles, ConfigList) ->
%%----------------------------------------------------------------------
open(Filename, MaxBytes, MaxFiles, internal) ->
- Opts = [{format, internal}, {repair, truncate}],
- open1(Filename, MaxBytes, MaxFiles, Opts);
+ Opt0 = {format, internal},
+ Opts1 = [Opt0, {repair, true}],
+ Opts2 = [Opt0, {repair, truncate}],
+ open1(Filename, MaxBytes, MaxFiles, Opts1, Opts2);
open(Filename, MaxBytes, MaxFiles, _) ->
Opts = [{format, external}],
- open1(Filename, MaxBytes, MaxFiles, Opts).
+ open1(Filename, MaxBytes, MaxFiles, Opts, Opts).
-open1(Filename, MaxBytes, MaxFiles, Opts0) ->
- Opts1 = [{name, Filename}, {file, Filename}, {type, wrap}] ++ Opts0,
- case open2(Opts1, {MaxBytes, MaxFiles}) of
+open1(Filename, MaxBytes, MaxFiles, Opts1, Opts2) ->
+ Opts0 = [{name, Filename}, {file, Filename}, {type, wrap}],
+ case open2(Opts0 ++ Opts1, Opts0 ++ Opts2, {MaxBytes, MaxFiles}) of
{ok, LogDB} ->
{ok, LogDB};
+ {repaired, LogDB, {recovered, _}, {badbytes, _}} ->
+ {ok, LogDB};
{error, Reason} ->
{error,
?NICE("Can't create " ++ Filename ++
@@ -382,11 +386,16 @@ open1(Filename, MaxBytes, MaxFiles, Opts0) ->
{error, ?NICE("Can't create "++Filename)}
end.
-open2(Opts, Size) ->
- case disk_log:open(Opts) of
+open2(Opts1, Opts2, Size) ->
+ case disk_log:open(Opts1) of
{error, {badarg, size}} ->
%% File did not exist, add the size option and try again
- disk_log:open([{size, Size} | Opts]);
+ disk_log:open([{size, Size} | Opts1]);
+ {error, {Reason, _}} when
+ Reason == not_a_log_file;
+ Reason == invalid_index_file ->
+ %% File was corrupt, add the truncate option and try again
+ disk_log:open([{size, Size} | Opts2]);
Else ->
Else
end.
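The net effect of the open/open1/open2 changes above: try a normal open first ({repair, true}), accept a repaired log, and only fall back to {repair, truncate} when disk_log reports a corrupt log or index file. A condensed, hedged sketch of that flow (the function name and simplifications are ours; the real code also retries on {badarg, size} when the file did not exist):

    %% Open a wrap log, truncating it only if disk_log says it is corrupt.
    open_or_truncate(Name, Size) ->
        Base = [{name, Name}, {file, Name}, {type, wrap},
                {format, internal}, {size, Size}],
        case disk_log:open([{repair, true} | Base]) of
            {ok, Log} ->
                {ok, Log};
            {repaired, Log, {recovered, _}, {badbytes, _}} ->
                {ok, Log};
            {error, {Reason, _}} when Reason =:= not_a_log_file;
                                      Reason =:= invalid_index_file ->
                %% Corrupt log: retry with truncation, as mod_disk_log now does.
                disk_log:open([{repair, truncate} | Base]);
            Error ->
                Error
        end.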
diff --git a/lib/inets/src/http_server/mod_esi.erl b/lib/inets/src/http_server/mod_esi.erl
index b21af1418c..3a589ca5f0 100644
--- a/lib/inets/src/http_server/mod_esi.erl
+++ b/lib/inets/src/http_server/mod_esi.erl
@@ -31,7 +31,6 @@
-include("httpd.hrl").
-include("httpd_internal.hrl").
--include("inets_internal.hrl").
-define(VMODULE,"ESI").
-define(DEFAULT_ERL_TIMEOUT,15000).
@@ -69,7 +68,6 @@ deliver(_SessionID, _Data) ->
%% Description: See httpd(3) ESWAPI CALLBACK FUNCTIONS
%%-------------------------------------------------------------------------
do(ModData) ->
- ?hdrt("do", []),
case proplists:get_value(status, ModData#mod.data) of
{_StatusCode, _PhraseArgs, _Reason} ->
{proceed, ModData#mod.data};
@@ -190,7 +188,6 @@ store({erl_script_nocache, Value}, _) ->
%%% Internal functions
%%%========================================================================
generate_response(ModData) ->
- ?hdrt("generate response", []),
case scheme(ModData#mod.request_uri, ModData#mod.config_db) of
{eval, ESIBody, Modules} ->
eval(ModData, ESIBody, Modules);
@@ -242,7 +239,6 @@ alias_match_str(Alias, eval_script_alias) ->
erl(#mod{method = Method} = ModData, ESIBody, Modules)
when (Method =:= "GET") orelse (Method =:= "HEAD") orelse (Method =:= "DELETE") ->
- ?hdrt("erl", [{method, Method}]),
case httpd_util:split(ESIBody,":|%3A|/",2) of
{ok, [ModuleName, FuncAndInput]} ->
case httpd_util:split(FuncAndInput,"[\?/]",2) of
@@ -273,14 +269,12 @@ erl(#mod{method = "PUT", entity_body = Body} = ModData,
generate_webpage(ModData, ESIBody, Modules,
list_to_atom(ModuleName),
FunctionName, {Input,Body},
- [{entity_body, Body} |
- script_elements(FuncAndInput, Input)]);
+ script_elements(FuncAndInput, Input));
{ok, [FunctionName]} ->
generate_webpage(ModData, ESIBody, Modules,
list_to_atom(ModuleName),
FunctionName, {undefined,Body},
- [{entity_body, Body} |
- script_elements(FuncAndInput, "")]);
+ script_elements(FuncAndInput, ""));
{ok, BadRequest} ->
{proceed,[{status,{400,none, BadRequest}} |
ModData#mod.data]}
@@ -290,12 +284,11 @@ erl(#mod{method = "PUT", entity_body = Body} = ModData,
end;
erl(#mod{method = "POST", entity_body = Body} = ModData, ESIBody, Modules) ->
- ?hdrt("erl", [{method, post}]),
case httpd_util:split(ESIBody,":|%3A|/",2) of
{ok,[ModuleName, Function]} ->
generate_webpage(ModData, ESIBody, Modules,
list_to_atom(ModuleName),
- Function, Body, [{entity_body, Body}]);
+ Function, Body, []);
{ok, BadRequest} ->
{proceed,[{status, {400, none, BadRequest}} | ModData#mod.data]}
end;
@@ -304,7 +297,6 @@ erl(#mod{request_uri = ReqUri,
method = "PATCH",
http_version = Version,
data = Data}, _ESIBody, _Modules) ->
- ?hdrt("erl", [{method, patch}]),
{proceed, [{status,{501,{"PATCH", ReqUri, Version},
?NICE("Erl mechanism doesn't support method PATCH")}}|
Data]}.
@@ -315,7 +307,6 @@ generate_webpage(ModData, ESIBody, [all], Module, FunctionName,
FunctionName, Input, ScriptElements);
generate_webpage(ModData, ESIBody, Modules, Module, FunctionName,
Input, ScriptElements) ->
- ?hdrt("generate webpage", []),
Function = list_to_atom(FunctionName),
case lists:member(Module, Modules) of
true ->
@@ -337,7 +328,6 @@ generate_webpage(ModData, ESIBody, Modules, Module, FunctionName,
%% Old API that waits for the dynamic webpage to be totally generated
%% before anything is sent back to the client.
erl_scheme_webpage_whole(Mod, Func, Env, Input, ModData) ->
- ?hdrt("erl_scheme_webpage_whole", [{module, Mod}, {function, Func}]),
case (catch Mod:Func(Env, Input)) of
{'EXIT',{undef, _}} ->
{proceed, [{status, {404, ModData#mod.request_uri, "Not found"}}
@@ -375,7 +365,6 @@ erl_scheme_webpage_whole(Mod, Func, Env, Input, ModData) ->
%% in small chunks at the time during generation.
erl_scheme_webpage_chunk(Mod, Func, Env, Input, ModData) ->
process_flag(trap_exit, true),
- ?hdrt("erl_scheme_webpage_chunk", [{module, Mod}, {function, Func}]),
Self = self(),
%% Spawn worker that generates the webpage.
%% It would be nicer to use erlang:function_exported/3 but if the
@@ -386,7 +375,9 @@ erl_scheme_webpage_chunk(Mod, Func, Env, Input, ModData) ->
{'EXIT', {undef,_}} ->
%% Will force fallback on the old API
exit(erl_scheme_webpage_chunk_undefined);
- _ ->
+ {continue, _} = Continue ->
+ exit(Continue);
+ _ ->
ok
end
end),
@@ -400,13 +391,12 @@ deliver_webpage_chunk(#mod{config_db = Db} = ModData, Pid) ->
deliver_webpage_chunk(ModData, Pid, Timeout).
deliver_webpage_chunk(#mod{config_db = Db} = ModData, Pid, Timeout) ->
- ?hdrt("deliver_webpage_chunk", [{timeout, Timeout}]),
case receive_headers(Timeout) of
{error, Reason} ->
%% Happens when webpage generator callback/3 is undefined
- ?hdrv("deliver_webpage_chunk - failed receiving headers",
- [{reason, Reason}]),
{error, Reason};
+ {continue, _} = Continue ->
+ Continue;
{Headers, Body} ->
case httpd_esi:handle_headers(Headers) of
{proceed, AbsPath} ->
@@ -430,7 +420,6 @@ deliver_webpage_chunk(#mod{config_db = Db} = ModData, Pid, Timeout) ->
IsDisableChunkedSend)
end;
timeout ->
- ?hdrv("deliver_webpage_chunk - timeout", []),
send_headers(ModData, 504, [{"connection", "close"}]),
httpd_socket:close(ModData#mod.socket_type, ModData#mod.socket),
{proceed,[{response, {already_sent, 200, 0}} | ModData#mod.data]}
@@ -439,16 +428,14 @@ deliver_webpage_chunk(#mod{config_db = Db} = ModData, Pid, Timeout) ->
receive_headers(Timeout) ->
receive
{esi_data, Chunk} ->
- ?hdrt("receive_headers - received esi data (esi)", []),
httpd_esi:parse_headers(lists:flatten(Chunk));
{ok, Chunk} ->
- ?hdrt("receive_headers - received esi data (ok)", []),
httpd_esi:parse_headers(lists:flatten(Chunk));
{'EXIT', Pid, erl_scheme_webpage_chunk_undefined} when is_pid(Pid) ->
- ?hdrd("receive_headers - exit:chunk-undef", []),
{error, erl_scheme_webpage_chunk_undefined};
- {'EXIT', Pid, Reason} when is_pid(Pid) ->
- ?hdrv("receive_headers - exit", [{reason, Reason}]),
+ {'EXIT', Pid, {continue, _} = Continue} when is_pid(Pid) ->
+ Continue;
+ {'EXIT', Pid, Reason} when is_pid(Pid) ->
exit({mod_esi_linked_process_died, Pid, Reason})
after Timeout ->
timeout
@@ -463,7 +450,6 @@ handle_body(_, #mod{method = "HEAD"} = ModData, _, _, Size, _) ->
{proceed, [{response, {already_sent, 200, Size}} | ModData#mod.data]};
handle_body(Pid, ModData, Body, Timeout, Size, IsDisableChunkedSend) ->
- ?hdrt("handle_body - send chunk", [{timeout, Timeout}, {size, Size}]),
httpd_response:send_chunk(ModData, Body, IsDisableChunkedSend),
receive
{esi_data, Data} when is_binary(Data) ->
@@ -543,7 +529,6 @@ eval(#mod{request_uri = ReqUri,
method = "PUT",
http_version = Version,
data = Data}, _ESIBody, _Modules) ->
- ?hdrt("eval", [{method, put}]),
{proceed,[{status,{501,{"PUT", ReqUri, Version},
?NICE("Eval mechanism doesn't support method PUT")}}|
Data]};
@@ -552,7 +537,6 @@ eval(#mod{request_uri = ReqUri,
method = "DELETE",
http_version = Version,
data = Data}, _ESIBody, _Modules) ->
- ?hdrt("eval", [{method, delete}]),
{proceed,[{status,{501,{"DELETE", ReqUri, Version},
?NICE("Eval mechanism doesn't support method DELETE")}}|
Data]};
@@ -561,14 +545,12 @@ eval(#mod{request_uri = ReqUri,
method = "POST",
http_version = Version,
data = Data}, _ESIBody, _Modules) ->
- ?hdrt("eval", [{method, post}]),
{proceed,[{status,{501,{"POST", ReqUri, Version},
?NICE("Eval mechanism doesn't support method POST")}}|
Data]};
eval(#mod{method = Method} = ModData, ESIBody, Modules)
when (Method =:= "GET") orelse (Method =:= "HEAD") ->
- ?hdrt("eval", [{method, Method}]),
case is_authorized(ESIBody, Modules) of
true ->
case generate_webpage(ESIBody) of
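The {continue, _} clauses threaded through traverse_modules, erl_scheme_webpage_chunk, receive_headers and deliver_webpage_chunk let an ESI callback hand control back to the server between body chunks when max_client_body_chunk is configured (see the httpd_SUITE post tests further down). As a hypothetical, assumption-laden sketch only: the chunk-tuple shapes and the {continue, State} return are not spelled out in this diff and are inferred from the test's httpd_example:post_chunked callback.

    %% Hypothetical chunk-aware ESI callback; tuple shapes are assumptions.
    post_chunked(_SessionID, _Env, {first, Data}) ->
        {continue, [Data]};                       %% ask the server for the next chunk
    post_chunked(_SessionID, _Env, {continue, Data, Acc}) ->
        {continue, [Data | Acc]};
    post_chunked(SessionID, _Env, {last, Data, Acc}) ->
        Body = lists:reverse([Data | Acc]),
        mod_esi:deliver(SessionID, "Content-Type: text/plain\r\n\r\n"),
        mod_esi:deliver(SessionID,
                        io_lib:format("received ~p bytes~n", [iolist_size(Body)])).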
diff --git a/lib/inets/src/http_server/mod_log.erl b/lib/inets/src/http_server/mod_log.erl
index ad7e9713d9..ec570504be 100644
--- a/lib/inets/src/http_server/mod_log.erl
+++ b/lib/inets/src/http_server/mod_log.erl
@@ -105,8 +105,8 @@ do(Info) ->
Code = proplists:get_value(code,Head,unknown),
transfer_log(Info, "-", AuthUser, Date, Code, Size),
{proceed, Info#mod.data};
- {_StatusCode, Response} ->
- transfer_log(Info,"-",AuthUser,Date,200,
+ {StatusCode, Response} ->
+ transfer_log(Info, "-", AuthUser, Date, StatusCode,
httpd_util:flatlength(Response)),
{proceed,Info#mod.data};
undefined ->
diff --git a/lib/inets/src/inets_app/inets.appup.src b/lib/inets/src/inets_app/inets.appup.src
index f9ad8709d9..a86413147c 100644
--- a/lib/inets/src/inets_app/inets.appup.src
+++ b/lib/inets/src/inets_app/inets.appup.src
@@ -18,14 +18,10 @@
%% %CopyrightEnd%
{"%VSN%",
[
- {<<"6.2.4">>, [{load_module, httpd_request_handler,
- soft_purge, soft_purge, []}]},
{<<"6\\..*">>,[{restart_application, inets}]},
{<<"5\\..*">>,[{restart_application, inets}]}
],
[
- {<<"6.2.4">>, [{load_module, httpd_request_handler,
- soft_purge, soft_purge, []}]},
{<<"6\\..*">>,[{restart_application, inets}]},
{<<"5\\..*">>,[{restart_application, inets}]}
]
diff --git a/lib/inets/test/httpd_SUITE.erl b/lib/inets/test/httpd_SUITE.erl
index 055b847319..6c8728470b 100644
--- a/lib/inets/test/httpd_SUITE.erl
+++ b/lib/inets/test/httpd_SUITE.erl
@@ -73,6 +73,8 @@ all() ->
{group, http_reload},
{group, https_reload},
{group, http_mime_types},
+ {group, http_logging},
+ {group, http_post},
mime_types_format
].
@@ -96,8 +98,10 @@ groups() ->
{https_htaccess, [], [{group, htaccess}]},
{http_security, [], [{group, security}]},
{https_security, [], [{group, security}]},
+ {http_logging, [], [{group, logging}]},
{http_reload, [], [{group, reload}]},
{https_reload, [], [{group, reload}]},
+ {http_post, [], [{group, post}]},
{http_mime_types, [], [alias_1_1, alias_1_0, alias_0_9]},
{limit, [], [max_clients_1_1, max_clients_1_0, max_clients_0_9]},
{custom, [], [customize, add_default]},
@@ -110,6 +114,7 @@ groups() ->
disturbing_1_0,
disturbing_0_9
]},
+ {post, [], [chunked_post, chunked_chunked_encoded_post]},
{basic_auth, [], [basic_auth_1_1, basic_auth_1_0, basic_auth_0_9]},
{auth_api, [], [auth_api_1_1, auth_api_1_0, auth_api_0_9
]},
@@ -119,6 +124,8 @@ groups() ->
]},
{htaccess, [], [htaccess_1_1, htaccess_1_0, htaccess_0_9]},
{security, [], [security_1_1, security_1_0]}, %% Skip 0.9 as it causes timing issues in test code
+ {logging, [], [disk_log_internal, disk_log_exists,
+ disk_log_bad_size, disk_log_bad_file]},
{http_1_1, [],
[host, chunked, expect, cgi, cgi_chunked_encoding_test,
trace, range, if_modified_since, mod_esi_chunk_timeout,
@@ -148,6 +155,7 @@ http_get() ->
ipv6
].
+
load() ->
[light, medium
%%,heavy
@@ -214,6 +222,7 @@ init_per_group(Group, Config0) when Group == http_basic;
Group == http_auth_api_mnesia;
Group == http_security;
Group == http_reload;
+ Group == http_post;
Group == http_mime_types
->
ok = start_apps(Group),
@@ -254,6 +263,11 @@ init_per_group(auth_api_dets, Config) ->
init_per_group(auth_api_mnesia, Config) ->
start_mnesia(proplists:get_value(node, Config)),
[{auth_prefix, "mnesia_"} | Config];
+init_per_group(http_logging, Config) ->
+ Config1 = [{http_version, "HTTP/1.1"} | Config],
+ ServerRoot = proplists:get_value(server_root, Config1),
+ Path = ServerRoot ++ "/httpd_log_transfer",
+ [{transfer_log, Path} | Config1];
init_per_group(_, Config) ->
Config.
@@ -266,6 +280,7 @@ end_per_group(Group, _Config) when Group == http_basic;
Group == http_htaccess;
Group == http_security;
Group == http_reload;
+ Group == http_post;
Group == http_mime_types
->
inets:stop();
@@ -290,7 +305,7 @@ end_per_group(_, _) ->
%%--------------------------------------------------------------------
init_per_testcase(Case, Config) when Case == host; Case == trace ->
- ct:timetrap({seconds, 20}),
+ ct:timetrap({seconds, 40}),
Prop = proplists:get_value(tc_group_properties, Config),
Name = proplists:get_value(name, Prop),
Cb = case Name of
@@ -310,10 +325,60 @@ init_per_testcase(range, Config) ->
create_range_data(DocRoot),
dbg(range, Config, init);
+init_per_testcase(disk_log_internal, Config0) ->
+ ok = start_apps(http_logging),
+ Config1 = init_httpd(http_logging, [{type, ip_comm} | Config0]),
+ ct:timetrap({seconds, 20}),
+ dbg(disk_log_internal, Config1, init);
+
+init_per_testcase(disk_log_exists, Config0) ->
+ ServerRoot = proplists:get_value(server_root, Config0),
+ Filename = ServerRoot ++ "/httpd_log_transfer",
+ {ok, Log} = disk_log:open([{name, Filename}, {file, Filename},
+ {repair, truncate}, {format, internal},
+ {type, wrap}, {size, {1048576, 5}}]),
+ ok = disk_log:log(Log, {bogus, node(), self()}),
+ ok = disk_log:close(Log),
+ ok = start_apps(http_logging),
+ Config1 = init_httpd(http_logging, [{type, ip_comm} | Config0]),
+ ct:timetrap({seconds, 20}),
+ dbg(disk_log_exists, Config1, init);
+
+init_per_testcase(disk_log_bad_size, Config0) ->
+ ServerRoot = proplists:get_value(server_root, Config0),
+ Filename = ServerRoot ++ "/httpd_log_transfer",
+ {ok, Log} = disk_log:open([{name, Filename}, {file, Filename},
+ {repair, truncate}, {format, internal},
+ {type, wrap}, {size, {1048576, 5}}]),
+ ok = disk_log:log(Log, {bogus, node(), self()}),
+ ok = disk_log:close(Log),
+ ok = file:delete(Filename ++ ".siz"),
+ ok = start_apps(http_logging),
+ Config1 = init_httpd(http_logging, [{type, ip_comm} | Config0]),
+ ct:timetrap({seconds, 20}),
+ dbg(disk_log_bad_size, Config1, init);
+
+init_per_testcase(disk_log_bad_file, Config0) ->
+ ServerRoot = proplists:get_value(server_root, Config0),
+ Filename = ServerRoot ++ "/httpd_log_transfer",
+ ok = file:write_file(Filename ++ ".1", <<>>),
+ ok = start_apps(http_logging),
+ Config1 = init_httpd(http_logging, [{type, ip_comm} | Config0]),
+ ct:timetrap({seconds, 20}),
+ dbg(disk_log_bad_file, Config1, init);
+
init_per_testcase(Case, Config) ->
ct:timetrap({seconds, 20}),
dbg(Case, Config, init).
+end_per_testcase(Case, Config) when
+ Case == disk_log_internal;
+ Case == disk_log_exists;
+ Case == disk_log_bad_size;
+ Case == disk_log_bad_file ->
+ inets:stop(),
+ dbg(Case, Config, 'end');
+
end_per_testcase(Case, Config) ->
dbg(Case, Config, 'end').
@@ -618,6 +683,51 @@ ipv6(Config) when is_list(Config) ->
end.
%%-------------------------------------------------------------------------
+chunked_post() ->
+ [{doc,"Test option max_client_body_chunk"}].
+chunked_post(Config) when is_list(Config) ->
+ ok = http_status("POST /cgi-bin/erl/httpd_example:post_chunked ",
+ {"Content-Length:833 \r\n",
+ "ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ"
+ "ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ"
+ "ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ"
+ "ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ"
+ "ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ"
+ "ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ"
+ "ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ"
+ "ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ"
+ "ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ"
+ "ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ"
+ "ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ"
+ "ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ"
+ "ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ"
+ "ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ"
+ "ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ"
+ "ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ"
+ "ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ"},
+ [{http_version, "HTTP/1.1"} |Config],
+ [{statuscode, 200}]),
+ ok = http_status("POST /cgi-bin/erl/httpd_example:post_chunked ",
+ {"Content-Length:2 \r\n",
+ "ZZ"
+ },
+ [{http_version, "HTTP/1.1"} |Config],
+ [{statuscode, 200}]).
+
+chunked_chunked_encoded_post() ->
+ [{doc,"Test option max_client_body_chunk with chunked client encoding"}].
+chunked_chunked_encoded_post(Config) when is_list(Config) ->
+ Chunk = http_chunk:encode("ZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ"),
+ LastChunk = http_chunk:encode_last(),
+ Chunks = lists:duplicate(10000, Chunk),
+ ok = http_status("POST /cgi-bin/erl/httpd_example:post_chunked ",
+ {"Transfer-Encoding:chunked \r\n",
+ [Chunks | LastChunk]},
+ [{http_version, "HTTP/1.1"} | Config],
+ [{statuscode, 200}]).
+
+
+%%-------------------------------------------------------------------------
htaccess_1_1(Config) when is_list(Config) ->
htaccess([{http_version, "HTTP/1.1"} | Config]).
@@ -1257,6 +1367,63 @@ security(Config) ->
true = unblock_user(Node, "two", Port, OpenDir).
%%-------------------------------------------------------------------------
+
+disk_log_internal() ->
+ ["Test mod_disk_log"].
+
+disk_log_internal(Config) ->
+ Version = proplists:get_value(http_version, Config),
+ Request = "GET /" ++ integer_to_list(rand:uniform(1000000)) ++ " ",
+ ok = http_status(Request, Config, [{statuscode, 404}]),
+ Log = proplists:get_value(transfer_log, Config),
+ Match = list_to_binary(Request ++ Version),
+ disk_log_internal1(Log, Match, disk_log:chunk(Log, start)).
+disk_log_internal1(_, _, eof) ->
+ ct:fail(eof);
+disk_log_internal1(Log, Match, {Cont, [H | T]}) ->
+ case binary:match(H, Match) of
+ nomatch ->
+ disk_log_internal1(Log, Match, {Cont, T});
+ _ ->
+ ok
+ end;
+disk_log_internal1(Log, Match, {Cont, []}) ->
+ disk_log_internal1(Log, Match, disk_log:chunk(Log, Cont)).
+
+disk_log_exists() ->
+ ["Test mod_disk_log with existing logs"].
+
+disk_log_exists(Config) ->
+ Log = proplists:get_value(transfer_log, Config),
+ Self = self(),
+ Node = node(),
+ {_, [{bogus, Node, Self} | _]} = disk_log:chunk(Log, start).
+
+disk_log_bad_size() ->
+ ["Test mod_disk_log with existing log, missing .siz"].
+
+disk_log_bad_size(Config) ->
+ Log = proplists:get_value(transfer_log, Config),
+ Self = self(),
+ Node = node(),
+ {_, [{bogus, Node, Self} | _]} = disk_log:chunk(Log, start).
+
+disk_log_bad_file() ->
+ ["Test mod_disk_log with bad file"].
+
+disk_log_bad_file(Config) ->
+ Log = proplists:get_value(transfer_log, Config),
+ Version = proplists:get_value(http_version, Config),
+ Request = "GET /" ++ integer_to_list(rand:uniform(1000000)) ++ " ",
+ ok = http_status(Request, Config, [{statuscode, 404}]),
+ Match = list_to_binary(Request ++ Version),
+ {_, [H | _]} = disk_log:chunk(Log, start),
+ {_, _} = binary:match(H, Match).
+
+%%-------------------------------------------------------------------------
non_disturbing_reconfiger_dies(Config) when is_list(Config) ->
do_reconfiger_dies([{http_version, "HTTP/1.1"} | Config], non_disturbing).
disturbing_reconfiger_dies(Config) when is_list(Config) ->
@@ -1567,7 +1734,9 @@ start_apps(Group) when Group == http_basic;
Group == http_auth_api_mnesia;
Group == http_htaccess;
Group == http_security;
+ Group == http_logging;
Group == http_reload;
+ Group == http_post;
Group == http_mime_types->
inets_test_lib:start_apps([inets]).
@@ -1614,6 +1783,8 @@ server_config(https_basic, Config) ->
basic_conf() ++ server_config(https, Config);
server_config(http_reload, Config) ->
[{keep_alive_timeout, 2}] ++ server_config(http, Config);
+server_config(http_post, Config) ->
+ [{max_client_body_chunk, 10}] ++ server_config(http, Config);
server_config(https_reload, Config) ->
[{keep_alive_timeout, 2}] ++ server_config(https, Config);
server_config(http_limit, Config) ->
@@ -1662,6 +1833,8 @@ server_config(http_security, Config) ->
server_config(https_security, Config) ->
ServerRoot = proplists:get_value(server_root, Config),
tl(auth_conf(ServerRoot)) ++ security_conf(ServerRoot) ++ server_config(https, Config);
+server_config(http_logging, Config) ->
+ log_conf() ++ server_config(http, Config);
server_config(http_mime_types, Config0) ->
Config1 = basic_conf() ++ server_config(http, Config0),
ServerRoot = proplists:get_value(server_root, Config0),
@@ -1863,6 +2036,16 @@ mod_security_conf(SecFile, Dir) ->
{path, Dir} %% This should not be needed, but currently is; awful design!
].
+log_conf() ->
+ [{modules, [mod_alias, mod_dir, mod_get, mod_head, mod_disk_log]},
+ {transfer_disk_log, "httpd_log_transfer"},
+ {security_disk_log, "httpd_log_security"},
+ {error_disk_log, "httpd_log_error"},
+ {transfer_disk_log_size, {1048576, 5}},
+ {error_disk_log_size, {1048576, 5}},
+ {security_disk_log_size, {1048576, 5}},
+ {disk_log_format, internal}].
http_status(Request, Config, Expected) ->
Version = proplists:get_value(http_version, Config),
diff --git a/lib/inets/test/uri_SUITE.erl b/lib/inets/test/uri_SUITE.erl
index 3e7799141c..f973296af6 100644
--- a/lib/inets/test/uri_SUITE.erl
+++ b/lib/inets/test/uri_SUITE.erl
@@ -277,8 +277,8 @@ encode_decode(Config) when is_list(Config) ->
?assertEqual("foo%20bar", http_uri:encode("foo bar")),
?assertEqual(<<"foo%20bar">>, http_uri:encode(<<"foo bar">>)),
- ?assertEqual("foo bar", http_uri:decode("foo+bar")),
- ?assertEqual(<<"foo bar">>, http_uri:decode(<<"foo+bar">>)),
+ ?assertEqual("foo+bar", http_uri:decode("foo+bar")),
+ ?assertEqual(<<"foo+bar">>, http_uri:decode(<<"foo+bar">>)),
?assertEqual("foo bar", http_uri:decode("foo%20bar")),
?assertEqual(<<"foo bar">>, http_uri:decode(<<"foo%20bar">>)),
?assertEqual("foo\r\n", http_uri:decode("foo%0D%0A")),
diff --git a/lib/inets/vsn.mk b/lib/inets/vsn.mk
index 96796f11c0..c4314f1ab5 100644
--- a/lib/inets/vsn.mk
+++ b/lib/inets/vsn.mk
@@ -19,6 +19,6 @@
# %CopyrightEnd%
APPLICATION = inets
-INETS_VSN = 6.4
+INETS_VSN = 6.4.1
PRE_VSN =
APP_VSN = "$(APPLICATION)-$(INETS_VSN)$(PRE_VSN)"
diff --git a/lib/kernel/doc/src/disk_log.xml b/lib/kernel/doc/src/disk_log.xml
index 1be28adfb8..884cb32c0c 100644
--- a/lib/kernel/doc/src/disk_log.xml
+++ b/lib/kernel/doc/src/disk_log.xml
@@ -972,7 +972,7 @@
<item>
<p>Specifies if messages will be sent to
<c>error_logger</c> on recoverable errors with
- the log files. Defaults to <c>true</c>.</p>
+ the log files. Defaults to <c>false</c>.</p>
</item>
</taglist>
<p><c>open/1</c> returns <c>{ok, <anno>Log</anno>}</c> if the
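For context, the option whose documented default is corrected here is given at open time. A minimal hedged sketch (the log name and file are made up):

    %% Suppress error_logger reports about recoverable errors with this log.
    {ok, Log} = disk_log:open([{name, my_log},
                               {file, "my_log.LOG"},
                               {quiet, true}]),
    ok = disk_log:log(Log, {sample_event, erlang:timestamp()}),
    ok = disk_log:close(Log).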
diff --git a/lib/kernel/doc/src/inet.xml b/lib/kernel/doc/src/inet.xml
index b71e8a1e5d..169a76463b 100644
--- a/lib/kernel/doc/src/inet.xml
+++ b/lib/kernel/doc/src/inet.xml
@@ -222,11 +222,18 @@ fe80::204:acff:fe17:bf38
<name name="get_rc" arity="0"/>
<fsummary>Return a list of IP configuration parameters.</fsummary>
<desc>
- <p>Returns the state of the <c>Inet</c> configuration database in
+ <p>
+ Returns the state of the <c>Inet</c> configuration database in
the form of a list of recorded configuration parameters. For more
information, see <seealso marker="erts:inet_cfg">ERTS User's Guide:
Inet Configuration</seealso>.
- Only parameters with other than default values are returned.</p>
+ </p>
+ <p>
+ Only parameters with values other than the default are
+ returned; directives that point to other sources of
+ configuration parameters, and directives that clear
+ parameters, are not included.
+ </p>
</desc>
</func>
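A hedged illustration of the kind of list get_rc/0 returns; the entries and values below are made up and depend entirely on the local Inet configuration:

    1> inet:get_rc().
    [{domain,"example.com"},
     {nameserver,{192,168,1,1}},
     {lookup,[file,dns]}]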
diff --git a/lib/kernel/doc/src/notes.xml b/lib/kernel/doc/src/notes.xml
index e1cf45109d..9cd03ffcad 100644
--- a/lib/kernel/doc/src/notes.xml
+++ b/lib/kernel/doc/src/notes.xml
@@ -31,6 +31,21 @@
</header>
<p>This document describes the changes made to the Kernel application.</p>
+<section><title>Kernel 5.3.1</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>The documentation for the 'quiet' option in
+ disk_log:open/1 had an incorrect default value.</p>
+ <p>
+ Own Id: OTP-14498</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>Kernel 5.3</title>
<section><title>Fixed Bugs and Malfunctions</title>
diff --git a/lib/kernel/src/error_logger.erl b/lib/kernel/src/error_logger.erl
index 9bf8547745..f07ec52cfa 100644
--- a/lib/kernel/src/error_logger.erl
+++ b/lib/kernel/src/error_logger.erl
@@ -499,27 +499,16 @@ display4(A = [_|_]) ->
display4(A) ->
erlang:display(A).
-string_p([]) ->
+
+string_p(Term) when is_list(Term) ->
+ string_p1(lists:flatten(Term));
+string_p(_Term) ->
+ false.
+
+string_p1([]) ->
false;
-string_p(Term) ->
- string_p1(Term).
-
-string_p1([H|T]) when is_integer(H), H >= $\s, H < 255 ->
- string_p1(T);
-string_p1([$\n|T]) -> string_p1(T);
-string_p1([$\r|T]) -> string_p1(T);
-string_p1([$\t|T]) -> string_p1(T);
-string_p1([$\v|T]) -> string_p1(T);
-string_p1([$\b|T]) -> string_p1(T);
-string_p1([$\f|T]) -> string_p1(T);
-string_p1([$\e|T]) -> string_p1(T);
-string_p1([H|T]) when is_list(H) ->
- case string_p1(H) of
- true -> string_p1(T);
- _ -> false
- end;
-string_p1([]) -> true;
-string_p1(_) -> false.
+string_p1(FlatList) ->
+ io_lib:printable_list(FlatList).
-spec limit_term(term()) -> term().
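The rewritten check flattens the possibly deep list once and then defers to io_lib:printable_list/1, so the "is this a string?" decision now follows the standard printability rules instead of a hand-rolled character walk. A standalone sketch of the same idea (like the new code, it keeps the old behaviour of treating [] as not a string):

    is_string_like(Term) when is_list(Term) ->
        case lists:flatten(Term) of
            []   -> false;                        %% [] was never treated as a string
            Flat -> io_lib:printable_list(Flat)
        end;
    is_string_like(_Term) ->
        false.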
diff --git a/lib/kernel/src/erts_debug.erl b/lib/kernel/src/erts_debug.erl
index ad92aafc2f..480db6814e 100644
--- a/lib/kernel/src/erts_debug.erl
+++ b/lib/kernel/src/erts_debug.erl
@@ -33,10 +33,10 @@
-export([breakpoint/2, disassemble/1, display/1, dist_ext_to_term/2,
dump_monitors/1, dump_links/1, flat_size/1,
- get_internal_state/1, instructions/0, lock_counters/1,
+ get_internal_state/1, instructions/0,
map_info/1, same/2, set_internal_state/2,
- size_shared/1, copy_shared/1, dirty_cpu/2, dirty_io/2,
- dirty/3]).
+ size_shared/1, copy_shared/1, dirty_cpu/2, dirty_io/2, dirty/3,
+ lcnt_control/1, lcnt_control/2, lcnt_collect/0, lcnt_clear/0]).
-spec breakpoint(MFA, Flag) -> non_neg_integer() when
MFA :: {Module :: module(),
@@ -142,12 +142,31 @@ ic(F) when is_function(F) ->
io:format("Total: ~w~n",[lists:sum([C||{_I,C}<-Is])]),
R.
--spec lock_counters(info) -> term();
- (clear) -> ok;
- ({copy_save, boolean()}) -> boolean();
- ({process_locks, boolean()}) -> boolean().
+-spec lcnt_control
+ (copy_save, boolean()) -> ok;
+ (mask, list(atom())) -> ok.
-lock_counters(_) ->
+lcnt_control(_Option, _Value) ->
+ erlang:nif_error(undef).
+
+-spec lcnt_control
+ (copy_save) -> boolean();
+ (mask) -> list(atom()).
+
+lcnt_control(_Option) ->
+ erlang:nif_error(undef).
+
+-type lcnt_lock_info() :: {atom(), term(), atom(), term()}.
+
+-spec lcnt_collect() ->
+ list({duration, {non_neg_integer(), non_neg_integer()}} |
+ {locks, list(lcnt_lock_info())}).
+
+lcnt_collect() ->
+ erlang:nif_error(undef).
+
+-spec lcnt_clear() -> ok.
+lcnt_clear() ->
erlang:nif_error(undef).
-spec same(Term1, Term2) -> boolean() when
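The removed lock_counters/1 is replaced by the four lcnt_* stubs above. A hedged usage sketch based only on the -spec lines shown; it assumes an emulator built with lock counting, and uses proplists to avoid assuming the order of the collected tuples:

    lcnt_snapshot() ->
        ok = erts_debug:lcnt_clear(),             %% reset all lock counters
        timer:sleep(1000),                        %% let some contention accumulate
        Mask = erts_debug:lcnt_control(mask),     %% which lock categories are counted
        Data = erts_debug:lcnt_collect(),         %% {duration, _} and {locks, _} entries
        Locks = proplists:get_value(locks, Data, []),
        {Mask, length(Locks)}.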
diff --git a/lib/kernel/src/file_server.erl b/lib/kernel/src/file_server.erl
index 6504174cbc..6e8f64d932 100644
--- a/lib/kernel/src/file_server.erl
+++ b/lib/kernel/src/file_server.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2000-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2000-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -207,7 +207,7 @@ handle_call(stop, _From, Handle) ->
{stop, normal, stopped, Handle};
handle_call(Request, From, Handle) ->
- error_logger:error_msg("handle_call(~p, ~p, _)", [Request, From]),
+ error_logger:error_msg("handle_call(~tp, ~tp, _)", [Request, From]),
{noreply, Handle}.
%%----------------------------------------------------------------------
@@ -220,7 +220,7 @@ handle_call(Request, From, Handle) ->
-spec handle_cast(term(), state()) -> {'noreply', state()}.
handle_cast(Msg, State) ->
- error_logger:error_msg("handle_cast(~p, _)", [Msg]),
+ error_logger:error_msg("handle_cast(~tp, _)", [Msg]),
{noreply, State}.
%%----------------------------------------------------------------------
@@ -243,7 +243,7 @@ handle_info({'EXIT', Handle, _Reason}, Handle) ->
{stop, normal, Handle};
handle_info(Info, State) ->
- error_logger:error_msg("handle_Info(~p, _)", [Info]),
+ error_logger:error_msg("handle_Info(~tp, _)", [Info]),
{noreply, State}.
%%----------------------------------------------------------------------
diff --git a/lib/kernel/src/global.erl b/lib/kernel/src/global.erl
index a9e92b28b8..a38522eb5c 100644
--- a/lib/kernel/src/global.erl
+++ b/lib/kernel/src/global.erl
@@ -262,7 +262,7 @@ check_dupname(Name, Pid) ->
{ok, allow} ->
true;
_ ->
- S = "global: ~w registered under several names: ~w\n",
+ S = "global: ~w registered under several names: ~tw\n",
Names = [Name | [Name1 || {_Pid, Name1} <- PidNames]],
error_logger:error_msg(S, [Pid, Names]),
false
@@ -659,7 +659,7 @@ handle_call(stop, _From, S) ->
handle_call(Request, From, S) ->
error_logger:warning_msg("The global_name_server "
"received an unexpected message:\n"
- "handle_call(~p, ~p, _)\n",
+ "handle_call(~tp, ~tp, _)\n",
[Request, From]),
{noreply, S}.
@@ -828,7 +828,7 @@ handle_cast({async_del_lock, _ResourceId, _Pid}, S) ->
handle_cast(Request, S) ->
error_logger:warning_msg("The global_name_server "
"received an unexpected message:\n"
- "handle_cast(~p, _)\n", [Request]),
+ "handle_cast(~tp, _)\n", [Request]),
{noreply, S}.
%%========================================================================
@@ -955,7 +955,7 @@ handle_info({'DOWN', MonitorRef, process, _Pid, _Info}, S0) ->
handle_info(Message, S) ->
error_logger:warning_msg("The global_name_server "
"received an unexpected message:\n"
- "handle_info(~p, _)\n", [Message]),
+ "handle_info(~tp, _)\n", [Message]),
{noreply, S}.
@@ -1949,13 +1949,13 @@ exchange_names([{Name, Pid, Method} | Tail], Node, Ops, Res) ->
exchange_names(Tail, Node, [Op | Ops], [Op | Res]);
{badrpc, Badrpc} ->
error_logger:info_msg("global: badrpc ~w received when "
- "conflicting name ~w was found\n",
+ "conflicting name ~tw was found\n",
[Badrpc, Name]),
Op = {insert, {Name, Pid, Method}},
exchange_names(Tail, Node, [Op | Ops], Res);
Else ->
error_logger:info_msg("global: Resolve method ~w for "
- "conflicting name ~w returned ~w\n",
+ "conflicting name ~tw returned ~tw\n",
[Method, Name, Else]),
Op = {delete, Name},
exchange_names(Tail, Node, [Op | Ops], [Op | Res])
@@ -1984,7 +1984,7 @@ minmax(P1,P2) ->
Pid2 :: pid().
random_exit_name(Name, Pid, Pid2) ->
{Min, Max} = minmax(Pid, Pid2),
- error_logger:info_msg("global: Name conflict terminating ~w\n",
+ error_logger:info_msg("global: Name conflict terminating ~tw\n",
[{Name, Max}]),
exit(Max, kill),
Min.
@@ -2200,7 +2200,7 @@ unexpected_message({'EXIT', _Pid, _Reason}, _What) ->
ok;
unexpected_message(Message, What) ->
error_logger:warning_msg("The global_name_server ~w process "
- "received an unexpected message:\n~p\n",
+ "received an unexpected message:\n~tp\n",
[What, Message]).
%%% Utilities
diff --git a/lib/kernel/src/inet.erl b/lib/kernel/src/inet.erl
index 6aef5476f1..dc20c21c77 100644
--- a/lib/kernel/src/inet.erl
+++ b/lib/kernel/src/inet.erl
@@ -151,7 +151,8 @@
%%% ---------------------------------
--spec get_rc() -> [{Par :: any(), Val :: any()}].
+-spec get_rc() -> [{Par :: atom(), Val :: any()} |
+ {Par :: atom(), Val1 :: any(), Val2 :: any()}].
get_rc() ->
inet_db:get_rc().
diff --git a/lib/kernel/src/kernel.app.src b/lib/kernel/src/kernel.app.src
index e150938487..2a11b04310 100644
--- a/lib/kernel/src/kernel.app.src
+++ b/lib/kernel/src/kernel.app.src
@@ -120,6 +120,6 @@
{applications, []},
{env, [{error_logger, tty}]},
{mod, {kernel, []}},
- {runtime_dependencies, ["erts-9.0", "stdlib-3.0", "sasl-3.0"]}
+ {runtime_dependencies, ["erts-9.1", "stdlib-3.0", "sasl-3.0"]}
]
}.
diff --git a/lib/kernel/src/kernel.appup.src b/lib/kernel/src/kernel.appup.src
index 77085b2064..f1ef70a373 100644
--- a/lib/kernel/src/kernel.appup.src
+++ b/lib/kernel/src/kernel.appup.src
@@ -18,7 +18,7 @@
%% %CopyrightEnd%
{"%VSN%",
%% Up from - max one major revision back
- [{<<"5\\.[0-2](\\.[0-9]+)*">>,[restart_new_emulator]}], % OTP-19.*
+ [{<<"5\\.[0-3](\\.[0-9]+)*">>,[restart_new_emulator]}], % OTP-19.*, OTP-20.0
%% Down to - max one major revision back
- [{<<"5\\.[0-2](\\.[0-9]+)*">>,[restart_new_emulator]}] % OTP-19.*
+ [{<<"5\\.[0-3](\\.[0-9]+)*">>,[restart_new_emulator]}] % OTP-19.*, OTP-20.0
}.
diff --git a/lib/kernel/src/net_kernel.erl b/lib/kernel/src/net_kernel.erl
index ddda396713..7da89dd7cb 100644
--- a/lib/kernel/src/net_kernel.erl
+++ b/lib/kernel/src/net_kernel.erl
@@ -778,7 +778,7 @@ handle_info(transition_period_end,
{noreply,State#state{tick = #tick{ticker = Tckr, time = T}}};
handle_info(X, State) ->
- error_msg("Net kernel got ~w~n",[X]),
+ error_msg("Net kernel got ~tw~n",[X]),
{noreply,State}.
%% -----------------------------------------------------------
diff --git a/lib/kernel/src/pg2.erl b/lib/kernel/src/pg2.erl
index edf4aedde2..c4732f37ee 100644
--- a/lib/kernel/src/pg2.erl
+++ b/lib/kernel/src/pg2.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1997-2016. All Rights Reserved.
+%% Copyright Ericsson AB 1997-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -199,7 +199,7 @@ handle_call({delete, Name}, _From, S) ->
{reply, ok, S};
handle_call(Request, From, S) ->
error_logger:warning_msg("The pg2 server received an unexpected message:\n"
- "handle_call(~p, ~p, _)\n",
+ "handle_call(~tp, ~tp, _)\n",
[Request, From]),
{noreply, S}.
diff --git a/lib/kernel/src/user_drv.erl b/lib/kernel/src/user_drv.erl
index b794d4f45e..99ea4210bd 100644
--- a/lib/kernel/src/user_drv.erl
+++ b/lib/kernel/src/user_drv.erl
@@ -175,6 +175,18 @@ server_loop(Iport, Oport, Curr, User, Gr, {Resp, IOQ} = IOQueue) ->
{Iport,eof} ->
Curr ! {self(),eof},
server_loop(Iport, Oport, Curr, User, Gr, IOQueue);
+
+ %% We always handle geometry and unicode requests
+ {Requester,tty_geometry} ->
+ Requester ! {self(),tty_geometry,get_tty_geometry(Iport)},
+ server_loop(Iport, Oport, Curr, User, Gr, IOQueue);
+ {Requester,get_unicode_state} ->
+ Requester ! {self(),get_unicode_state,get_unicode_state(Iport)},
+ server_loop(Iport, Oport, Curr, User, Gr, IOQueue);
+ {Requester,set_unicode_state, Bool} ->
+ Requester ! {self(),set_unicode_state,set_unicode_state(Iport,Bool)},
+ server_loop(Iport, Oport, Curr, User, Gr, IOQueue);
+
Req when element(1,Req) =:= User orelse element(1,Req) =:= Curr,
tuple_size(Req) =:= 2 orelse tuple_size(Req) =:= 3 ->
%% We match {User|Curr,_}|{User|Curr,_,_}
@@ -224,21 +236,16 @@ server_loop(Iport, Oport, Curr, User, Gr, {Resp, IOQ} = IOQueue) ->
_ -> % not current, just remove it
server_loop(Iport, Oport, Curr, User, gr_del_pid(Gr, Pid), IOQueue)
end;
+ {Requester, {put_chars_sync, _, _, Reply}} ->
+ %% We need to ack the request, otherwise the originating process will hang forever.
+ %% Discard the output to non-visible shells (as was done previously).
+ Requester ! {reply, Reply},
+ server_loop(Iport, Oport, Curr, User, Gr, IOQueue);
_X ->
- %% Ignore unknown messages.
- server_loop(Iport, Oport, Curr, User, Gr, IOQueue)
+ %% Ignore unknown messages.
+ server_loop(Iport, Oport, Curr, User, Gr, IOQueue)
end.
-%% We always handle geometry and unicode requests
-handle_req({Curr,tty_geometry},Iport,_Oport,IOQueue) ->
- Curr ! {self(),tty_geometry,get_tty_geometry(Iport)},
- IOQueue;
-handle_req({Curr,get_unicode_state},Iport,_Oport,IOQueue) ->
- Curr ! {self(),get_unicode_state,get_unicode_state(Iport)},
- IOQueue;
-handle_req({Curr,set_unicode_state, Bool},Iport,_Oport,IOQueue) ->
- Curr ! {self(),set_unicode_state,set_unicode_state(Iport,Bool)},
- IOQueue;
handle_req(next,Iport,Oport,{false,IOQ}=IOQueue) ->
case queue:out(IOQ) of
{empty,_} ->
diff --git a/lib/kernel/test/Makefile b/lib/kernel/test/Makefile
index b9942e899f..efe3a68531 100644
--- a/lib/kernel/test/Makefile
+++ b/lib/kernel/test/Makefile
@@ -148,8 +148,8 @@ release_tests_spec: make_emakefile
$(INSTALL_DIR) "$(RELSYSDIR)"
$(INSTALL_DATA) $(ERL_FILES) "$(RELSYSDIR)"
$(INSTALL_DATA) $(APP_FILES) "$(RELSYSDIR)"
- $(INSTALL_DATA) kernel.spec kernel_smoke.spec $(EMAKEFILE)\
- $(COVERFILE) "$(RELSYSDIR)"
+ $(INSTALL_DATA) kernel.spec kernel_smoke.spec kernel_bench.spec \
+ $(EMAKEFILE) $(COVERFILE) "$(RELSYSDIR)"
chmod -R u+w "$(RELSYSDIR)"
@tar cf - *_SUITE_data | (cd "$(RELSYSDIR)"; tar xf -)
diff --git a/lib/kernel/test/code_SUITE.erl b/lib/kernel/test/code_SUITE.erl
index 6f8e949aac..612f77149d 100644
--- a/lib/kernel/test/code_SUITE.erl
+++ b/lib/kernel/test/code_SUITE.erl
@@ -35,6 +35,7 @@
purge_stacktrace/1, mult_lib_roots/1, bad_erl_libs/1,
code_archive/1, code_archive2/1, on_load/1, on_load_binary/1,
on_load_embedded/1, on_load_errors/1, on_load_update/1,
+ on_load_trace_on_load/1,
on_load_purge/1, on_load_self_call/1, on_load_pending/1,
on_load_deleted/1,
big_boot_embedded/1,
@@ -66,14 +67,16 @@ all() ->
ext_mod_dep, clash, where_is_file,
purge_stacktrace, mult_lib_roots,
bad_erl_libs, code_archive, code_archive2, on_load,
- on_load_binary, on_load_embedded, on_load_errors, on_load_update,
+ on_load_binary, on_load_embedded, on_load_errors,
+ {group, sequence},
on_load_purge, on_load_self_call, on_load_pending,
on_load_deleted,
module_status,
big_boot_embedded, native_early_modules, get_mode, normalized_paths].
-groups() ->
- [].
+%% These need to run in order
+groups() -> [{sequence, [sequence], [on_load_update,
+ on_load_trace_on_load]}].
init_per_group(_GroupName, Config) ->
Config.
@@ -1493,7 +1496,7 @@ do_on_load_error(ReturnValue) ->
{undef,[{on_load_error,main,[],_}|_]} = Exit
end.
-on_load_update(_Config) ->
+on_load_update(Config) ->
{Mod,Code1} = on_load_update_code(1),
{module,Mod} = code:load_binary(Mod, "", Code1),
42 = Mod:a(),
@@ -1503,7 +1506,7 @@ on_load_update(_Config) ->
{Mod,Code2} = on_load_update_code(2),
{error,on_load_failure} = code:load_binary(Mod, "", Code2),
42 = Mod:a(),
- 100 = Mod:b(99),
+ 78 = Mod:b(77),
{'EXIT',{undef,_}} = (catch Mod:never()),
4 = erlang:trace_pattern({Mod,'_','_'}, false),
@@ -1514,6 +1517,9 @@ on_load_update(_Config) ->
{'EXIT',{undef,_}} = (catch Mod:b(10)),
{'EXIT',{undef,_}} = (catch Mod:never()),
+ code:purge(Mod),
+ code:delete(Mod),
+ code:purge(Mod),
ok.
on_load_update_code(Version) ->
@@ -1545,6 +1551,40 @@ on_load_update_code_1(3, Mod) ->
"f() -> ok.\n",
"c() -> 100.\n"]).
+%% Test -on_load while trace feature 'on_load' is enabled (OTP-14612)
+on_load_trace_on_load(Config) ->
+ Papa = self(),
+ Tracer = spawn_link(fun F() -> receive M -> Papa ! M end, F() end),
+ {tracer,[]} = erlang:trace_info(self(),tracer),
+ erlang:trace(self(), true, [call, {tracer, Tracer}]),
+ erlang:trace_pattern(on_load, true, []),
+ on_load_update(Config),
+ erlang:trace_pattern(on_load, false, []),
+ erlang:trace(self(), false, [call]),
+
+ %% WE GET TRACES FOR CALLS TO UNDEFINED FUNCTIONS ???
+ %% Remove filter when that is fixed.
+ Ms = lists:filter(fun({trace,Papa,call,
+ {error_handler,undefined_function,
+ [on_load_update_code,_,_]}})
+ -> false;
+ (_) -> true
+ end,
+ flush()),
+
+ [{trace, Papa, call, {on_load_update_code, a, []}},
+ {trace, Papa, call, {on_load_update_code, b, [99]}},
+ {trace, Papa, call, {on_load_update_code, c, []}}] = Ms,
+
+ exit(Tracer, normal),
+ ok.
+
+flush() ->
+ receive M -> [M | flush()]
+ after 100 -> []
+ end.
+
+
on_load_purge(_Config) ->
Mod = ?FUNCTION_NAME,
register(Mod, self()),
diff --git a/lib/kernel/test/gen_tcp_misc_SUITE.erl b/lib/kernel/test/gen_tcp_misc_SUITE.erl
index 929f66d400..331864b5de 100644
--- a/lib/kernel/test/gen_tcp_misc_SUITE.erl
+++ b/lib/kernel/test/gen_tcp_misc_SUITE.erl
@@ -50,9 +50,8 @@
killing_acceptor/1,killing_multi_acceptors/1,killing_multi_acceptors2/1,
several_accepts_in_one_go/1, accept_system_limit/1,
active_once_closed/1, send_timeout/1, send_timeout_active/1,
- otp_7731/1, zombie_sockets/1, otp_7816/1, otp_8102/1,
- wrapping_oct/0, wrapping_oct/1,
- otp_9389/1]).
+ otp_7731/1, zombie_sockets/1, otp_7816/1, otp_8102/1,
+ wrapping_oct/0, wrapping_oct/1, otp_9389/1, otp_13939/1]).
%% Internal exports.
-export([sender/3, not_owner/1, passive_sockets_server/2, priority_server/1,
@@ -3014,3 +3013,42 @@ ok({ok,V}) -> V.
get_hostname(Name) ->
"@"++Host = lists:dropwhile(fun(C) -> C =/= $@ end, atom_to_list(Name)),
Host.
+
+otp_13939(doc) ->
+ ["Check that writing to a remotely closed socket doesn't block forever "
+ "when exit_on_close is false."];
+otp_13939(suite) ->
+ [];
+otp_13939(Config) when is_list(Config) ->
+ {Pid, Ref} = spawn_opt(
+ fun() ->
+ {ok, Listener} = gen_tcp:listen(0, [{exit_on_close, false}]),
+ {ok, Port} = inet:port(Listener),
+
+ spawn_link(
+ fun() ->
+ {ok, Client} = gen_tcp:connect("localhost", Port,
+ [{active, false}]),
+ ok = gen_tcp:close(Client)
+ end),
+
+ {ok, Accepted} = gen_tcp:accept(Listener),
+
+ ok = gen_tcp:send(Accepted, <<0:(10*1024*1024*8)>>),
+
+ %% The bug surfaces when there's a delay between the send
+ %% operations; inet:getstat is a red herring.
+ timer:sleep(100),
+
+ {error, Code} = gen_tcp:send(Accepted, <<0:(10*1024*1024*8)>>),
+ ct:pal("gen_tcp:send returned ~p~n", [Code])
+ end, [link, monitor]),
+
+ receive
+ {'DOWN', Ref, process, Pid, normal} ->
+ ok
+ after 1000 ->
+ demonitor(Ref, [flush]),
+ exit(Pid, normal),
+ ct:fail("Server process blocked on send.")
+ end.
diff --git a/lib/kernel/test/kernel_bench.spec b/lib/kernel/test/kernel_bench.spec
new file mode 100644
index 0000000000..8de60dae31
--- /dev/null
+++ b/lib/kernel/test/kernel_bench.spec
@@ -0,0 +1 @@
+{groups,"../kernel_test",zlib_SUITE,[bench]}.
diff --git a/lib/kernel/test/zlib_SUITE.erl b/lib/kernel/test/zlib_SUITE.erl
index 4b67fce9a8..e246276262 100644
--- a/lib/kernel/test/zlib_SUITE.erl
+++ b/lib/kernel/test/zlib_SUITE.erl
@@ -21,60 +21,56 @@
-module(zlib_SUITE).
-include_lib("common_test/include/ct.hrl").
-
--compile(export_all).
-
--define(error(Format,Args),
- put(test_server_loc,{?MODULE,?LINE}),
- error(Format,Args,?MODULE,?LINE)).
-
-%% Learn erts team how to really write tests ;-)
--define(m(ExpectedRes,Expr),
- fun() ->
- ACtual1 = (catch (Expr)),
- try case ACtual1 of
- ExpectedRes -> ACtual1
- end
- catch
- error:{case_clause,ACtuAl} ->
- ?error("Not Matching Actual result was:~n ~p ~n",
- [ACtuAl]),
- ACtuAl
- end
- end()).
-
--define(BARG, {'EXIT',{badarg,[{zlib,_,_,_}|_]}}).
--define(DATA_ERROR, {'EXIT',{data_error,[{zlib,_,_,_}|_]}}).
-
-init_per_testcase(_Func, Config) ->
- Config.
-
-end_per_testcase(_Func, _Config) ->
- ok.
-
-error(Format, Args, File, Line) ->
- io:format("~p:~p: ERROR: " ++ Format, [File,Line|Args]),
- group_leader() ! {failed, File, Line}.
-
-%% Hopefully I don't need this to get it to work with the testserver..
-%% Fail = #'REASON'{file = filename:basename(File),
-%% line = Line,
-%% desc = Args},
-%% case global:whereis_name(mnesia_test_case_sup) of
-%% undefined ->
-%% ignore;
-%% Pid ->
-%% Pid ! Fail
-%% %% global:send(mnesia_test_case_sup, Fail),
-%% end,
-%% log("<>ERROR<>~n" ++ Format, Args, File, Line).
+-include_lib("common_test/include/ct_event.hrl").
+
+-export([suite/0, all/0, groups/0]).
+
+%% API group
+-export([api_open_close/1]).
+-export([api_deflateInit/1, api_deflateSetDictionary/1, api_deflateReset/1,
+ api_deflateParams/1, api_deflate/1, api_deflateEnd/1]).
+-export([api_inflateInit/1, api_inflateReset/1, api_inflate2/1, api_inflate3/1,
+ api_inflateChunk/1, api_safeInflate/1, api_inflateEnd/1]).
+-export([api_inflateSetDictionary/1, api_inflateGetDictionary/1]).
+-export([api_crc32/1, api_adler32/1]).
+-export([api_un_compress/1, api_un_zip/1, api_g_un_zip/1]).
+
+%% Examples group
+-export([intro/1]).
+
+%% Usage group
+-export([zip_usage/1, gz_usage/1, gz_usage2/1, compress_usage/1,
+ dictionary_usage/1, large_deflate/1, crc/1, adler/1,
+ only_allow_owner/1, sub_heap_binaries/1]).
+
+%% Bench group
+-export([inflate_bench_zeroed/1, inflate_bench_rand/1,
+ deflate_bench_zeroed/1, deflate_bench_rand/1,
+ chunk_bench_zeroed/1, chunk_bench_rand/1]).
+
+%% Others
+-export([smp/1, otp_9981/1, otp_7359/1]).
+
+-define(m(Guard, Expression),
+ fun() ->
+ Actual = (catch (Expression)),
+ case Actual of
+ Guard -> Actual;
+ _Other ->
+ ct:fail("Failed to match ~p, actual result was ~p",
+ [??Guard, Actual])
+ end
+ end()).
+
+-define(EXIT(Reason), {'EXIT',{Reason,[{_,_,_,_}|_]}}).
suite() ->
[{ct_hooks,[ts_install_cth]},
{timetrap,{minutes,1}}].
all() ->
- [{group, api}, {group, examples}, {group, func}, smp,
+ [{group, api}, {group, examples}, {group, func},
+ {group, bench}, smp,
otp_9981,
otp_7359].
@@ -84,28 +80,19 @@ groups() ->
api_deflateSetDictionary, api_deflateReset,
api_deflateParams, api_deflate, api_deflateEnd,
api_inflateInit, api_inflateSetDictionary, api_inflateGetDictionary,
- api_inflateSync, api_inflateReset, api_inflate, api_inflateChunk,
- api_inflateEnd, api_setBufsz, api_getBufsz, api_crc32,
- api_adler32, api_getQSize, api_un_compress, api_un_zip,
+ api_inflateReset, api_inflate2, api_inflate3, api_inflateChunk,
+ api_safeInflate, api_inflateEnd, api_crc32,
+ api_adler32, api_un_compress, api_un_zip,
api_g_un_zip]},
{examples, [], [intro]},
{func, [],
[zip_usage, gz_usage, gz_usage2, compress_usage,
- dictionary_usage, large_deflate, crc, adler]}].
-
-init_per_suite(Config) ->
- Config.
-
-end_per_suite(_Config) ->
- ok.
-
-init_per_group(_GroupName, Config) ->
- Config.
-
-end_per_group(_GroupName, Config) ->
- Config.
-
-
+ dictionary_usage, large_deflate, crc, adler,
+ only_allow_owner, sub_heap_binaries]},
+ {bench,
+ [inflate_bench_zeroed, inflate_bench_rand,
+ deflate_bench_zeroed, deflate_bench_rand,
+ chunk_bench_zeroed, chunk_bench_rand]}].
%% Test open/0 and close/1.
api_open_close(Config) when is_list(Config) ->
@@ -113,7 +100,7 @@ api_open_close(Config) when is_list(Config) ->
Fd2 = zlib:open(),
?m(false,Fd1 == Fd2),
?m(ok,zlib:close(Fd1)),
- ?m(?BARG, zlib:close(Fd1)),
+ ?m(?EXIT(not_initialized), zlib:close(Fd1)),
?m(ok,zlib:close(Fd2)),
%% Make sure that we don't get any EXIT messages if trap_exit is enabled.
@@ -128,9 +115,11 @@ api_open_close(Config) when is_list(Config) ->
%% Test deflateInit/2 and /6.
api_deflateInit(Config) when is_list(Config) ->
Z1 = zlib:open(),
- ?m(?BARG, zlib:deflateInit(gurka, none)),
- ?m(?BARG, zlib:deflateInit(gurka, gurka)),
- ?m(?BARG, zlib:deflateInit(Z1, gurka)),
+
+ ?m(?EXIT(badarg), zlib:deflateInit(gurka, none)),
+
+ ?m(?EXIT(bad_compression_level), zlib:deflateInit(gurka, gurka)),
+ ?m(?EXIT(bad_compression_level), zlib:deflateInit(Z1, gurka)),
Levels = [none, default, best_speed, best_compression] ++ lists:seq(0,9),
lists:foreach(fun(Level) ->
Z = zlib:open(),
@@ -138,20 +127,30 @@ api_deflateInit(Config) when is_list(Config) ->
?m(ok,zlib:close(Z))
end, Levels),
%% /6
- ?m(?BARG, zlib:deflateInit(Z1,gurka,deflated,-15,8,default)),
-
- ?m(?BARG, zlib:deflateInit(Z1,default,undefined,-15,8,default)),
-
- ?m(?BARG, zlib:deflateInit(Z1,default,deflated,48,8,default)),
- ?m(?BARG, zlib:deflateInit(Z1,default,deflated,-20,8,default)),
- ?m(?BARG, zlib:deflateInit(Z1,default,deflated,-7,8,default)),
- ?m(?BARG, zlib:deflateInit(Z1,default,deflated,7,8,default)),
-
- ?m(?BARG, zlib:deflateInit(Z1,default,deflated,-15,0,default)),
- ?m(?BARG, zlib:deflateInit(Z1,default,deflated,-15,10,default)),
-
- ?m(?BARG, zlib:deflateInit(Z1,default,deflated,-15,8,0)),
- ?m(?BARG, zlib:deflateInit(Z1,default,deflated,-15,8,undefined)),
+ ?m(?EXIT(bad_compression_level),
+ zlib:deflateInit(Z1,gurka,deflated,-15,8,default)),
+
+ ?m(?EXIT(bad_compression_method),
+ zlib:deflateInit(Z1,default,undefined,-15,8,default)),
+
+ ?m(?EXIT(bad_compression_strategy),
+ zlib:deflateInit(Z1,default,deflated,-15,8,0)),
+ ?m(?EXIT(bad_compression_strategy),
+ zlib:deflateInit(Z1,default,deflated,-15,8,undefined)),
+
+ ?m(?EXIT(bad_windowbits),
+ zlib:deflateInit(Z1,default,deflated,48,8,default)),
+ ?m(?EXIT(bad_windowbits),
+ zlib:deflateInit(Z1,default,deflated,-20,8,default)),
+ ?m(?EXIT(bad_windowbits),
+ zlib:deflateInit(Z1,default,deflated,-7,8,default)),
+ ?m(?EXIT(bad_windowbits),
+ zlib:deflateInit(Z1,default,deflated,7,8,default)),
+
+ ?m(?EXIT(bad_memlevel),
+ zlib:deflateInit(Z1,default,deflated,-15,0,default)),
+ ?m(?EXIT(bad_memlevel),
+ zlib:deflateInit(Z1,default,deflated,-15,10,default)),
lists:foreach(fun(Level) ->
Z = zlib:open(),
@@ -183,7 +182,11 @@ api_deflateInit(Config) when is_list(Config) ->
?m(ok,zlib:close(Z))
end, Strategies),
?m(ok, zlib:deflateInit(Z1,default,deflated,-15,8,default)),
- ?m({'EXIT',_}, zlib:deflateInit(Z1,none,deflated,-15,8,default)), %% ??
+
+ %% Let it crash for any reason; we don't care about the order in which the
+ %% parameters are checked.
+ ?m(?EXIT(_), zlib:deflateInit(Z1,none,deflated,-15,8,default)),
+
?m(ok, zlib:close(Z1)).
%% Test deflateSetDictionary.
@@ -192,17 +195,17 @@ api_deflateSetDictionary(Config) when is_list(Config) ->
?m(ok, zlib:deflateInit(Z1, default)),
?m(Id when is_integer(Id), zlib:deflateSetDictionary(Z1, <<1,1,2,3,4,5,1>>)),
?m(Id when is_integer(Id), zlib:deflateSetDictionary(Z1, [1,1,2,3,4,5,1])),
- ?m(?BARG, zlib:deflateSetDictionary(Z1, gurka)),
- ?m(?BARG, zlib:deflateSetDictionary(Z1, 128)),
- ?m(_, zlib:deflate(Z1, <<1,1,1,1,1,1,1,1,1>>, none)),
- ?m({'EXIT',{stream_error,_}},zlib:deflateSetDictionary(Z1,<<1,1,2,3,4,5,1>>)),
+ ?m(?EXIT(badarg), zlib:deflateSetDictionary(Z1, gurka)),
+ ?m(?EXIT(badarg), zlib:deflateSetDictionary(Z1, 128)),
+ ?m(L when is_list(L), zlib:deflate(Z1, <<1,1,1,1,1,1,1,1,1>>, none)),
+ ?m(?EXIT(stream_error), zlib:deflateSetDictionary(Z1,<<1,1,2,3,4,5,1>>)),
?m(ok, zlib:close(Z1)).
%% Test deflateReset.
api_deflateReset(Config) when is_list(Config) ->
Z1 = zlib:open(),
?m(ok, zlib:deflateInit(Z1, default)),
- ?m(_, zlib:deflate(Z1, <<1,1,1,1,1,1,1,1,1>>, none)),
+ ?m(L when is_list(L), zlib:deflate(Z1, <<1,1,1,1,1,1,1,1,1>>, none)),
?m(ok, zlib:deflateReset(Z1)),
?m(ok, zlib:deflateReset(Z1)),
%% FIXME how do I make this go wrong??
@@ -212,9 +215,9 @@ api_deflateReset(Config) when is_list(Config) ->
api_deflateParams(Config) when is_list(Config) ->
Z1 = zlib:open(),
?m(ok, zlib:deflateInit(Z1, default)),
- ?m(_, zlib:deflate(Z1, <<1,1,1,1,1,1,1,1,1>>, none)),
+ ?m(L when is_list(L), zlib:deflate(Z1, <<1,1,1,1,1,1,1,1,1>>, none)),
?m(ok, zlib:deflateParams(Z1, best_compression, huffman_only)),
- ?m(_, zlib:deflate(Z1, <<1,1,1,1,1,1,1,1,1>>, sync)),
+ ?m(L when is_list(L), zlib:deflate(Z1, <<1,1,1,1,1,1,1,1,1>>, sync)),
?m(ok, zlib:close(Z1)).
%% Test deflate.
@@ -231,11 +234,13 @@ api_deflate(Config) when is_list(Config) ->
?m(B when is_list(B), zlib:deflate(Z1, <<1,1,1,1,1,1,1,1,1>>, full)),
?m(B when is_list(B), zlib:deflate(Z1, <<>>, finish)),
- ?m(?BARG, zlib:deflate(gurka, <<1,1,1,1,1,1,1,1,1>>, full)),
- ?m(?BARG, zlib:deflate(Z1, <<1,1,1,1,1,1,1,1,1>>, asdj)),
- ?m(?BARG, zlib:deflate(Z1, <<1,1,1,1,1,1,1,1,1>>, 198)),
+ ?m(?EXIT(badarg), zlib:deflate(gurka, <<1,1,1,1,1,1,1,1,1>>, full)),
+
+ ?m(?EXIT(bad_flush_mode), zlib:deflate(Z1, <<1,1,1,1,1,1,1,1,1>>, asdj)),
+ ?m(?EXIT(bad_flush_mode), zlib:deflate(Z1, <<1,1,1,1,1,1,1,1,1>>, 198)),
+
%% Causes problems ERROR REPORT
- ?m(?BARG, zlib:deflate(Z1, [asdj,asd], none)),
+ ?m(?EXIT(badarg), zlib:deflate(Z1, [asdj,asd], none)),
?m(ok, zlib:close(Z1)).
@@ -244,11 +249,11 @@ api_deflateEnd(Config) when is_list(Config) ->
Z1 = zlib:open(),
?m(ok, zlib:deflateInit(Z1, default)),
?m(ok, zlib:deflateEnd(Z1)),
- ?m({'EXIT', {einval,_}}, zlib:deflateEnd(Z1)), %% ??
- ?m(?BARG, zlib:deflateEnd(gurka)),
+ ?m(?EXIT(not_initialized), zlib:deflateEnd(Z1)),
+ ?m(?EXIT(badarg), zlib:deflateEnd(gurka)),
?m(ok, zlib:deflateInit(Z1, default)),
?m(B when is_list(B), zlib:deflate(Z1, <<"Kilroy was here">>)),
- ?m({'EXIT', {data_error,_}}, zlib:deflateEnd(Z1)),
+ ?m(?EXIT(data_error), zlib:deflateEnd(Z1)),
?m(ok, zlib:deflateInit(Z1, default)),
?m(B when is_list(B), zlib:deflate(Z1, <<"Kilroy was here">>)),
?m(B when is_list(B), zlib:deflate(Z1, <<"Kilroy was here">>, finish)),
@@ -259,9 +264,9 @@ api_deflateEnd(Config) when is_list(Config) ->
%% Test inflateInit /1 and /2.
api_inflateInit(Config) when is_list(Config) ->
Z1 = zlib:open(),
- ?m(?BARG, zlib:inflateInit(gurka)),
+ ?m(?EXIT(badarg), zlib:inflateInit(gurka)),
?m(ok, zlib:inflateInit(Z1)),
- ?m({'EXIT',{einval,_}}, zlib:inflateInit(Z1, 15)), %% ??
+ ?m(?EXIT(already_initialized), zlib:inflateInit(Z1, 15)),
lists:foreach(fun(Wbits) ->
Z11 = zlib:open(),
?m(ok, zlib:inflateInit(Z11,Wbits)),
@@ -270,33 +275,34 @@ api_inflateInit(Config) when is_list(Config) ->
?m(ok,zlib:close(Z11)),
?m(ok,zlib:close(Z12))
end, lists:seq(8,15)),
- ?m(?BARG, zlib:inflateInit(gurka, -15)),
- ?m(?BARG, zlib:inflateInit(Z1, 7)),
- ?m(?BARG, zlib:inflateInit(Z1, -7)),
- ?m(?BARG, zlib:inflateInit(Z1, 48)),
- ?m(?BARG, zlib:inflateInit(Z1, -16)),
+ ?m(?EXIT(badarg), zlib:inflateInit(gurka, -15)),
+ ?m(?EXIT(already_initialized), zlib:inflateInit(Z1, 7)),
+ ?m(?EXIT(already_initialized), zlib:inflateInit(Z1, -7)),
+ ?m(?EXIT(already_initialized), zlib:inflateInit(Z1, 48)),
+ ?m(?EXIT(already_initialized), zlib:inflateInit(Z1, -16)),
?m(ok, zlib:close(Z1)).
%% Test inflateSetDictionary.
api_inflateSetDictionary(Config) when is_list(Config) ->
Z1 = zlib:open(),
?m(ok, zlib:inflateInit(Z1)),
- ?m(?BARG, zlib:inflateSetDictionary(gurka,<<1,1,1,1,1>>)),
- ?m(?BARG, zlib:inflateSetDictionary(Z1,102)),
- ?m(?BARG, zlib:inflateSetDictionary(Z1,gurka)),
+ ?m(?EXIT(badarg), zlib:inflateSetDictionary(gurka,<<1,1,1,1,1>>)),
+ ?m(?EXIT(badarg), zlib:inflateSetDictionary(Z1,102)),
+ ?m(?EXIT(badarg), zlib:inflateSetDictionary(Z1,gurka)),
Dict = <<1,1,1,1,1>>,
- ?m({'EXIT',{stream_error,_}}, zlib:inflateSetDictionary(Z1,Dict)),
+ ?m(?EXIT(stream_error), zlib:inflateSetDictionary(Z1,Dict)),
?m(ok, zlib:close(Z1)).
%% Test inflateGetDictionary.
api_inflateGetDictionary(Config) when is_list(Config) ->
Z1 = zlib:open(),
+ zlib:inflateInit(Z1),
IsOperationSupported =
case catch zlib:inflateGetDictionary(Z1) of
- {'EXIT',{einval,_}} -> true;
- {'EXIT',{enotsup,_}} -> false
+ ?EXIT(not_supported) -> false;
+ _ -> true
end,
- _ = zlib:close(Z1),
+ zlib:close(Z1),
api_inflateGetDictionary_if_supported(IsOperationSupported).
api_inflateGetDictionary_if_supported(false) ->
@@ -306,64 +312,53 @@ api_inflateGetDictionary_if_supported(true) ->
Z1 = zlib:open(),
?m(ok, zlib:deflateInit(Z1)),
Dict = <<"foobar barfoo foo bar far boo">>,
- ?m(_, zlib:deflateSetDictionary(Z1, Dict)),
+ Checksum = zlib:deflateSetDictionary(Z1, Dict),
Payload = <<"foobarbarbar">>,
Compressed = zlib:deflate(Z1, Payload, finish),
?m(ok, zlib:close(Z1)),
- % Decompress and test dictionary extraction
+ % Decompress and test dictionary extraction with inflate/2
Z2 = zlib:open(),
?m(ok, zlib:inflateInit(Z2)),
?m(<<>>, iolist_to_binary(zlib:inflateGetDictionary(Z2))),
- ?m({'EXIT',{stream_error,_}}, zlib:inflateSetDictionary(Z2, Dict)),
- ?m({'EXIT',{{need_dictionary,_},_}}, zlib:inflate(Z2, Compressed)),
+ ?m(?EXIT(stream_error), zlib:inflateSetDictionary(Z2, Dict)),
+ ?m(?EXIT({need_dictionary,Checksum}), zlib:inflate(Z2, Compressed)),
?m(ok, zlib:inflateSetDictionary(Z2, Dict)),
?m(Dict, iolist_to_binary(zlib:inflateGetDictionary(Z2))),
- ?m(Payload, iolist_to_binary(zlib:inflate(Z2, Compressed))),
+ Payload = iolist_to_binary(zlib:inflate(Z2, [])),
?m(ok, zlib:close(Z2)),
- ?m(?BARG, zlib:inflateSetDictionary(Z2, Dict)),
- ok.
+ ?m(?EXIT(not_initialized), zlib:inflateSetDictionary(Z2, Dict)),
-%% Test inflateSync.
-api_inflateSync(Config) when is_list(Config) ->
- {skip,"inflateSync/1 sucks"}.
-%% Z1 = zlib:open(),
-%% ?m(ok, zlib:deflateInit(Z1)),
-%% B1list0 = zlib:deflate(Z1, "gurkan gurra ger galna tunnor", full),
-%% B2 = zlib:deflate(Z1, "grodan boll", finish),
-%% io:format("~p\n", [B1list0]),
-%% io:format("~p\n", [B2]),
-%% ?m(ok, zlib:deflateEnd(Z1)),
-%% B1 = clobber(14, list_to_binary(B1list0)),
-%% Compressed = list_to_binary([B1,B2]),
-%% io:format("~p\n", [Compressed]),
-
-%% ?m(ok, zlib:inflateInit(Z1)),
-%% ?m(?BARG, zlib:inflateSync(gurka)),
-%% ?m({'EXIT',{data_error,_}}, zlib:inflate(Z1, Compressed)),
-%% ?m(ok, zlib:inflateSync(Z1)),
-%% Ubs = zlib:inflate(Z1, []),
-%% <<"grodan boll">> = list_to_binary(Ubs),
-%% ?m(ok, zlib:close(Z1)).
-
-clobber(N, Bin) when is_binary(Bin) ->
- T = list_to_tuple(binary_to_list(Bin)),
- Byte = case element(N, T) of
- 255 -> 254;
- B -> B+1
- end,
- list_to_binary(tuple_to_list(setelement(N, T, Byte))).
+ %% ... And do the same for inflate/3
+ Z3 = zlib:open(),
+ ?m(ok, zlib:inflateInit(Z3)),
+ ?m(<<>>, iolist_to_binary(zlib:inflateGetDictionary(Z3))),
+ ?m(?EXIT(stream_error), zlib:inflateSetDictionary(Z3, Dict)),
+
+ {need_dictionary, Checksum, _Output = []} =
+ zlib:inflate(Z3, Compressed, [{exception_on_need_dict, false}]),
+
+ ?m(ok, zlib:inflateSetDictionary(Z3, Dict)),
+ ?m(Dict, iolist_to_binary(zlib:inflateGetDictionary(Z3))),
+
+ Payload = iolist_to_binary(
+ zlib:inflate(Z3, [], [{exception_on_need_dict, false}])),
+
+ ?m(ok, zlib:close(Z3)),
+ ?m(?EXIT(not_initialized), zlib:inflateSetDictionary(Z3, Dict)),
+
+ ok.
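The inflate/3 assertions above also sketch how dictionary negotiation works without exceptions: passing {exception_on_need_dict, false} turns the need_dictionary exit into an ordinary return value that carries the Adler-32 checksum of the required dictionary. A minimal caller-side sketch, assuming a hypothetical Fetch/1 lookup from that checksum to the matching dictionary binary (not part of this suite):

    inflate_with_dict(Z, Data, Fetch) ->
        case zlib:inflate(Z, Data, [{exception_on_need_dict, false}]) of
            {need_dictionary, Adler, _Output} ->
                %% Fetch/1 is a caller-supplied, hypothetical lookup; any output
                %% produced before the dictionary request is ignored in this sketch.
                ok = zlib:inflateSetDictionary(Z, Fetch(Adler)),
                zlib:inflate(Z, [], [{exception_on_need_dict, false}]);
            Decompressed ->
                Decompressed
        end.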
%% Test inflateReset.
api_inflateReset(Config) when is_list(Config) ->
Z1 = zlib:open(),
?m(ok, zlib:inflateInit(Z1)),
- ?m(?BARG, zlib:inflateReset(gurka)),
+ ?m(?EXIT(badarg), zlib:inflateReset(gurka)),
?m(ok, zlib:inflateReset(Z1)),
?m(ok, zlib:close(Z1)).
-%% Test inflate.
-api_inflate(Config) when is_list(Config) ->
+%% Test inflate/2.
+api_inflate2(Config) when is_list(Config) ->
Data = [<<1,2,2,3,3,3,4,4,4,4>>],
Compressed = zlib:compress(Data),
Z1 = zlib:open(),
@@ -373,12 +368,32 @@ api_inflate(Config) when is_list(Config) ->
?m(ok, zlib:inflateEnd(Z1)),
?m(ok, zlib:inflateInit(Z1)),
?m(Data, zlib:inflate(Z1, Compressed)),
- ?m(?BARG, zlib:inflate(gurka, Compressed)),
- ?m(?BARG, zlib:inflate(Z1, 4384)),
- ?m(?BARG, zlib:inflate(Z1, [atom_list])),
+ ?m(?EXIT(badarg), zlib:inflate(gurka, Compressed)),
+ ?m(?EXIT(badarg), zlib:inflate(Z1, 4384)),
+ ?m(?EXIT(badarg), zlib:inflate(Z1, [atom_list])),
?m(ok, zlib:inflateEnd(Z1)),
?m(ok, zlib:inflateInit(Z1)),
- ?m({'EXIT',{data_error,_}}, zlib:inflate(Z1, <<2,1,2,1,2>>)),
+ ?m(?EXIT(data_error), zlib:inflate(Z1, <<2,1,2,1,2>>)),
+ ?m(ok, zlib:close(Z1)).
+
+%% Test inflate/3; same as inflate/2 but with exception_on_need_dict set to false.
+api_inflate3(Config) when is_list(Config) ->
+ Data = [<<1,2,2,3,3,3,4,4,4,4>>],
+ Options = [{exception_on_need_dict, false}],
+ Compressed = zlib:compress(Data),
+ Z1 = zlib:open(),
+ ?m(ok, zlib:inflateInit(Z1)),
+ ?m([], zlib:inflate(Z1, <<>>, Options)),
+ ?m(Data, zlib:inflate(Z1, Compressed)),
+ ?m(ok, zlib:inflateEnd(Z1)),
+ ?m(ok, zlib:inflateInit(Z1)),
+ ?m(Data, zlib:inflate(Z1, Compressed, Options)),
+ ?m(?EXIT(badarg), zlib:inflate(gurka, Compressed, Options)),
+ ?m(?EXIT(badarg), zlib:inflate(Z1, 4384, Options)),
+ ?m(?EXIT(badarg), zlib:inflate(Z1, [atom_list], Options)),
+ ?m(ok, zlib:inflateEnd(Z1)),
+ ?m(ok, zlib:inflateInit(Z1)),
+ ?m(?EXIT(data_error), zlib:inflate(Z1, <<2,1,2,1,2>>, Options)),
?m(ok, zlib:close(Z1)).
%% Test inflateChunk.
@@ -388,69 +403,105 @@ api_inflateChunk(Config) when is_list(Config) ->
Part1 = binary:part(Data, 0, ChunkSize),
Part2 = binary:part(Data, ChunkSize, ChunkSize),
Part3 = binary:part(Data, ChunkSize * 2, ChunkSize),
+
Compressed = zlib:compress(Data),
Z1 = zlib:open(),
+
zlib:setBufSize(Z1, ChunkSize),
+
?m(ok, zlib:inflateInit(Z1)),
- ?m([], zlib:inflateChunk(Z1, <<>>)),
- ?m({more, Part1}, zlib:inflateChunk(Z1, Compressed)),
- ?m({more, Part2}, zlib:inflateChunk(Z1)),
- ?m(Part3, zlib:inflateChunk(Z1)),
- ?m(ok, zlib:inflateEnd(Z1)),
+ 0 = iolist_size(zlib:inflateChunk(Z1, <<>>)),
+
+ {more, Part1AsIOList} = zlib:inflateChunk(Z1, Compressed),
+ {more, Part2AsIOList} = zlib:inflateChunk(Z1),
+ {more, Part3AsIOList} = zlib:inflateChunk(Z1),
+ [] = zlib:inflateChunk(Z1),
+
+ ?m(Part1, iolist_to_binary(Part1AsIOList)),
+ ?m(Part2, iolist_to_binary(Part2AsIOList)),
+ ?m(Part3, iolist_to_binary(Part3AsIOList)),
+
+ ?m(ok, zlib:inflateEnd(Z1)),
?m(ok, zlib:inflateInit(Z1)),
- ?m({more, Part1}, zlib:inflateChunk(Z1, Compressed)),
+
+ ?m({more, Part1AsIOList}, zlib:inflateChunk(Z1, Compressed)),
?m(ok, zlib:inflateReset(Z1)),
- zlib:setBufSize(Z1, size(Data)),
- ?m(Data, zlib:inflateChunk(Z1, Compressed)),
- ?m(ok, zlib:inflateEnd(Z1)),
+ zlib:setBufSize(Z1, byte_size(Data) + 1),
+
+ DataAsIOList = zlib:inflateChunk(Z1, Compressed),
+ ?m(Data, iolist_to_binary(DataAsIOList)),
+ ?m(ok, zlib:inflateEnd(Z1)),
?m(ok, zlib:inflateInit(Z1)),
- ?m(?BARG, zlib:inflateChunk(gurka, Compressed)),
- ?m(?BARG, zlib:inflateChunk(Z1, 4384)),
- ?m({'EXIT',{data_error,_}}, zlib:inflateEnd(Z1)),
+
+ ?m(?EXIT(badarg), zlib:inflateChunk(gurka, Compressed)),
+ ?m(?EXIT(badarg), zlib:inflateChunk(Z1, 4384)),
+
+ ?m(?EXIT(data_error), zlib:inflateEnd(Z1)),
+
?m(ok, zlib:close(Z1)).
-%% Test inflateEnd.
-api_inflateEnd(Config) when is_list(Config) ->
+%% Test safeInflate as a mirror of inflateChunk, but without asserting the
+%% exact chunk sizes.
+api_safeInflate(Config) when is_list(Config) ->
+ Data = << <<(I rem 150)>> || I <- lists:seq(1, 20 bsl 10) >>,
+ Compressed = zlib:compress(Data),
Z1 = zlib:open(),
- ?m({'EXIT',{einval,_}}, zlib:inflateEnd(Z1)),
- ?m(ok, zlib:inflateInit(Z1)),
- ?m(?BARG, zlib:inflateEnd(gurka)),
- ?m({'EXIT',{data_error,_}}, zlib:inflateEnd(Z1)),
- ?m({'EXIT',{einval,_}}, zlib:inflateEnd(Z1)),
+
?m(ok, zlib:inflateInit(Z1)),
- ?m(B when is_list(B), zlib:inflate(Z1, zlib:compress("abc"))),
+
+ SafeInflateLoop =
+ fun
+ Loop({continue, Chunk}, Output) ->
+ Loop(zlib:safeInflate(Z1, []), [Output, Chunk]);
+ Loop({finished, Chunk}, Output) ->
+ [Output, Chunk]
+ end,
+
+ Decompressed = SafeInflateLoop(zlib:safeInflate(Z1, Compressed), []),
+ Data = iolist_to_binary(Decompressed),
+
?m(ok, zlib:inflateEnd(Z1)),
- ?m(ok, zlib:close(Z1)).
+ ?m(ok, zlib:inflateInit(Z1)),
-%% Test getBufsz.
-api_getBufsz(Config) when is_list(Config) ->
- Z1 = zlib:open(),
- ?m(Val when is_integer(Val), zlib:getBufSize(Z1)),
- ?m(?BARG, zlib:getBufSize(gurka)),
- ?m(ok, zlib:close(Z1)).
+ {continue, Partial} = zlib:safeInflate(Z1, Compressed),
+ PBin = iolist_to_binary(Partial),
+ PSize = byte_size(PBin),
+ <<PBin:PSize/binary, Rest/binary>> = Data,
-%% Test setBufsz.
-api_setBufsz(Config) when is_list(Config) ->
- Z1 = zlib:open(),
- ?m(?BARG, zlib:setBufSize(Z1, gurka)),
- ?m(?BARG, zlib:setBufSize(gurka, 1232330)),
- Sz = ?m( Val when is_integer(Val), zlib:getBufSize(Z1)),
- ?m(ok, zlib:setBufSize(Z1, Sz*2)),
- DSz = Sz*2,
- ?m(DSz, zlib:getBufSize(Z1)),
+ ?m(ok, zlib:inflateReset(Z1)),
+
+ {continue, Partial} = zlib:safeInflate(Z1, Compressed),
+ PBin = iolist_to_binary(Partial),
+ PSize = byte_size(PBin),
+ <<PBin:PSize/binary, Rest/binary>> = Data,
+
+ ?m(ok, zlib:inflateReset(Z1)),
+
+ SafeInflateLoop(zlib:safeInflate(Z1, Compressed), []),
+
+ ?m(?EXIT(data_error), zlib:safeInflate(Z1, Compressed)),
+
+ ?m(ok, zlib:inflateReset(Z1)),
+ ?m(?EXIT(badarg), zlib:safeInflate(gurka, Compressed)),
+ ?m(?EXIT(badarg), zlib:safeInflate(Z1, 4384)),
+ ?m(?EXIT(data_error), zlib:inflateEnd(Z1)),
?m(ok, zlib:close(Z1)).
-%%% Debug function ??
-%% Test getQSize.
-api_getQSize(Config) when is_list(Config) ->
+%% Test inflateEnd.
+api_inflateEnd(Config) when is_list(Config) ->
Z1 = zlib:open(),
- Q = ?m(Val when is_integer(Val), zlib:getQSize(Z1)),
- io:format("QSize ~p ~n", [Q]),
- ?m(?BARG, zlib:getQSize(gurka)),
+ ?m(?EXIT(not_initialized), zlib:inflateEnd(Z1)),
+ ?m(ok, zlib:inflateInit(Z1)),
+ ?m(?EXIT(badarg), zlib:inflateEnd(gurka)),
+ ?m(?EXIT(data_error), zlib:inflateEnd(Z1)),
+ ?m(?EXIT(not_initialized), zlib:inflateEnd(Z1)),
+ ?m(ok, zlib:inflateInit(Z1)),
+ ?m(B when is_list(B), zlib:inflate(Z1, zlib:compress("abc"))),
+ ?m(ok, zlib:inflateEnd(Z1)),
?m(ok, zlib:close(Z1)).
%% Test crc32.
@@ -458,8 +509,8 @@ api_crc32(Config) when is_list(Config) ->
Z1 = zlib:open(),
?m(ok, zlib:deflateInit(Z1,best_speed,deflated,-15,8,default)),
Bin = <<1,1,1,1,1,1,1,1,1>>,
- Compressed1 = ?m(_, zlib:deflate(Z1, Bin, none)),
- Compressed2 = ?m(_, zlib:deflate(Z1, <<>>, finish)),
+ Compressed1 = ?m(L when is_list(L), zlib:deflate(Z1, Bin, none)),
+ Compressed2 = ?m(L when is_list(L), zlib:deflate(Z1, <<>>, finish)),
Compressed = list_to_binary(Compressed1 ++ Compressed2),
CRC1 = ?m( CRC1 when is_integer(CRC1), zlib:crc32(Z1)),
?m(CRC1 when is_integer(CRC1), zlib:crc32(Z1,Bin)),
@@ -467,15 +518,15 @@ api_crc32(Config) when is_list(Config) ->
?m(CRC2 when is_integer(CRC2), zlib:crc32(Z1,Compressed)),
CRC2 = ?m(CRC2 when is_integer(CRC2), zlib:crc32(Z1,0,Compressed)),
?m(CRC3 when CRC2 /= CRC3, zlib:crc32(Z1,234,Compressed)),
- ?m(?BARG, zlib:crc32(gurka)),
- ?m(?BARG, zlib:crc32(Z1, not_a_binary)),
- ?m(?BARG, zlib:crc32(gurka, <<1,1,2,4,4>>)),
- ?m(?BARG, zlib:crc32(Z1, 2298929, not_a_binary)),
- ?m(?BARG, zlib:crc32(Z1, not_an_int, <<123,123,123,35,231>>)),
- ?m(?BARG, zlib:crc32_combine(Z1, not_an_int, 123123, 123)),
- ?m(?BARG, zlib:crc32_combine(Z1, noint, 123123, 123)),
- ?m(?BARG, zlib:crc32_combine(Z1, 123123, noint, 123)),
- ?m(?BARG, zlib:crc32_combine(Z1, 123123, 123, noint)),
+ ?m(?EXIT(badarg), zlib:crc32(gurka)),
+ ?m(?EXIT(badarg), zlib:crc32(Z1, not_a_binary)),
+ ?m(?EXIT(badarg), zlib:crc32(gurka, <<1,1,2,4,4>>)),
+ ?m(?EXIT(badarg), zlib:crc32(Z1, 2298929, not_a_binary)),
+ ?m(?EXIT(badarg), zlib:crc32(Z1, not_an_int, <<123,123,123,35,231>>)),
+ ?m(?EXIT(badarg), zlib:crc32_combine(Z1, not_an_int, 123123, 123)),
+ ?m(?EXIT(badarg), zlib:crc32_combine(Z1, noint, 123123, 123)),
+ ?m(?EXIT(badarg), zlib:crc32_combine(Z1, 123123, noint, 123)),
+ ?m(?EXIT(badarg), zlib:crc32_combine(Z1, 123123, 123, noint)),
?m(ok, zlib:deflateEnd(Z1)),
?m(ok, zlib:close(Z1)).
@@ -484,74 +535,115 @@ api_adler32(Config) when is_list(Config) ->
Z1 = zlib:open(),
?m(ok, zlib:deflateInit(Z1,best_speed,deflated,-15,8,default)),
Bin = <<1,1,1,1,1,1,1,1,1>>,
- Compressed1 = ?m(_, zlib:deflate(Z1, Bin, none)),
- Compressed2 = ?m(_, zlib:deflate(Z1, <<>>, finish)),
+ Compressed1 = ?m(L when is_list(L), zlib:deflate(Z1, Bin, none)),
+ Compressed2 = ?m(L when is_list(L), zlib:deflate(Z1, <<>>, finish)),
Compressed = list_to_binary(Compressed1 ++ Compressed2),
?m(ADLER1 when is_integer(ADLER1), zlib:adler32(Z1,Bin)),
?m(ADLER1 when is_integer(ADLER1), zlib:adler32(Z1,binary_to_list(Bin))),
ADLER2 = ?m(ADLER2 when is_integer(ADLER2), zlib:adler32(Z1,Compressed)),
?m(ADLER2 when is_integer(ADLER2), zlib:adler32(Z1,1,Compressed)),
?m(ADLER3 when ADLER2 /= ADLER3, zlib:adler32(Z1,234,Compressed)),
- ?m(?BARG, zlib:adler32(Z1, not_a_binary)),
- ?m(?BARG, zlib:adler32(gurka, <<1,1,2,4,4>>)),
- ?m(?BARG, zlib:adler32(Z1, 2298929, not_a_binary)),
- ?m(?BARG, zlib:adler32(Z1, not_an_int, <<123,123,123,35,231>>)),
- ?m(?BARG, zlib:adler32_combine(Z1, noint, 123123, 123)),
- ?m(?BARG, zlib:adler32_combine(Z1, 123123, noint, 123)),
- ?m(?BARG, zlib:adler32_combine(Z1, 123123, 123, noint)),
+ ?m(?EXIT(badarg), zlib:adler32(Z1, not_a_binary)),
+ ?m(?EXIT(badarg), zlib:adler32(gurka, <<1,1,2,4,4>>)),
+ ?m(?EXIT(badarg), zlib:adler32(Z1, 2298929, not_a_binary)),
+ ?m(?EXIT(badarg), zlib:adler32(Z1, not_an_int, <<123,123,123,35,231>>)),
+ ?m(?EXIT(badarg), zlib:adler32_combine(Z1, noint, 123123, 123)),
+ ?m(?EXIT(badarg), zlib:adler32_combine(Z1, 123123, noint, 123)),
+ ?m(?EXIT(badarg), zlib:adler32_combine(Z1, 123123, 123, noint)),
?m(ok, zlib:deflateEnd(Z1)),
?m(ok, zlib:close(Z1)).
%% Test compress.
api_un_compress(Config) when is_list(Config) ->
- ?m(?BARG,zlib:compress(not_a_binary)),
+ ?m(?EXIT(badarg),zlib:compress(not_a_binary)),
Bin = <<1,11,1,23,45>>,
Comp = zlib:compress(Bin),
- ?m(?BARG,zlib:uncompress(not_a_binary)),
- ?m({'EXIT',{data_error,_}}, zlib:uncompress(<<171,171,171,171,171>>)),
- ?m({'EXIT',{data_error,_}}, zlib:uncompress(<<>>)),
- ?m({'EXIT',{data_error,_}}, zlib:uncompress(<<120>>)),
- ?m({'EXIT',{data_error,_}}, zlib:uncompress(<<120,156>>)),
- ?m({'EXIT',{data_error,_}}, zlib:uncompress(<<120,156,3>>)),
- ?m({'EXIT',{data_error,_}}, zlib:uncompress(<<120,156,3,0>>)),
- ?m({'EXIT',{data_error,_}}, zlib:uncompress(<<0,156,3,0,0,0,0,1>>)),
+ ?m(?EXIT(badarg),zlib:uncompress(not_a_binary)),
+ ?m(?EXIT(data_error), zlib:uncompress(<<171,171,171,171,171>>)),
+ ?m(?EXIT(data_error), zlib:uncompress(<<>>)),
+ ?m(?EXIT(data_error), zlib:uncompress(<<120>>)),
+ ?m(?EXIT(data_error), zlib:uncompress(<<120,156>>)),
+ ?m(?EXIT(data_error), zlib:uncompress(<<120,156,3>>)),
+ ?m(?EXIT(data_error), zlib:uncompress(<<120,156,3,0>>)),
+ ?m(?EXIT(data_error), zlib:uncompress(<<0,156,3,0,0,0,0,1>>)),
?m(Bin, zlib:uncompress(binary_to_list(Comp))),
?m(Bin, zlib:uncompress(Comp)).
%% Test zip.
api_un_zip(Config) when is_list(Config) ->
- ?m(?BARG,zlib:zip(not_a_binary)),
+ ?m(?EXIT(badarg),zlib:zip(not_a_binary)),
Bin = <<1,11,1,23,45>>,
Comp = zlib:zip(Bin),
?m(Comp, zlib:zip(binary_to_list(Bin))),
- ?m(?BARG,zlib:unzip(not_a_binary)),
- ?m({'EXIT',{data_error,_}}, zlib:unzip(<<171,171,171,171,171>>)),
- ?m({'EXIT',{data_error,_}}, zlib:unzip(<<>>)),
+ ?m(?EXIT(badarg),zlib:unzip(not_a_binary)),
+ ?m(?EXIT(data_error), zlib:unzip(<<171,171,171,171,171>>)),
+ ?m(?EXIT(data_error), zlib:unzip(<<>>)),
?m(Bin, zlib:unzip(Comp)),
?m(Bin, zlib:unzip(binary_to_list(Comp))),
%% OTP-6396
- B = <<131,104,19,100,0,13,99,95,99,105,100,95,99,115,103,115,110,95,50,97,1,107,0,4,208,161,246,29,107,0,3,237,166,224,107,0,6,66,240,153,0,2,10,1,0,8,97,116,116,97,99,104,101,100,104,2,100,0,22,117,112,100,97,116,101,95,112,100,112,95,99,111,110,116,101,120,116,95,114,101,113,107,0,114,69,3,12,1,11,97,31,113,150,64,104,132,61,64,104,12,3,197,31,113,150,64,104,132,61,64,104,12,1,11,97,31,115,150,64,104,116,73,64,104,0,0,0,0,0,0,65,149,16,61,65,149,16,61,1,241,33,4,5,0,33,4,4,10,6,10,181,4,10,6,10,181,38,15,99,111,109,109,97,110,100,1,114,45,97,112,110,45,49,3,99,111,109,5,109,110,99,57,57,6,109,99,99,50,52,48,4,103,112,114,115,8,0,104,2,104,2,100,0,8,97,99,116,105,118,97,116,101,104,23,100,0,11,112,100,112,95,99,111,110,116,1,120,116,100,0,7,112,114,105,109,97,114,121,97,1,100,0,9,117,110,100,101,102,105,110,101,100,97,1,97,4,97,4,97,7,100,0,9,117,110,100,101,102,105,110,101,100,100,0,9,117,110,100,101,102,105,110,10100,100,0,9,117,110,100,101,102,105,110,101,100,100,0,5,102,97,108,115,101,100,0,9,117,110,100,101,102,105,110,101,100,100,0,9,117,110,100,101,102,105,110,101,100,100,0,9,117,110,100,101,102,105,1,101,100,97,0,100,0,9,117,110,100,101,102,105,110,101,100,107,0,4,16,0,1,144,107,0,4,61,139,186,181,107,0,4,10,8,201,49,100,0,9,117,110,100,101,102,105,110,101,100,100,0,9,117,110,100,101,102,105,0,101,100,100,0,9,117,110,100,101,102,105,110,101,100,104,2,104,3,98,0,0,7,214,97,11,97,20,104,3,97,17,97,16,97,21,106,108,0,0,0,3,104,2,97,1,104,2,104,3,98,0,0,7,214,97,11,97,20,104,3,97,17,97,167,20,104,2,97,4,104,2,104,3,98,0,0,7,214,97,11,97,20,104,3,97,17,97,16,97,21,104,2,97,10,104,2,104,3,98,0,0,7,214,97,11,97,20,104,3,97,17,97,16,97,26,106,100,0,5,118,101,114,57,57,100,0,9,117,110,0,101,102,105,110,101,100,107,0,2,0,244,107,0,4,10,6,102,195,107,0,4,10,6,102,195,100,0,9,117,110,100,101,102,105,110,101,100,100,0,9,117,110,100,101,102,105,110,101,100,107,0,125,248,143,0,203,25115,157,116,65,185,65,172,55,87,164,88,225,50,203,251,115,157,116,65,185,65,172,55,87,164,88,225,50,0,0,82,153,50,0,200,98,87,148,237,193,185,65,149,167,69,144,14,16,153,50,3,81,70,94,13,109,193,1,120,5,181,113,198,118,50,3,81,70,94,13,109,193,185,120,5,181,113,198,118,153,3,81,70,94,13,109,193,185,120,5,181,113,198,118,153,50,16,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,113,92,2,119,128,0,0,108,0,0,1,107,0,114,69,3,12,1,11,97,31,113,150,64,104,132,61,64,104,12,3,11,97,31,113,150,64,104,132,61,64,104,12,1,11,97,31,115,150,64,104,116,73,64,104,0,0,0,0,0,0,65,149,16,61,65,149,16,61,1,241,33,4,0,33,4,4,10,6,10,181,4,10,6,10,181,38,15,99,111,109,109,97,110,100,101,114,45,97,112,110,45,49,3,99,111,109,5,109,110,99,57,57,6,109,99,99,50,52,48,4,103,112,114,115,8,0,106>>,
+ B =
+ <<131,104,19,100,0,13,99,95,99,105,100,95,99,115,103,115,110,95,50,97,
+ 1,107,0,4,208,161,246,29,107,0,3,237,166,224,107,0,6,66,240,153,0,2,
+ 10,1,0,8,97,116,116,97,99,104,101,100,104,2,100,0,22,117,112,100,97,
+ 116,101,95,112,100,112,95,99,111,110,116,101,120,116,95,114,101,113,
+ 107,0,114,69,3,12,1,11,97,31,113,150,64,104,132,61,64,104,12,3,197,
+ 31,113,150,64,104,132,61,64,104,12,1,11,97,31,115,150,64,104,116,73,
+ 64,104,0,0,0,0,0,0,65,149,16,61,65,149,16,61,1,241,33,4,5,0,33,4,4,10
+ ,6,10,181,4,10,6,10,181,38,15,99,111,109,109,97,110,100,1,114,45,97,
+ 112,110,45,49,3,99,111,109,5,109,110,99,57,57,6,109,99,99,50,52,48,4,
+ 103,112,114,115,8,0,104,2,104,2,100,0,8,97,99,116,105,118,97,116,101,
+ 104,23,100,0,11,112,100,112,95,99,111,110,116,1,120,116,100,0,7,112,
+ 114,105,109,97,114,121,97,1,100,0,9,117,110,100,101,102,105,110,101,
+ 100,97,1,97,4,97,4,97,7,100,0,9,117,110,100,101,102,105,110,101,100,
+ 100,0,9,117,110,100,101,102,105,110,10100,100,0,9,117,110,100,101,
+ 102,105,110,101,100,100,0,5,102,97,108,115,101,100,0,9,117,110,100,
+ 101,102,105,110,101,100,100,0,9,117,110,100,101,102,105,110,101,100,
+ 100,0,9,117,110,100,101,102,105,1,101,100,97,0,100,0,9,117,110,100,
+ 101,102,105,110,101,100,107,0,4,16,0,1,144,107,0,4,61,139,186,181,
+ 107,0,4,10,8,201,49,100,0,9,117,110,100,101,102,105,110,101,100,100,
+ 0,9,117,110,100,101,102,105,0,101,100,100,0,9,117,110,100,101,102,
+ 105,110,101,100,104,2,104,3,98,0,0,7,214,97,11,97,20,104,3,97,17,97,
+ 16,97,21,106,108,0,0,0,3,104,2,97,1,104,2,104,3,98,0,0,7,214,97,11,
+ 97,20,104,3,97,17,97,167,20,104,2,97,4,104,2,104,3,98,0,0,7,214,97,
+ 11,97,20,104,3,97,17,97,16,97,21,104,2,97,10,104,2,104,3,98,0,0,7,
+ 214,97,11,97,20,104,3,97,17,97,16,97,26,106,100,0,5,118,101,114,57,
+ 57,100,0,9,117,110,0,101,102,105,110,101,100,107,0,2,0,244,107,0,4,
+ 10,6,102,195,107,0,4,10,6,102,195,100,0,9,117,110,100,101,102,105,
+ 110,101,100,100,0,9,117,110,100,101,102,105,110,101,100,107,0,125,
+ 248,143,0,203,25115,157,116,65,185,65,172,55,87,164,88,225,50,203,
+ 251,115,157,116,65,185,65,172,55,87,164,88,225,50,0,0,82,153,50,0,
+ 200,98,87,148,237,193,185,65,149,167,69,144,14,16,153,50,3,81,70,94,
+ 13,109,193,1,120,5,181,113,198,118,50,3,81,70,94,13,109,193,185,120,
+ 5,181,113,198,118,153,3,81,70,94,13,109,193,185,120,5,181,113,198,
+ 118,153,50,16,1,2,3,4,5,6,7,8,9,0,1,2,3,4,5,6,113,92,2,119,128,0,0,
+ 108,0,0,1,107,0,114,69,3,12,1,11,97,31,113,150,64,104,132,61,64,104,
+ 12,3,11,97,31,113,150,64,104,132,61,64,104,12,1,11,97,31,115,150,64,
+ 104,116,73,64,104,0,0,0,0,0,0,65,149,16,61,65,149,16,61,1,241,33,4,0,
+ 33,4,4,10,6,10,181,4,10,6,10,181,38,15,99,111,109,109,97,110,100,101,
+ 114,45,97,112,110,45,49,3,99,111,109,5,109,110,99,57,57,6,109,99,99,
+ 50,52,48,4,103,112,114,115,8,0,106>>,
+
Z = zlib:zip(B),
?m(B, zlib:unzip(Z)).
%% Test gunzip.
api_g_un_zip(Config) when is_list(Config) ->
- ?m(?BARG,zlib:gzip(not_a_binary)),
+ ?m(?EXIT(badarg),zlib:gzip(not_a_binary)),
Bin = <<1,11,1,23,45>>,
Comp = zlib:gzip(Bin),
?m(Comp, zlib:gzip(binary_to_list(Bin))),
- ?m(?BARG, zlib:gunzip(not_a_binary)),
- ?m(?DATA_ERROR, zlib:gunzip(<<171,171,171,171,171>>)),
- ?m(?DATA_ERROR, zlib:gunzip(<<>>)),
+ ?m(?EXIT(badarg), zlib:gunzip(not_a_binary)),
+ ?m(?EXIT(data_error), zlib:gunzip(<<171,171,171,171,171>>)),
+ ?m(?EXIT(data_error), zlib:gunzip(<<>>)),
?m(Bin, zlib:gunzip(Comp)),
?m(Bin, zlib:gunzip(binary_to_list(Comp))),
%% Bad CRC; bad length.
BadCrc = bad_crc_data(),
- ?m({'EXIT',{data_error,_}},(catch zlib:gunzip(BadCrc))),
+ ?m(?EXIT(data_error),(catch zlib:gunzip(BadCrc))),
BadLen = bad_len_data(),
- ?m({'EXIT',{data_error,_}},(catch zlib:gunzip(BadLen))),
+ ?m(?EXIT(data_error),(catch zlib:gunzip(BadLen))),
ok.
bad_crc_data() ->
@@ -594,30 +686,15 @@ intro(Config) when is_list(Config) ->
large_deflate(Config) when is_list(Config) ->
large_deflate_do().
large_deflate_do() ->
- Z = zlib:open(),
- Plain = rand_bytes(zlib:getBufSize(Z)*5),
- ok = zlib:deflateInit(Z),
- _ZlibHeader = zlib:deflate(Z, [], full),
- Deflated = zlib:deflate(Z, Plain, full),
- ?m(ok, zlib:close(Z)),
- ?m(Plain, zlib:unzip(list_to_binary([Deflated, 3, 0]))).
-
-rand_bytes(Sz) ->
- L = <<8,2,3,6,1,2,3,2,3,4,8,7,3,7,2,3,4,7,5,8,9,3>>,
- rand_bytes(erlang:md5(L),Sz).
-
-rand_bytes(Bin, Sz) when byte_size(Bin) >= Sz ->
- <<Res:Sz/binary, _/binary>> = Bin,
- Res;
-rand_bytes(Bin, Sz) ->
- rand_bytes(<<(erlang:md5(Bin))/binary, Bin/binary>>, Sz).
-
+ Plain = gen_determ_rand_bytes(64 bsl 10),
+ Deflated = zlib:zip(Plain),
+ ?m(Plain, zlib:unzip(Deflated)).
%% Test a standard compressed zip file.
zip_usage(Config) when is_list(Config) ->
zip_usage(zip_usage({get_arg,Config}));
zip_usage({get_arg,Config}) ->
- Out = conf(data_dir,Config),
+ Out = get_data_dir(Config),
{ok,ZIP} = file:read_file(filename:join(Out,"zipdoc.zip")),
{ok,ORIG} = file:read_file(filename:join(Out,"zipdoc")),
{run,ZIP,ORIG};
@@ -688,7 +765,7 @@ zip_usage({run,ZIP,ORIG}) ->
gz_usage(Config) when is_list(Config) ->
gz_usage(gz_usage({get_arg,Config}));
gz_usage({get_arg,Config}) ->
- Out = conf(data_dir,Config),
+ Out = get_data_dir(Config),
{ok,GZIP} = file:read_file(filename:join(Out,"zipdoc.1.gz")),
{ok,ORIG} = file:read_file(filename:join(Out,"zipdoc")),
{ok,GZIP2} = file:read_file(filename:join(Out,"zipdoc.txt.gz")),
@@ -709,7 +786,7 @@ gz_usage2(Config) ->
case os:find_executable("gzip") of
Name when is_list(Name) ->
Z = zlib:open(),
- Out = conf(data_dir,Config),
+ Out = get_data_dir(Config),
{ok,ORIG} = file:read_file(filename:join(Out,"zipdoc")),
Compressed = zlib:gzip(ORIG),
GzOutFile = filename:join(Out,"out.gz"),
@@ -737,7 +814,7 @@ gz_usage2(Config) ->
compress_usage(Config) when is_list(Config) ->
compress_usage(compress_usage({get_arg,Config}));
compress_usage({get_arg,Config}) ->
- Out = conf(data_dir,Config),
+ Out = get_data_dir(Config),
{ok,C1} = file:read_file(filename:join(Out,"png-compressed.zlib")),
{run,C1};
compress_usage({run,C1}) ->
@@ -792,7 +869,7 @@ compress_usage({run,C1}) ->
crc(Config) when is_list(Config) ->
crc(crc({get_arg,Config}));
crc({get_arg,Config}) ->
- Out = conf(data_dir,Config),
+ Out = get_data_dir(Config),
{ok,C1} = file:read_file(filename:join(Out,"zipdoc")),
{run,C1};
crc({run,C1}) ->
@@ -821,7 +898,7 @@ crc({run,C1}) ->
adler(Config) when is_list(Config) ->
adler(adler({get_arg,Config}));
adler({get_arg,Config}) ->
- Out = conf(data_dir,Config),
+ Out = get_data_dir(Config),
File1 = filename:join(Out,"zipdoc"),
{ok,C1} = file:read_file(File1),
{run,C1};
@@ -869,10 +946,14 @@ dictionary_usage({run}) ->
%% Now uncompress.
Z2 = zlib:open(),
?m(ok, zlib:inflateInit(Z2)),
- {'EXIT',{{need_dictionary,DictID},_}} = (catch zlib:inflate(Z2, Compressed)),
+
+ ?m(?EXIT({need_dictionary, DictID}), zlib:inflate(Z2, Compressed)),
+
?m(ok, zlib:inflateSetDictionary(Z2, Dict)),
?m(ok, zlib:inflateSetDictionary(Z2, binary_to_list(Dict))),
+
Uncompressed = ?m(B when is_list(B), zlib:inflate(Z2, [])),
+
?m(ok, zlib:inflateEnd(Z2)),
?m(ok, zlib:close(Z2)),
?m(Data, list_to_binary(Uncompressed)).
@@ -882,33 +963,64 @@ split_bin(<<Part:1997/binary,Rest/binary>>, Acc) ->
split_bin(Last,Acc) ->
lists:reverse([Last|Acc]).
+only_allow_owner(Config) when is_list(Config) ->
+ Z = zlib:open(),
-%% Check concurrent access to zlib driver.
-smp(Config) ->
- case erlang:system_info(smp_support) of
- true ->
- NumOfProcs = lists:min([8,erlang:system_info(schedulers)]),
- io:format("smp starting ~p workers\n",[NumOfProcs]),
+ ?m(ok, zlib:inflateInit(Z)),
+ ?m(ok, zlib:inflateReset(Z)),
- %% Tests to run in parallel.
- Funcs = [zip_usage, gz_usage, compress_usage, dictionary_usage,
- crc, adler],
+ {Pid, Ref} = spawn_monitor(
+ fun() ->
+ ?m(?EXIT(not_on_controlling_process), zlib:inflateReset(Z))
+ end),
- %% We get all function arguments here to avoid repeated parallel
- %% file read access.
- FnAList = lists:map(fun(F) -> {F,?MODULE:F({get_arg,Config})}
- end, Funcs),
+ receive
+ {'DOWN', Ref, process, Pid, _Reason} ->
+ ok
+ after 200 ->
+ ct:fail("Spawned worker timed out.")
+ end,
- Pids = [spawn_link(?MODULE, worker, [rand:uniform(9999),
- list_to_tuple(FnAList),
- self()])
- || _ <- lists:seq(1,NumOfProcs)],
- wait_pids(Pids);
+ ?m(ok, zlib:inflateReset(Z)).
- false ->
- {skipped,"No smp support"}
- end.
+sub_heap_binaries(Config) when is_list(Config) ->
+ Compressed = zlib:compress(<<"gurka">>),
+ ConfLen = erlang:length(Config),
+
+ HeapBin = <<ConfLen:8/integer, Compressed/binary>>,
+ <<_:8/integer, SubHeapBin/binary>> = HeapBin,
+
+ ?m(<<"gurka">>, zlib:uncompress(SubHeapBin)),
+ ok.
+%% Check concurrent access to zlib driver.
+smp(Config) ->
+ case erlang:system_info(smp_support) of
+ true ->
+ NumOfProcs = lists:min([8,erlang:system_info(schedulers)]),
+ io:format("smp starting ~p workers\n",[NumOfProcs]),
+
+ %% Tests to run in parallel.
+ Funcs =
+ [zip_usage, gz_usage, compress_usage, dictionary_usage,
+ crc, adler],
+
+ %% We get all function arguments here to avoid repeated parallel
+ %% file read access.
+ UsageArgs =
+ list_to_tuple([{F, ?MODULE:F({get_arg,Config})} || F <- Funcs]),
+ Parent = self(),
+
+ WorkerFun =
+ fun() ->
+ worker(rand:uniform(9999), UsageArgs, Parent)
+ end,
+
+ Pids = [spawn_link(WorkerFun) || _ <- lists:seq(1, NumOfProcs)],
+ wait_pids(Pids);
+ false ->
+ {skipped,"No smp support"}
+ end.
worker(Seed, FnATpl, Parent) ->
io:format("smp worker ~p, seed=~p~n",[self(),Seed]),
@@ -999,43 +1111,98 @@ otp_9981(Config) when is_list(Config) ->
Ports = lists:sort(erlang:ports()),
ok.
+-define(BENCH_SIZE, (16 bsl 20)).
+
+-define(DECOMPRESS_BENCH(Name, What, Data),
+ Name(Config) when is_list(Config) ->
+ Uncompressed = Data,
+ Compressed = zlib:compress(Uncompressed),
+ What(Compressed, byte_size(Uncompressed))).
+
+-define(COMPRESS_BENCH(Name, What, Data),
+ Name(Config) when is_list(Config) ->
+            Uncompressed = Data,
+            What(Uncompressed, byte_size(Uncompressed))).
+
+?DECOMPRESS_BENCH(inflate_bench_zeroed, throughput_bench_inflate,
+ <<0:(8 * ?BENCH_SIZE)>>).
+?DECOMPRESS_BENCH(inflate_bench_rand, throughput_bench_inflate,
+ gen_determ_rand_bytes(?BENCH_SIZE)).
+
+?DECOMPRESS_BENCH(chunk_bench_zeroed, throughput_bench_chunk,
+ <<0:(8 * ?BENCH_SIZE)>>).
+?DECOMPRESS_BENCH(chunk_bench_rand, throughput_bench_chunk,
+ gen_determ_rand_bytes(?BENCH_SIZE)).
+?COMPRESS_BENCH(deflate_bench_zeroed, throughput_bench_deflate,
+ <<0:(8 * ?BENCH_SIZE)>>).
+?COMPRESS_BENCH(deflate_bench_rand, throughput_bench_deflate,
+ gen_determ_rand_bytes(?BENCH_SIZE)).
+
+throughput_bench_inflate(Compressed, Size) ->
+ Z = zlib:open(),
+ zlib:inflateInit(Z),
+
+ submit_throughput_results(Size,
+ fun() ->
+ zlib:inflate(Z, Compressed)
+ end).
+
+throughput_bench_deflate(Uncompressed, Size) ->
+ Z = zlib:open(),
+ zlib:deflateInit(Z),
+
+ submit_throughput_results(Size,
+ fun() ->
+ zlib:deflate(Z, Uncompressed, finish)
+ end).
+
+throughput_bench_chunk(Compressed, Size) ->
+ Z = zlib:open(),
+ zlib:inflateInit(Z),
+
+ ChunkLoop =
+ fun
+ Loop({more, _}) -> Loop(zlib:inflateChunk(Z));
+ Loop(_) -> ok
+ end,
+
+ submit_throughput_results(Size,
+ fun() ->
+ ChunkLoop(zlib:inflateChunk(Z, Compressed))
+ end).
+
+submit_throughput_results(Size, Fun) ->
+ TimeTaken = measure_perf_counter(Fun, millisecond),
+
+ KBPS = trunc((Size bsr 10) / (TimeTaken / 1000)),
+ ct_event:notify(#event{ name = benchmark_data, data = [{value,KBPS}] }),
+ {comment, io_lib:format("~p ms, ~p KBPS", [TimeTaken, KBPS])}.
+
+measure_perf_counter(Fun, Unit) ->
+ Start = os:perf_counter(Unit),
+ Fun(),
+ os:perf_counter(Unit) - Start.
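The KBPS value computed above is plain unit arithmetic: Size bsr 10 converts bytes to KiB and TimeTaken / 1000 converts milliseconds to seconds. With the 16 MiB bench input (?BENCH_SIZE = 16 bsl 20) and a hypothetical run time of 250 ms, trunc((16777216 bsr 10) / (250 / 1000)) = trunc(16384 / 0.25) = 65536 KBPS.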
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%% Helps with testing directly %%%%%%%%%%%%%
-conf(What,Config) ->
- try proplists:get_value(What,Config) of
- undefined ->
- "./zlib_SUITE_data";
- Dir ->
- Dir
+get_data_dir(Config) ->
+ try proplists:get_value(data_dir,Config) of
+ undefined ->
+ "./zlib_SUITE_data";
+ Dir ->
+ Dir
catch
- _:_ -> "./zlib_SUITE_data"
+ _:_ -> "./zlib_SUITE_data"
end.
-t() -> t([all]).
-
-t(What) when not is_list(What) ->
- t([What]);
-t(What) ->
- lists:foreach(fun(T) ->
- try ?MODULE:T([])
- catch _E:_R ->
- Line = get(test_server_loc),
- io:format("Failed ~p:~p ~p ~p ~p~n",
- [T,Line,_E,_R, erlang:get_stacktrace()])
- end
- end, expand(What)).
-
-expand(All) ->
- lists:reverse(expand(All,[])).
-expand([H|T], Acc) ->
- case ?MODULE:H(suite) of
- [] -> expand(T,[H|Acc]);
- Cs ->
- R = expand(Cs, Acc),
- expand(T, R)
- end;
-expand([], Acc) -> Acc.
-
+%% Generates deterministic pseudo-random bytes, using the size as the seed.
+gen_determ_rand_bytes(Size) ->
+ gen_determ_rand_bytes(Size, erlang:md5_init(), <<>>).
+gen_determ_rand_bytes(Size, _Context, Acc) when Size =< 0 ->
+ Acc;
+gen_determ_rand_bytes(Size, Context0, Acc) when Size > 0 ->
+ Context = erlang:md5_update(Context0, <<Size/integer>>),
+ Checksum = erlang:md5_final(Context),
+ gen_determ_rand_bytes(Size - 16, Context, <<Acc/binary, Checksum/binary>>).
diff --git a/lib/kernel/vsn.mk b/lib/kernel/vsn.mk
index 4edecd8969..c9463241d1 100644
--- a/lib/kernel/vsn.mk
+++ b/lib/kernel/vsn.mk
@@ -1 +1 @@
-KERNEL_VSN = 5.3
+KERNEL_VSN = 5.3.1
diff --git a/lib/mnesia/doc/src/Mnesia_chap2.xmlsrc b/lib/mnesia/doc/src/Mnesia_chap2.xmlsrc
index 37389ce5ae..914e0e509c 100644
--- a/lib/mnesia/doc/src/Mnesia_chap2.xmlsrc
+++ b/lib/mnesia/doc/src/Mnesia_chap2.xmlsrc
@@ -327,7 +327,7 @@
<section>
<title>Initial Database Content</title>
<p>After the insertion of the employee named <c>klacke</c>,
- the databse has the following records:</p>
+ the database has the following records:</p>
<marker id="table2_1"></marker>
<table>
<row>
diff --git a/lib/mnesia/src/mnesia.erl b/lib/mnesia/src/mnesia.erl
index 3b771e8c5b..b68b2de028 100644
--- a/lib/mnesia/src/mnesia.erl
+++ b/lib/mnesia/src/mnesia.erl
@@ -2283,9 +2283,9 @@ list_index_plugins([{N,M,F} | T] = Ps, Legend) ->
lists:foldl(fun({N1,_,_}, Wa) ->
erlang:max(Wa, length(pp_ix_name(N1)))
end, 0, Ps)),
- io:fwrite(Legend ++ "~-" ++ W ++ "s - ~s:~s~n",
+ io:fwrite(Legend ++ "~-" ++ W ++ "s - ~s:~ts~n",
[pp_ix_name(N), atom_to_list(M), atom_to_list(F)]),
- [io:fwrite(Indent ++ "~-" ++ W ++ "s - ~s:~s~n",
+ [io:fwrite(Indent ++ "~-" ++ W ++ "s - ~s:~ts~n",
[pp_ix_name(N1), atom_to_list(M1), atom_to_list(F1)])
|| {N1,M1,F1} <- T].
diff --git a/lib/mnesia/src/mnesia_bup.erl b/lib/mnesia/src/mnesia_bup.erl
index 3e55deb958..34f16f178b 100644
--- a/lib/mnesia/src/mnesia_bup.erl
+++ b/lib/mnesia/src/mnesia_bup.erl
@@ -920,7 +920,7 @@ create_dat_files([{schema, Tab, TabDef} | Tail], Ext, LocalTabs) ->
ok ->
ok;
{error, Reason} ->
- mnesia_lib:fatal("Cannot rename file ~p -> ~p: ~p~n",
+ mnesia_lib:fatal("Cannot rename file ~tp -> ~tp: ~tp~n",
[TmpFile, DclFile, Reason])
end
end
@@ -1016,7 +1016,7 @@ disc_only_swap_fun(disc_only_copies, Expunge, Open, Close) ->
ok ->
ok;
{error, Reason} ->
- mnesia_lib:fatal("Cannot rename file ~p -> ~p: ~p~n",
+ mnesia_lib:fatal("Cannot rename file ~tp -> ~tp: ~tp~n",
[TmpFile, DatFile, Reason])
end
end;
diff --git a/lib/mnesia/src/mnesia_checkpoint.erl b/lib/mnesia/src/mnesia_checkpoint.erl
index 8659e4622c..2ff77326a9 100644
--- a/lib/mnesia/src/mnesia_checkpoint.erl
+++ b/lib/mnesia/src/mnesia_checkpoint.erl
@@ -683,14 +683,14 @@ retainer_create(_Cp, R, Tab, Name, Ext = {ext, Alias, Mod}) ->
Cs = val({Tab, cstruct}),
Mod:load_table(Alias, T, {retainer, create_table},
mnesia_schema:cs2list(Cs)),
- dbg_out("Checkpoint retainer created ~p ~p~n", [Name, Tab]),
+ dbg_out("Checkpoint retainer created ~p ~tp~n", [Name, Tab]),
R#retainer{store = {Ext, T}, really_retain = true};
retainer_create(_Cp, R, Tab, Name, disc_only_copies) ->
Fname = tab2retainer({Tab, Name}),
file:delete(Fname),
Args = [{file, Fname}, {type, set}, {keypos, 2}, {repair, false}],
{ok, _} = mnesia_lib:dets_sync_open({Tab, Name}, Args),
- dbg_out("Checkpoint retainer created ~p ~p~n", [Name, Tab]),
+ dbg_out("Checkpoint retainer created ~p ~tp~n", [Name, Tab]),
R#retainer{store = {dets, {Tab, Name}}, really_retain = true};
retainer_create(Cp, R, Tab, Name, Storage) ->
T = ?ets_new_table(mnesia_retainer, [set, public, {keypos, 2}]),
@@ -698,7 +698,7 @@ retainer_create(Cp, R, Tab, Name, Storage) ->
ReallyR = R#retainer.really_retain,
ReallyCp = lists:member(Tab, Overriders),
ReallyR2 = prepare_ram_tab(Tab, T, Storage, ReallyR, ReallyCp),
- dbg_out("Checkpoint retainer created ~p ~p~n", [Name, Tab]),
+ dbg_out("Checkpoint retainer created ~p ~tp~n", [Name, Tab]),
R#retainer{store = {ets, T}, really_retain = ReallyR2}.
%% Copy the dumped table into retainer if needed
@@ -849,7 +849,7 @@ retainer_loop(Cp = #checkpoint_args{is_activated=false, name=Name}) ->
retainer_loop(Cp#checkpoint_args{iterators = Iters});
{system, From, Msg} ->
- dbg_out("~p got {system, ~p, ~p}~n", [?MODULE, From, Msg]),
+ dbg_out("~p got {system, ~p, ~tp}~n", [?MODULE, From, Msg]),
sys:handle_system_msg(Msg, From, Cp#checkpoint_args.supervisor,
?MODULE, [], Cp)
end;
@@ -938,11 +938,11 @@ retainer_loop(Cp = #checkpoint_args{name=Name}) ->
retainer_loop(Cp#checkpoint_args{iterators = Iters});
{system, From, Msg} ->
- dbg_out("~p got {system, ~p, ~p}~n", [?MODULE, From, Msg]),
+ dbg_out("~p got {system, ~p, ~tp}~n", [?MODULE, From, Msg]),
sys:handle_system_msg(Msg, From, Cp#checkpoint_args.supervisor,
?MODULE, [], Cp);
Msg ->
- dbg_out("~p got ~p~n", [?MODULE, Msg])
+ dbg_out("~p got ~tp~n", [?MODULE, Msg])
end.
maybe_activate(Cp)
diff --git a/lib/mnesia/src/mnesia_controller.erl b/lib/mnesia/src/mnesia_controller.erl
index 6b93935cb4..77013489b3 100644
--- a/lib/mnesia/src/mnesia_controller.erl
+++ b/lib/mnesia/src/mnesia_controller.erl
@@ -446,7 +446,7 @@ try_schedule_late_disc_load(Tabs, Reason, MsgTag) ->
[BadNodes]),
try_schedule_late_disc_load(Tabs, Reason, MsgTag);
{aborted, AbortReason} ->
- fatal("Cannot late_load_tables~p: ~p~n",
+ fatal("Cannot late_load_tables ~tp: ~tp~n",
[[Tabs, Reason, MsgTag], AbortReason])
end.
@@ -535,7 +535,7 @@ try_merge_schema(Nodes, Told0, UserFun) ->
end,
try_merge_schema(Nodes, Told, UserFun);
{atomic, {"Cannot get cstructs", Node, Reason}} ->
- dbg_out("Cannot get cstructs, Node ~p ~p~n", [Node, Reason]),
+ dbg_out("Cannot get cstructs, Node ~p ~tp~n", [Node, Reason]),
timer:sleep(300), % Avoid a endless loop look alike
try_merge_schema(Nodes, Told0, UserFun);
{aborted, {shutdown, _}} -> %% One of the nodes is going down
@@ -826,12 +826,12 @@ handle_call({del_other, Who}, _From, State = #state{others=Others0}) ->
{reply, ok, State#state{others=Others}};
handle_call(Msg, _From, State) ->
- error("~p got unexpected call: ~p~n", [?SERVER_NAME, Msg]),
+ error("~p got unexpected call: ~tp~n", [?SERVER_NAME, Msg]),
noreply(State).
late_disc_load(TabsR, Reason, RemoteLoaders, From,
State = #state{loader_queue = LQ, late_loader_queue = LLQ}) ->
- verbose("Intend to load tables: ~p~n", [TabsR]),
+ verbose("Intend to load tables: ~tp~n", [TabsR]),
?eval_debug_fun({?MODULE, late_disc_load},
[{tabs, TabsR},
{reason, Reason},
@@ -1118,7 +1118,7 @@ handle_cast({adopt_orphans, Node, Tabs}, State) ->
noreply(State2);
handle_cast(Msg, State) ->
- error("~p got unexpected cast: ~p~n", [?SERVER_NAME, Msg]),
+ error("~p got unexpected cast: ~tp~n", [?SERVER_NAME, Msg]),
noreply(State).
handle_sync_tabs([Tab | Tabs], From) ->
@@ -1166,7 +1166,7 @@ handle_info(#dumper_done{worker_pid=Pid, worker_res=Res}, State) ->
State3 = opt_start_worker(State2),
noreply(State3);
true ->
- fatal("Dumper failed: ~p~n state: ~p~n", [Res, State]),
+ fatal("Dumper failed: ~p~n state: ~tp~n", [Res, State]),
{stop, fatal, State}
end;
@@ -1249,7 +1249,7 @@ handle_info(#sender_done{worker_pid=Pid, worker_res=Res}, State) ->
true ->
%% No need to send any message to the table receiver
%% since it will soon get a mnesia_down anyway
- fatal("Sender failed: ~p~n state: ~p~n", [Res, State]),
+ fatal("Sender failed: ~p~n state: ~tp~n", [Res, State]),
{stop, fatal, State}
end;
@@ -1257,7 +1257,7 @@ handle_info({'EXIT', Pid, R}, State) when Pid == State#state.supervisor ->
?SAFE(set(mnesia_status, stopping)),
case State#state.dumper_pid of
undefined ->
- dbg_out("~p was ~p~n", [?SERVER_NAME, R]),
+ dbg_out("~p was ~tp~n", [?SERVER_NAME, R]),
{stop, shutdown, State};
_ ->
noreply(State#state{is_stopping = true})
@@ -1266,12 +1266,12 @@ handle_info({'EXIT', Pid, R}, State) when Pid == State#state.supervisor ->
handle_info({'EXIT', Pid, R}, State) when Pid == State#state.dumper_pid ->
case State#state.dumper_queue of
[#schema_commit_lock{}|Workers] -> %% Schema trans crashed or was killed
- dbg_out("WARNING: Dumper ~p exited ~p~n", [Pid, R]),
+ dbg_out("WARNING: Dumper ~p exited ~tp~n", [Pid, R]),
State2 = State#state{dumper_queue = Workers, dumper_pid = undefined},
State3 = opt_start_worker(State2),
noreply(State3);
_Other ->
- fatal("Dumper or schema commit crashed: ~p~n state: ~p~n", [R, State]),
+ fatal("Dumper or schema commit crashed: ~p~n state: ~tp~n", [R, State]),
{stop, fatal, State}
end;
@@ -1280,15 +1280,15 @@ handle_info(Msg = {'EXIT', Pid, R}, State) when R /= wait_for_tables_timeout ->
true ->
%% No need to send any message to the table receiver
%% since it will soon get a mnesia_down anyway
- fatal("Sender crashed: ~p~n state: ~p~n", [{Pid,R}, State]),
+ fatal("Sender crashed: ~p~n state: ~tp~n", [{Pid,R}, State]),
{stop, fatal, State};
false ->
case lists:keymember(Pid, 1, get_loaders(State)) of
true ->
- fatal("Loader crashed: ~p~n state: ~p~n", [R, State]),
+ fatal("Loader crashed: ~p~n state: ~tp~n", [R, State]),
{stop, fatal, State};
false ->
- error("~p got unexpected info: ~p~n", [?SERVER_NAME, Msg]),
+ error("~p got unexpected info: ~tp~n", [?SERVER_NAME, Msg]),
noreply(State)
end
end;
@@ -1308,7 +1308,7 @@ handle_info({'EXIT', Pid, wait_for_tables_timeout}, State) ->
noreply(State);
handle_info(Msg, State) ->
- error("~p got unexpected info: ~p~n", [?SERVER_NAME, Msg]),
+ error("~p got unexpected info: ~tp~n", [?SERVER_NAME, Msg]),
noreply(State).
sync_tab_timeout(Pid, [{{sync_tab, Tab}, Pids} | Tail]) ->
@@ -2054,7 +2054,7 @@ opt_start_sender2([Sender|R], Pids, Kept, LoaderQ) ->
Pid = spawn_link(?MODULE, send_and_reply,[self(), Sender]),
opt_start_sender2(R,[{Pid,Sender}|Pids],Kept,LoaderQ);
true ->
- verbose("Send table failed ~p not active on this node ~n", [Tab]),
+ verbose("Send table failed ~tp not active on this node ~n", [Tab]),
Sender#send_table.receiver_pid ! {copier_done, node()},
opt_start_sender2(R,Pids, Kept, LoaderQ)
end.
@@ -2239,7 +2239,7 @@ disc_load_table(Tab, Reason, ReplyTo) ->
Done#loader_done{is_loaded = false,
reply = Res};
true ->
- fatal("Cannot load table ~p from disc: ~p~n", [Tab, Res])
+ fatal("Cannot load table ~tp from disc: ~tp~n", [Tab, Res])
end.
filter_active(Tab) ->
diff --git a/lib/mnesia/src/mnesia_dumper.erl b/lib/mnesia/src/mnesia_dumper.erl
index eb02a585a6..f0ed7aef4a 100644
--- a/lib/mnesia/src/mnesia_dumper.erl
+++ b/lib/mnesia/src/mnesia_dumper.erl
@@ -193,7 +193,7 @@ do_perform_dump(Cont, InPlace, InitBy, Regulator, OldVersion) ->
do_perform_dump(C2, InPlace, InitBy, Regulator, Version)
catch _:R when R =/= fatal ->
ST = erlang:get_stacktrace(),
- Reason = {"Transaction log dump error: ~p~n", [{R, ST}]},
+ Reason = {"Transaction log dump error: ~tp~n", [{R, ST}]},
close_files(InPlace, {error, Reason}, InitBy),
exit(Reason)
end;
@@ -329,7 +329,7 @@ perform_update(Tid, SchemaOps, _DumperMode, _UseDir) ->
ST = erlang:get_stacktrace(),
Error = {error, {"Schema update error", {Reason, ST}}},
close_files(InPlace, Error, InitBy),
- fatal("Schema update error ~p ~p", [{Reason,ST}, SchemaOps])
+ fatal("Schema update error ~tp ~tp", [{Reason,ST}, SchemaOps])
end.
insert_ops(_Tid, _Storage, [], _InPlace, _InitBy, _) -> ok;
@@ -1166,7 +1166,7 @@ needs_dump_ets(Tab) ->
DcdF = mnesia_lib:tab2dcd(Tab),
case file:read_file_info(DcdF) of
{error, Reason} ->
- mnesia_lib:dbg_out("File ~p info_error ~p ~n",
+ mnesia_lib:dbg_out("File ~tp info_error ~tp ~n",
[DcdF, Reason]),
true;
{ok, DcdInfo} ->
@@ -1205,7 +1205,7 @@ prepare_open(Tab, UpdateInPlace) ->
Tmp = mnesia_lib:tab2tmp(Tab),
try ok = mnesia_lib:copy_file(Dat, Tmp)
catch error:Error ->
- fatal("Cannot copy dets file ~p to ~p: ~p~n",
+ fatal("Cannot copy dets file ~tp to ~tp: ~tp~n",
[Dat, Tmp, Error])
end,
Tmp
@@ -1441,7 +1441,7 @@ start_regulator() ->
{ok, Pid} ->
Pid;
{error, Reason} ->
- fatal("Failed to start ~n: ~p~n", [N, Reason])
+ fatal("Failed to start ~n: ~tp~n", [N, Reason])
end
end.
diff --git a/lib/mnesia/src/mnesia_event.erl b/lib/mnesia/src/mnesia_event.erl
index b06043bc61..49b3990086 100644
--- a/lib/mnesia/src/mnesia_event.erl
+++ b/lib/mnesia/src/mnesia_event.erl
@@ -103,11 +103,11 @@ handle_any_event({mnesia_system_event, Event}, State) ->
handle_any_event({mnesia_table_event, Event}, State) ->
handle_table_event(Event, State);
handle_any_event(Msg, State) ->
- report_error("~p got unexpected event: ~p~n", [?MODULE, Msg]),
+ report_error("~p got unexpected event: ~tp~n", [?MODULE, Msg]),
{ok, State}.
handle_table_event({Oper, Record, TransId}, State) ->
- report_info("~p performed by ~p on record:~n\t~p~n",
+ report_info("~p performed by ~p on record:~n\t~tp~n",
[Oper, TransId, Record]),
{ok, State}.
@@ -155,7 +155,7 @@ handle_system_event({mnesia_down, Node}, State) ->
end;
handle_system_event({mnesia_overload, Details}, State) ->
- report_warning("Mnesia is overloaded: ~w~n", [Details]),
+ report_warning("Mnesia is overloaded: ~tw~n", [Details]),
{ok, State};
handle_system_event({mnesia_info, Format, Args}, State) ->
@@ -175,16 +175,16 @@ handle_system_event({mnesia_fatal, Format, Args, BinaryCore}, State) ->
{ok, State#state{dumped_core = true}};
handle_system_event({inconsistent_database, Reason, Node}, State) ->
- report_error("mnesia_event got {inconsistent_database, ~w, ~w}~n",
+ report_error("mnesia_event got {inconsistent_database, ~tw, ~w}~n",
[Reason, Node]),
{ok, State};
handle_system_event({mnesia_user, Event}, State) ->
- report_info("User event: ~p~n", [Event]),
+ report_info("User event: ~tp~n", [Event]),
{ok, State};
handle_system_event(Msg, State) ->
- report_error("mnesia_event got unexpected system event: ~p~n", [Msg]),
+ report_error("mnesia_event got unexpected system event: ~tp~n", [Msg]),
{ok, State}.
report_info(Format0, Args0) ->
diff --git a/lib/mnesia/src/mnesia_index.erl b/lib/mnesia/src/mnesia_index.erl
index c79f790973..d121bd01e9 100644
--- a/lib/mnesia/src/mnesia_index.erl
+++ b/lib/mnesia/src/mnesia_index.erl
@@ -420,7 +420,7 @@ make_ram_index(Tab, Storage, [Pos | Tail]) ->
add_ram_index(Tab, Storage, {Pos, _Pref}) ->
Type = ordered,
- verbose("Creating index for ~w ~p ~p~n", [Tab, Pos, Type]),
+ verbose("Creating index for ~tw ~p ~p~n", [Tab, Pos, Type]),
SetOrBag = val({Tab, setorbag}),
IxValsF = index_vals_f(Storage, Tab, Pos),
IxFun = fun(Val, Key) -> {{Val, Key}} end,
diff --git a/lib/mnesia/src/mnesia_late_loader.erl b/lib/mnesia/src/mnesia_late_loader.erl
index e273329ffc..e4f8dcf2b9 100644
--- a/lib/mnesia/src/mnesia_late_loader.erl
+++ b/lib/mnesia/src/mnesia_late_loader.erl
@@ -87,13 +87,13 @@ loop(State) ->
loop(State);
{system, From, Msg} ->
- mnesia_lib:dbg_out("~p got {system, ~p, ~p}~n",
+ mnesia_lib:dbg_out("~p got {system, ~p, ~tp}~n",
[?SERVER_NAME, From, Msg]),
Parent = State#state.supervisor,
sys:handle_system_msg(Msg, From, Parent, ?MODULE, [], State);
Msg ->
- mnesia_lib:error("~p got unexpected message: ~p~n",
+ mnesia_lib:error("~p got unexpected message: ~tp~n",
[?SERVER_NAME, Msg]),
loop(State)
end.
diff --git a/lib/mnesia/src/mnesia_lib.erl b/lib/mnesia/src/mnesia_lib.erl
index 1fdc656600..53fdd76de8 100644
--- a/lib/mnesia/src/mnesia_lib.erl
+++ b/lib/mnesia/src/mnesia_lib.erl
@@ -467,7 +467,7 @@ pr_other(Var) ->
no -> {node_not_running, node()};
_ -> {no_exists, Var}
end,
- verbose("~p (~p) val(mnesia_gvar, ~w) -> ~p ~p ~n",
+ verbose("~p (~tp) val(mnesia_gvar, ~tw) -> ~p ~tp ~n",
[self(), process_info(self(), registered_name),
Var, Why, erlang:get_stacktrace()]),
mnesia:abort(Why).
@@ -654,7 +654,7 @@ coredump() ->
coredump(CrashInfo) ->
Core = mkcore(CrashInfo),
Out = core_file(),
- important("Writing Mnesia core to file: ~p...~p~n", [Out, CrashInfo]),
+ important("Writing Mnesia core to file: ~tp...~tp~n", [Out, CrashInfo]),
_ = file:write_file(Out, Core),
Out.
@@ -844,7 +844,7 @@ vcore() ->
case file:list_dir(Cwd) of
{ok, Files}->
CoreFiles = lists:sort(lists:zf(Filter, Files)),
- show("Mnesia core files: ~p~n", [CoreFiles]),
+ show("Mnesia core files: ~tp~n", [CoreFiles]),
vcore(lists:last(CoreFiles));
Error ->
Error
@@ -853,17 +853,17 @@ vcore() ->
vcore(Bin) when is_binary(Bin) ->
Core = binary_to_term(Bin),
Fun = fun({Item, Info}) ->
- show("***** ~p *****~n", [Item]),
+ show("***** ~tp *****~n", [Item]),
case catch vcore_elem({Item, Info}) of
{'EXIT', Reason} ->
- show("{'EXIT', ~p}~n", [Reason]);
+ show("{'EXIT', ~tp}~n", [Reason]);
_ -> ok
end
end,
lists:foreach(Fun, Core);
vcore(File) ->
- show("~n***** Mnesia core: ~p *****~n", [File]),
+ show("~n***** Mnesia core: ~tp *****~n", [File]),
case file:read_file(File) of
{ok, Bin} ->
vcore(Bin);
@@ -879,7 +879,7 @@ vcore_elem({schema_file, {ok, B}}) ->
vcore_elem({logfile, {ok, BinList}}) ->
Fun = fun({F, Info}) ->
- show("----- logfile: ~p -----~n", [F]),
+ show("----- logfile: ~tp -----~n", [F]),
case Info of
{ok, B} ->
Fname = "/tmp/mnesia_vcore_elem.TMP",
@@ -887,7 +887,7 @@ vcore_elem({logfile, {ok, BinList}}) ->
mnesia_log:view(Fname),
file:delete(Fname);
_ ->
- show("~p~n", [Info])
+ show("~tp~n", [Info])
end
end,
lists:foreach(Fun, BinList);
@@ -895,12 +895,12 @@ vcore_elem({logfile, {ok, BinList}}) ->
vcore_elem({crashinfo, {Format, Args}}) ->
show(Format, Args);
vcore_elem({gvar, L}) ->
- show("~p~n", [lists:sort(L)]);
+ show("~tp~n", [lists:sort(L)]);
vcore_elem({transactions, Info}) ->
mnesia_tm:display_info(user, Info);
vcore_elem({_Item, Info}) ->
- show("~p~n", [Info]).
+ show("~tp~n", [Info]).
fix_error(X) ->
set(last_error, X), %% for debugabililty
@@ -1018,7 +1018,7 @@ report_system_event({'EXIT', Reason}, Event) ->
end;
Error ->
- Msg = "Mnesia(~p): Cannot report event ~p: ~p (~p)~n",
+ Msg = "Mnesia(~tp): Cannot report event ~tp: ~tp (~tp)~n",
error_logger:format(Msg, [node(), Event, Reason, Error])
end,
ok;
diff --git a/lib/mnesia/src/mnesia_loader.erl b/lib/mnesia/src/mnesia_loader.erl
index c710470a2c..4c6336cb73 100644
--- a/lib/mnesia/src/mnesia_loader.erl
+++ b/lib/mnesia/src/mnesia_loader.erl
@@ -46,7 +46,7 @@ val(Var) ->
disc_load_table(Tab, Reason) ->
Storage = val({Tab, storage_type}),
Type = val({Tab, setorbag}),
- dbg_out("Getting table ~p (~p) from disc: ~p~n",
+ dbg_out("Getting table ~tp (~p) from disc: ~tp~n",
[Tab, Storage, Reason]),
?eval_debug_fun({?MODULE, do_get_disc_copy},
[{tab, Tab},
@@ -56,7 +56,7 @@ disc_load_table(Tab, Reason) ->
do_get_disc_copy2(Tab, Reason, Storage, Type).
do_get_disc_copy2(Tab, _Reason, Storage, _Type) when Storage == unknown ->
- verbose("Local table copy of ~p has recently been deleted, ignored.~n",
+ verbose("Local table copy of ~tp has recently been deleted, ignored.~n",
[Tab]),
{not_loaded, storage_unknown};
do_get_disc_copy2(Tab, Reason, Storage, Type) when Storage == disc_copies ->
@@ -199,20 +199,20 @@ net_load_table(Tab, Reason, Ns, _Cs) ->
try_net_load_table(Tab, Reason, Ns, val({Tab, cstruct})).
try_net_load_table(Tab, _Reason, [], _Cs) ->
- verbose("Copy failed. No active replicas of ~p are available.~n", [Tab]),
+ verbose("Copy failed. No active replicas of ~tp are available.~n", [Tab]),
{not_loaded, none_active};
try_net_load_table(Tab, Reason, Ns, Cs) ->
Storage = mnesia_lib:cs_to_storage_type(node(), Cs),
do_get_network_copy(Tab, Reason, Ns, Storage, Cs).
do_get_network_copy(Tab, _Reason, _Ns, unknown, _Cs) ->
- verbose("Local table copy of ~p has recently been deleted, ignored.~n", [Tab]),
+ verbose("Local table copy of ~tp has recently been deleted, ignored.~n", [Tab]),
{not_loaded, storage_unknown};
do_get_network_copy(Tab, Reason, Ns, Storage, Cs) ->
[Node | Tail] = Ns,
case lists:member(Node,val({current, db_nodes})) of
true ->
- dbg_out("Getting table ~p (~p) from node ~p: ~p~n",
+ dbg_out("Getting table ~tp (~p) from node ~p: ~tp~n",
[Tab, Storage, Node, Reason]),
?eval_debug_fun({?MODULE, do_get_network_copy},
[{tab, Tab}, {reason, Reason},
@@ -222,7 +222,7 @@ do_get_network_copy(Tab, Reason, Ns, Storage, Cs) ->
set({Tab, load_node}, Node),
set({Tab, load_reason}, Reason),
mnesia_controller:i_have_tab(Tab),
- dbg_out("Table ~p copied from ~p to ~p~n", [Tab, Node, node()]),
+ dbg_out("Table ~tp copied from ~p to ~p~n", [Tab, Node, node()]),
{loaded, ok};
Err = {error, _} when element(1, Reason) == dumper ->
{not_loaded,Err};
@@ -286,12 +286,12 @@ init_receiver(Node, Tab,Storage,Cs,Reason) ->
element(1,Reason) == dumper ->
{error,Result};
{atomic, {error,Result}} ->
- fatal("Cannot create table ~p: ~p~n",
+ fatal("Cannot create table ~tp: ~tp~n",
[[Tab, Storage], Result]);
{atomic, Result} -> Result;
{aborted, nomore} -> restart;
{aborted, _Reas} ->
- verbose("Receiver failed on ~p from ~p:~nReason: ~p~n",
+ verbose("Receiver failed on ~tp from ~p:~nReason: ~tp~n",
[Tab,Node,_Reas]),
down %% either this node or sender is dying
end,
@@ -313,7 +313,7 @@ start_remote_sender(Node,Tab,Storage) ->
{SenderPid, TabSize, DetsData};
%% Protocol conversion hack
{copier_done, Node} ->
- verbose("Sender of table ~p crashed on node ~p ~n", [Tab, Node]),
+ verbose("Sender of table ~tp crashed on node ~p ~n", [Tab, Node]),
down(Tab, Storage)
end.
@@ -374,7 +374,7 @@ do_init_table(Tab,Storage,Cs,SenderPid,
tab_receiver(Node,Tab,Storage,Cs,OrigTabRec);
Reason ->
Msg = "[d]ets:init table failed",
- verbose("~s: ~p: ~p~n", [Msg, Tab, Reason]),
+ verbose("~ts: ~tp: ~tp~n", [Msg, Tab, Reason]),
down(Tab, Storage)
end;
Error ->
@@ -432,7 +432,7 @@ tab_receiver(Node, Tab, Storage, Cs, OrigTabRec) ->
%% Protocol conversion hack
{copier_done, Node} ->
- verbose("Sender of table ~p crashed on node ~p ~n", [Tab, Node]),
+ verbose("Sender of table ~tp crashed on node ~p ~n", [Tab, Node]),
down(Tab, Storage);
{'EXIT', Pid, Reason} ->
@@ -490,7 +490,7 @@ ext_load_table(Mod, Alias, Tab, Reason) ->
ext_init_table(Action, Alias, Mod, Tab, Fun, State, Sender) ->
case Fun(Action) of
{copier_done, Node} ->
- verbose("Receiver of table ~p crashed on ~p (more)~n", [Tab, Node]),
+ verbose("Receiver of table ~tp crashed on ~p (more)~n", [Tab, Node]),
down(Tab, {ext,Alias,Mod});
{Data, NewFun} ->
case Mod:receive_data(Data, Alias, Tab, Sender, State) of
@@ -553,7 +553,7 @@ finish_copy(Storage,Tab,Cs,SenderPid,DatBin,OrigTabRec) ->
ok;
{error, Reason} ->
Msg = "Failed to handle last",
- verbose("~s: ~p: ~p~n", [Msg, Tab, Reason]),
+ verbose("~ts: ~tp: ~tp~n", [Msg, Tab, Reason]),
down(Tab, Storage)
end.
@@ -859,7 +859,7 @@ send_more(Pid, N, Chunk, DataState, Tab, Storage) ->
send_more(Pid, 1, NewChunk, Init(), Tab, Storage);
{copier_done, Node} when Node == node(Pid)->
- verbose("Receiver of table ~p crashed on ~p (more)~n", [Tab, Node]),
+ verbose("Receiver of table ~tp crashed on ~p (more)~n", [Tab, Node]),
throw(receiver_died)
end.
@@ -937,7 +937,7 @@ finish_copy(Pid, Tab, Storage, RemoteS, NeedLock) ->
{Pid, no_more} -> % Dont bother about the spurious 'more' message
no_more;
{copier_done, Node} ->
- verbose("Tab receiver ~p crashed (more): ~p~n", [Tab, Node]),
+ verbose("Tab receiver ~tp crashed (more): ~p~n", [Tab, Node]),
receiver_died
end
end,
diff --git a/lib/mnesia/src/mnesia_locker.erl b/lib/mnesia/src/mnesia_locker.erl
index 59fd89059f..073b48abc0 100644
--- a/lib/mnesia/src/mnesia_locker.erl
+++ b/lib/mnesia/src/mnesia_locker.erl
@@ -245,7 +245,7 @@ loop(State) ->
do_stop();
{system, From, Msg} ->
- verbose("~p got {system, ~p, ~p}~n", [?MODULE, From, Msg]),
+ verbose("~p got {system, ~p, ~tp}~n", [?MODULE, From, Msg]),
Parent = State#state.supervisor,
sys:handle_system_msg(Msg, From, Parent, ?MODULE, [], State);
@@ -254,7 +254,7 @@ loop(State) ->
loop(State);
Msg ->
- error("~p got unexpected message: ~p~n", [?MODULE, Msg]),
+ error("~p got unexpected message: ~tp~n", [?MODULE, Msg]),
loop(State)
end.
diff --git a/lib/mnesia/src/mnesia_log.erl b/lib/mnesia/src/mnesia_log.erl
index 9536effd42..55b1d6e419 100644
--- a/lib/mnesia/src/mnesia_log.erl
+++ b/lib/mnesia/src/mnesia_log.erl
@@ -310,7 +310,7 @@ verify_no_exists(Fname) ->
false ->
ok;
true ->
- fatal("Log file exists: ~p~n", [Fname])
+ fatal("Log file exists: ~tp~n", [Fname])
end.
open_log(Name, Header, Fname) ->
@@ -331,7 +331,7 @@ open_log(Name, Header, Fname, Exists, Repair) ->
open_log(Name, Header, Fname, Exists, Repair, Mode) ->
Args = [{file, Fname}, {name, Name}, {repair, Repair}, {mode, Mode}],
-%% io:format("~p:open_log: ~p ~p~n", [?MODULE, Name, Fname]),
+%% io:format("~p:open_log: ~tp ~tp~n", [?MODULE, Name, Fname]),
case mnesia_monitor:open_log(Args) of
{ok, Log} when Exists == true ->
Log;
@@ -344,19 +344,19 @@ open_log(Name, Header, Fname, Exists, Repair, Mode) ->
write_header(Log, Header),
Log;
{repaired, Log, _Recover, BadBytes} ->
- mnesia_lib:important("Data may be missing, log ~p repaired: Lost ~p bytes~n",
+ mnesia_lib:important("Data may be missing, log ~tp repaired: Lost ~p bytes~n",
[Fname, BadBytes]),
Log;
{error, Reason = {file_error, _Fname, emfile}} ->
- fatal("Cannot open log file ~p: ~p~n", [Fname, Reason]);
+ fatal("Cannot open log file ~tp: ~tp~n", [Fname, Reason]);
{error, Reason} when Repair == true ->
file:delete(Fname),
- mnesia_lib:important("Data may be missing, Corrupt logfile deleted: ~p, ~p ~n",
+ mnesia_lib:important("Data may be missing, Corrupt logfile deleted: ~tp, ~tp ~n",
[Fname, Reason]),
%% Create a new
open_log(Name, Header, Fname, false, false, read_write);
{error, Reason} ->
- fatal("Cannot open log file ~p: ~p~n", [Fname, Reason])
+ fatal("Cannot open log file ~tp: ~tp~n", [Fname, Reason])
end.
write_header(Log, Header) ->
@@ -381,7 +381,7 @@ close_log(Log) ->
{error, {read_only_mode, Log}} ->
ok;
{error, Reason} ->
- mnesia_lib:important("Failed syncing ~p to_disk reason ~p ~n",
+ mnesia_lib:important("Failed syncing ~tp to_disk reason ~tp ~n",
[Log, Reason])
end,
mnesia_monitor:close_log(Log).
@@ -464,13 +464,13 @@ chunk_log(_Log, eof) ->
chunk_log(Log, Cont) ->
case disk_log:chunk(Log, Cont) of
{error, Reason} ->
- fatal("Possibly truncated ~p file: ~p~n",
+ fatal("Possibly truncated ~tp file: ~tp~n",
[Log, Reason]);
{C2, Chunk, _BadBytes} ->
%% Read_only case, should we warn about the bad log file?
%% BUGBUG Should we crash if Repair == false ??
%% We got to check this !!
- mnesia_lib:important("~p repaired, lost ~p bad bytes~n", [Log, _BadBytes]),
+ mnesia_lib:important("~tp repaired, lost ~p bad bytes~n", [Log, _BadBytes]),
{C2, Chunk};
Other ->
Other
@@ -505,7 +505,7 @@ prepare_decision_log_dump(false, Prev) ->
ok ->
prepare_decision_log_dump(true, Prev);
{error, Reason} ->
- fatal("Cannot rename decision log file ~p -> ~p: ~p~n",
+ fatal("Cannot rename decision log file ~tp -> ~tp: ~tp~n",
[decision_log_file(), Prev, Reason])
end;
prepare_decision_log_dump(true, Prev) ->
@@ -522,7 +522,7 @@ confirm_decision_log_dump() ->
ok ->
file:delete(previous_decision_log_file());
{error, Reason} ->
- fatal("Cannot confirm decision log dump: ~p~n",
+ fatal("Cannot confirm decision log dump: ~tp~n",
[Reason])
end.
@@ -561,7 +561,7 @@ view() ->
lists:foreach(fun(F) -> view(F) end, log_files()).
view(File) ->
- mnesia_lib:show("***** ~p ***** ~n", [File]),
+ mnesia_lib:show("***** ~tp ***** ~n", [File]),
case exists(File) of
false ->
nolog;
@@ -574,25 +574,25 @@ view(File) ->
{repaired, _, _, _} ->
view_file(start, N);
{error, Reason} ->
- error("Cannot open log ~p: ~p~n", [File, Reason])
+ error("Cannot open log ~tp: ~tp~n", [File, Reason])
end
end.
view_file(C, Log) ->
case disk_log:chunk(Log, C) of
{error, Reason} ->
- error("** Possibly truncated FILE ~p~n", [Reason]),
+ error("** Possibly truncated FILE ~tp~n", [Reason]),
error;
eof ->
disk_log:close(Log),
eof;
{C2, Terms, _BadBytes} ->
- dbg_out("Lost ~p bytes in ~p ~n", [_BadBytes, Log]),
- lists:foreach(fun(X) -> mnesia_lib:show("~p~n", [X]) end,
+ dbg_out("Lost ~p bytes in ~tp ~n", [_BadBytes, Log]),
+ lists:foreach(fun(X) -> mnesia_lib:show("~tp~n", [X]) end,
Terms),
view_file(C2, Log);
{C2, Terms} ->
- lists:foreach(fun(X) -> mnesia_lib:show("~p~n", [X]) end,
+ lists:foreach(fun(X) -> mnesia_lib:show("~tp~n", [X]) end,
Terms),
view_file(C2, Log)
end.
@@ -750,12 +750,12 @@ abort_write_fun(B, What, Args) ->
abort_write(B, What, Args, Reason) ->
Mod = B#backup_args.module,
Opaque = B#backup_args.opaque,
- dbg_out("Failed to perform backup. M=~p:F=~p:A=~p -> ~p~n",
+ dbg_out("Failed to perform backup. M=~p:F=~tp:A=~tp -> ~tp~n",
[Mod, What, Args, Reason]),
try apply(Mod, abort_write, [Opaque]) of
{ok, _Res} -> throw({error, Reason})
catch _:Other ->
- error("Failed to abort backup. ~p:~p~p -> ~p~n",
+ error("Failed to abort backup. ~p:~tp~tp -> ~tp~n",
[Mod, abort_write, [Opaque], Other]),
throw({error, Reason})
end.
@@ -802,7 +802,7 @@ select_source(Tab, Name, PrevName) ->
{PrevName, retainer};
_ ->
%% Do a full backup anyway
- dbg_out("Incremental backup escalated to full backup: ~p~n", [Tab]),
+ dbg_out("Incremental backup escalated to full backup: ~tp~n", [Tab]),
{Name, table}
end
end.
diff --git a/lib/mnesia/src/mnesia_monitor.erl b/lib/mnesia/src/mnesia_monitor.erl
index 22a24b6dc9..4cfe16dec0 100644
--- a/lib/mnesia/src/mnesia_monitor.erl
+++ b/lib/mnesia/src/mnesia_monitor.erl
@@ -178,10 +178,10 @@ check_protocol([{Node, {reject, _Mon, Version, Protocol}} | Tail], Protocols) ->
[Node, Protocols, Version, Protocol]),
check_protocol(Tail, Protocols);
check_protocol([{error, _Reason} | Tail], Protocols) ->
- dbg_out("~p connect failed error: ~p~n", [?MODULE, _Reason]),
+ dbg_out("~p connect failed error: ~tp~n", [?MODULE, _Reason]),
check_protocol(Tail, Protocols);
check_protocol([{badrpc, _Reason} | Tail], Protocols) ->
- dbg_out("~p connect failed badrpc: ~p~n", [?MODULE, _Reason]),
+ dbg_out("~p connect failed badrpc: ~tp~n", [?MODULE, _Reason]),
check_protocol(Tail, Protocols);
check_protocol([], [Protocol | _Protocols]) ->
set(protocol_version, Protocol),
@@ -246,10 +246,10 @@ start_proc(Who, Mod, Fun, Args) ->
proc_lib:start_link(mnesia_sp, init_proc, Args2, infinity).
terminate_proc(Who, R, State) when R /= shutdown, R /= killed ->
- fatal("~p crashed: ~p state: ~p~n", [Who, R, State]);
+ fatal("~p crashed: ~p state: ~tp~n", [Who, R, State]);
terminate_proc(Who, Reason, _State) ->
- mnesia_lib:verbose("~p terminated: ~p~n", [Who, Reason]),
+ mnesia_lib:verbose("~p terminated: ~tp~n", [Who, Reason]),
ok.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -294,7 +294,7 @@ init([Parent]) ->
{ok, #state{supervisor = Parent}}
catch _:Reason ->
- mnesia_lib:report_fatal("Bad configuration: ~p~n", [Reason]),
+ mnesia_lib:report_fatal("Bad configuration: ~tp~n", [Reason]),
{stop, {bad_config, Reason}}
end.
@@ -333,7 +333,7 @@ handle_call({mktab, Tab, Args}, _From, State) ->
catch error:ExitReason ->
Msg = "Cannot create ets table",
Reason = {system_limit, Msg, Tab, Args, ExitReason},
- fatal("~p~n", [Reason]),
+ fatal("~tp~n", [Reason]),
{noreply, State}
end;
@@ -353,7 +353,7 @@ handle_call({open_dets, Tab, Args}, _From, State) ->
{error, Reason} ->
Msg = "Cannot open dets table",
Error = {error, {Msg, Tab, Args, Reason}},
- fatal("~p~n", [Error]),
+ fatal("~tp~n", [Error]),
{noreply, State}
end;
@@ -385,7 +385,7 @@ handle_call({reopen_log, Name, Fname, Head}, _From, State) ->
{error, Reason} ->
Msg = "Cannot rename disk_log file",
Error = {error, {Msg, Name, Fname, Head, Reason}},
- fatal("~p~n", [Error]),
+ fatal("~tp~n", [Error]),
{noreply, State}
end;
@@ -400,7 +400,7 @@ handle_call({close_log, Name}, _From, State) ->
{error, Reason} ->
Msg = "Cannot close disk_log file",
Error = {error, {Msg, Name, Reason}},
- fatal("~p~n", [Error]),
+ fatal("~tp~n", [Error]),
{noreply, State}
end;
@@ -461,7 +461,7 @@ handle_call(init, _From, State) ->
{reply, EarlyNodes, State2};
handle_call(Msg, _From, State) ->
- error("~p got unexpected call: ~p~n", [?MODULE, Msg]),
+ error("~p got unexpected call: ~tp~n", [?MODULE, Msg]),
{noreply, State}.
accept_protocol(Mon, Version, Protocol, From, State) ->
@@ -535,7 +535,7 @@ handle_cast({inconsistent_database, Context, Node}, State) ->
{noreply, State};
handle_cast(Msg, State) ->
- error("~p got unexpected cast: ~p~n", [?MODULE, Msg]),
+ error("~p got unexpected cast: ~tp~n", [?MODULE, Msg]),
{noreply, State}.
%%----------------------------------------------------------------------
@@ -572,7 +572,7 @@ handle_info(Msg = {'EXIT',Pid,_}, State) ->
%% We have probably got an exit signal from
%% disk_log or dets
Hint = "Hint: check that the disk still is writable",
- fatal("~p got unexpected info: ~p; ~p~n",
+ fatal("~p got unexpected info: ~tp; ~p~n",
[?MODULE, Msg, Hint])
end;
@@ -599,13 +599,13 @@ handle_info({disk_log, _Node, Log, Info}, State) ->
{truncated, _No} ->
ok;
_ ->
- mnesia_lib:important("Warning Log file ~p error reason ~s~n",
+ mnesia_lib:important("Warning Log file ~tp error reason ~ts~n",
[Log, disk_log:format_error(Info)])
end,
{noreply, State};
handle_info(Msg, State) ->
- error("~p got unexpected info (~p): ~p~n", [?MODULE, State, Msg]).
+ error("~p got unexpected info (~tp): ~tp~n", [?MODULE, State, Msg]).
process_q(State = #state{mq=[]}) -> {noreply,State};
process_q(State = #state{mq=[{info,Msg}|R]}) ->
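
The ~s -> ~ts changes (for example in the disk_log warning above) follow the same pattern for string data: ~s assumes Latin-1, ~ts accepts Unicode input such as UTF-8 binaries or lists of code points. An illustration with an assumed UTF-8 binary, not taken from the patch:

%% ~s copies the bytes through as Latin-1, so UTF-8 input turns into
%% mojibake; ~ts decodes the UTF-8 binary first.
io:format("~s~n",  [<<"blåbär"/utf8>>]),   %% prints blÃ¥bÃ¤r
io:format("~ts~n", [<<"blåbär"/utf8>>]).   %% prints blåbär
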
diff --git a/lib/mnesia/src/mnesia_recover.erl b/lib/mnesia/src/mnesia_recover.erl
index b204fb282f..d792070332 100644
--- a/lib/mnesia/src/mnesia_recover.erl
+++ b/lib/mnesia/src/mnesia_recover.erl
@@ -762,7 +762,7 @@ handle_call(sync, _From, State) ->
{reply, ok, State};
handle_call(Msg, _From, State) ->
- error("~p got unexpected call: ~p~n", [?MODULE, Msg]),
+ error("~p got unexpected call: ~tp~n", [?MODULE, Msg]),
{noreply, State}.
do_log_mnesia_up(Node) ->
@@ -881,7 +881,7 @@ handle_cast({log_dump_overload, Flag}, State) when is_boolean(Flag) ->
{noreply, State#state{log_dump_overload = Flag}};
handle_cast(Msg, State) ->
- error("~p got unexpected cast: ~p~n", [?MODULE, Msg]),
+ error("~p got unexpected cast: ~tp~n", [?MODULE, Msg]),
{noreply, State}.
%%----------------------------------------------------------------------
@@ -927,11 +927,11 @@ handle_info({force_decision, Tid}, State) ->
end;
handle_info({'EXIT', Pid, R}, State) when Pid == State#state.supervisor ->
- mnesia_lib:dbg_out("~p was ~p~n",[?MODULE, R]),
+ mnesia_lib:dbg_out("~p was ~tp~n",[?MODULE, R]),
{stop, shutdown, State};
handle_info(Msg, State) ->
- error("~p got unexpected info: ~p~n", [?MODULE, Msg]),
+ error("~p got unexpected info: ~tp~n", [?MODULE, Msg]),
{noreply, State}.
%%----------------------------------------------------------------------
diff --git a/lib/mnesia/src/mnesia_schema.erl b/lib/mnesia/src/mnesia_schema.erl
index f71ee26d7c..83cc19c678 100644
--- a/lib/mnesia/src/mnesia_schema.erl
+++ b/lib/mnesia/src/mnesia_schema.erl
@@ -386,7 +386,7 @@ delete_schema(Ns) when is_list(Ns), Ns /= [] ->
[] ->
ok;
BadReplies ->
- verbose("~s: ~p~n", [Reason, BadReplies]),
+ verbose("~s: ~tp~n", [Reason, BadReplies]),
{error, {"All nodes not running", BadReplies}}
end;
{_Replies, BadNs} ->
@@ -467,10 +467,10 @@ opt_create_dir(UseDir, Dir) when UseDir == true->
false ->
case file:make_dir(Dir) of
ok ->
- verbose("Create Directory ~p~n", [Dir]),
+ verbose("Create Directory ~tp~n", [Dir]),
ok;
{error, Reason} ->
- verbose("Cannot create mnesia dir ~p~n", [Reason]),
+ verbose("Cannot create mnesia dir ~tp~n", [Reason]),
{error, {"Cannot create Mnesia dir", Dir, Reason}}
end
end;
@@ -1470,7 +1470,7 @@ verify_backend_type(Name, Module) ->
[] ->
ok;
_Other ->
- io:fwrite(user, "Missing backend_type exports: ~p~n", [_Other]),
+ io:fwrite(user, "Missing backend_type exports: ~tp~n", [_Other]),
mnesia:abort({bad_type, {backend_type,Name,Module}})
end.
@@ -1776,7 +1776,7 @@ make_del_table_copy(Tab, Node) ->
mnesia:abort({combine_error, Tab, "Last replica"});
[] ->
ensure_active(Cs),
- dbg_out("Last replica deleted in table ~p~n", [Tab]),
+ dbg_out("Last replica deleted in table ~tp~n", [Tab]),
make_delete_table(Tab, whole_table);
_ when Tab == schema ->
%% ensure_active(Cs2),
@@ -2178,13 +2178,13 @@ do_write_table_property(Tab, Prop) ->
case change_prop_in_existing_op(Tab, Prop, write_property, Store) of
true ->
dbg_out("change_prop_in_existing_op"
- "(~p,~p,write_property,Store) -> true~n",
+ "(~tp,~p,write_property,Store) -> true~n",
[Tab,Prop]),
%% we have merged the table prop into the create_table op
ok;
false ->
dbg_out("change_prop_in_existing_op"
- "(~p,~p,write_property,Store) -> false~n",
+ "(~tp,~p,write_property,Store) -> false~n",
[Tab,Prop]),
%% this must be an existing table
get_tid_ts_and_lock(Tab, none),
@@ -2315,13 +2315,13 @@ do_delete_table_property(Tab, PropKey) ->
case change_prop_in_existing_op(Tab, PropKey, delete_property, Store) of
true ->
dbg_out("change_prop_in_existing_op"
- "(~p,~p,delete_property,Store) -> true~n",
+ "(~tp,~p,delete_property,Store) -> true~n",
[Tab,PropKey]),
%% we have merged the table prop into the create_table op
ok;
false ->
dbg_out("change_prop_in_existing_op"
- "(~p,~p,delete_property,Store) -> false~n",
+ "(~tp,~p,delete_property,Store) -> false~n",
[Tab,PropKey]),
%% this must be an existing table
get_tid_ts_and_lock(Tab, none),
@@ -2435,17 +2435,17 @@ prepare_op(_Tid, {op, sync_trans}, {part, CoordPid}) ->
{sync_trans, CoordPid} ->
{false, optional};
{mnesia_down, _Node} = Else ->
- mnesia_lib:verbose("sync_op terminated due to ~p~n", [Else]),
+ mnesia_lib:verbose("sync_op terminated due to ~tp~n", [Else]),
mnesia:abort(Else);
{'EXIT', _, _} = Else ->
- mnesia_lib:verbose("sync_op terminated due to ~p~n", [Else]),
+ mnesia_lib:verbose("sync_op terminated due to ~tp~n", [Else]),
mnesia:abort(Else)
end;
prepare_op(_Tid, {op, sync_trans}, {coord, Nodes}) ->
case receive_sync(Nodes, []) of
{abort, Reason} ->
- mnesia_lib:verbose("sync_op terminated due to ~p~n", [Reason]),
+ mnesia_lib:verbose("sync_op terminated due to ~tp~n", [Reason]),
mnesia:abort(Reason);
Pids ->
[Pid ! {sync_trans, self()} || Pid <- Pids],
@@ -2707,7 +2707,7 @@ prepare_op(_Tid, {op, transform, Fun, TabDef}, _WaitFor) ->
{true, Objs, mandatory}
catch _:Reason ->
mnesia_lib:db_fixtable(Storage, Tab, false),
- mnesia_lib:important("Transform function failed: '~p' in '~p'",
+ mnesia_lib:important("Transform function failed: '~tp' in '~tp'",
[Reason, erlang:get_stacktrace()]),
exit({"Bad transform function", Tab, Fun, node(), Reason})
end
@@ -2719,7 +2719,7 @@ prepare_op(_Tid, {op, merge_schema, TabDef}, _WaitFor) ->
ok ->
{true, optional};
Error ->
- verbose("Merge_Schema ~p failed on ~p: ~p~n", [_Tid,node(),Error]),
+ verbose("Merge_Schema ~p failed on ~p: ~tp~n", [_Tid,node(),Error]),
mnesia:abort({bad_commit, Error})
end;
prepare_op(_Tid, _Op, _WaitFor) ->
@@ -3133,7 +3133,7 @@ ext_real_suffixes(Ext) ->
[M || {_,M} <- Ext])
catch
error:E ->
- verbose("Cant find real ext suffixes (~p)~n", [E]),
+ verbose("Cant find real ext suffixes (~tp)~n", [E]),
[]
end.
@@ -3142,7 +3142,7 @@ ext_tmp_suffixes(Ext) ->
[M || {_,M} <- Ext])
catch
error:E ->
- verbose("Cant find tmp ext suffixes (~p)~n", [E]),
+ verbose("Cant find tmp ext suffixes (~tp)~n", [E]),
[]
end.
@@ -3153,14 +3153,14 @@ info() ->
info(Tab) ->
Props = get_table_properties(Tab),
- io:format("-- Properties for ~w table --- ~n",[Tab]),
+ io:format("-- Properties for ~tw table --- ~n",[Tab]),
info2(Tab, Props).
info2(Tab, [{cstruct, _V} | Tail]) -> % Ignore cstruct
info2(Tab, Tail);
info2(Tab, [{frag_hash, _V} | Tail]) -> % Ignore frag_hash
info2(Tab, Tail);
info2(Tab, [{P, V} | Tail]) ->
- io:format("~-20w -> ~p~n",[P,V]),
+ io:format("~-20tw -> ~tp~n",[P,V]),
info2(Tab, Tail);
info2(_, []) ->
io:format("~n", []).
@@ -3726,7 +3726,7 @@ merge_versions(AnythingNew, Cs, RemoteCs, Force) ->
ok;
true ->
Str = io_lib:format("Bad cookies. Cannot merge definitions of "
- "table ~w. Local = ~w, Remote = ~w~n",
+ "table ~tw. Local = ~w, Remote = ~w~n",
[Cs#cstruct.name, Cs, RemoteCs]),
throw(Str)
end,
@@ -3746,7 +3746,7 @@ merge_versions(AnythingNew, Cs, RemoteCs, Force) ->
do_merge_versions(AnythingNew, Cs, RemoteCs);
true ->
Str1 = io_lib:format("Cannot merge definitions of "
- "table ~w. Local = ~w, Remote = ~w~n",
+ "table ~tw. Local = ~w, Remote = ~w~n",
[Cs#cstruct.name, Cs, RemoteCs]),
throw(Str1)
end.
diff --git a/lib/mnesia/src/mnesia_subscr.erl b/lib/mnesia/src/mnesia_subscr.erl
index c2748f5bae..dfaa20d2d3 100644
--- a/lib/mnesia/src/mnesia_subscr.erl
+++ b/lib/mnesia/src/mnesia_subscr.erl
@@ -264,7 +264,7 @@ handle_call({change, How}, _From, State) ->
{reply, Reply, State};
handle_call(Msg, _From, State) ->
- error("~p got unexpected call: ~p~n", [?MODULE, Msg]),
+ error("~p got unexpected call: ~tp~n", [?MODULE, Msg]),
{noreply, State}.
%%----------------------------------------------------------------------
@@ -274,7 +274,7 @@ handle_call(Msg, _From, State) ->
%% {stop, Reason, State} (terminate/2 is called)
%%----------------------------------------------------------------------
handle_cast(Msg, State) ->
- error("~p got unexpected cast: ~p~n", [?MODULE, Msg]),
+ error("~p got unexpected cast: ~tp~n", [?MODULE, Msg]),
{noreply, State}.
%%----------------------------------------------------------------------
@@ -292,7 +292,7 @@ handle_info({'EXIT', Pid, _Reason}, State) ->
{noreply, State};
handle_info(Msg, State) ->
- error("~p got unexpected info: ~p~n", [?MODULE, Msg]),
+ error("~p got unexpected info: ~tp~n", [?MODULE, Msg]),
{noreply, State}.
%%----------------------------------------------------------------------
diff --git a/lib/mnesia/src/mnesia_text.erl b/lib/mnesia/src/mnesia_text.erl
index 21adca813a..7d24d09472 100644
--- a/lib/mnesia/src/mnesia_text.erl
+++ b/lib/mnesia/src/mnesia_text.erl
@@ -87,18 +87,18 @@ validate_tab(_) -> error(badtab).
make_tabs([{Tab, Def} | Tail]) ->
try mnesia:table_info(Tab, where_to_read) of
Node ->
- io:format("** Table ~w already exists on ~p, just entering data~n",
+ io:format("** Table ~tw already exists on ~p, just entering data~n",
[Tab, Node]),
make_tabs(Tail)
catch exit:_ -> %% non-existing table
case mnesia:create_table(Tab, Def) of
{aborted, Reason} ->
- io:format("** Failed to create table ~w ~n"
- "** Reason = ~w, Args = ~p~n",
+ io:format("** Failed to create table ~tw ~n"
+ "** Reason = ~tw, Args = ~tp~n",
[Tab, Reason, Def]),
[Tab | make_tabs(Tail)];
_ ->
- io:format("New table ~w~n", [Tab]),
+ io:format("New table ~tw~n", [Tab]),
make_tabs(Tail)
end
end;
@@ -139,12 +139,12 @@ collect_data(Tabs, [{Line, Term} | Tail]) when is_tuple(Term) ->
{value, _} ->
[Term | collect_data(Tabs, Tail)];
_Other ->
- io:format("Object:~p at line ~w unknown\n", [Term,Line]),
+ io:format("Object:~tp at line ~w unknown\n", [Term,Line]),
error(undefined_object)
end;
collect_data(_Tabs, []) -> [];
collect_data(_Tabs, [H|_T]) ->
- io:format("Object:~p unknown\n", [H]),
+ io:format("Object:~tp unknown\n", [H]),
error(undefined_object).
error(What) -> throw({error, What}).
@@ -178,7 +178,7 @@ read_term_from_stream(Stream, File, Line) ->
{ok, {Line, Term}, EndLine};
{error, {NewLine,Mod,What}} ->
Str = Mod:format_error(What),
- io:format("Error in line:~p of:~p ~s\n",
+ io:format("Error in line:~p of:~tp ~ts\n",
[NewLine, File, Str]),
error
end;
diff --git a/lib/mnesia/src/mnesia_tm.erl b/lib/mnesia/src/mnesia_tm.erl
index 305bf14bcf..ebf580d09e 100644
--- a/lib/mnesia/src/mnesia_tm.erl
+++ b/lib/mnesia/src/mnesia_tm.erl
@@ -314,7 +314,7 @@ doit_loop(#state{coordinators=Coordinators,participants=Participants,supervisor=
?eval_debug_fun({?MODULE, do_abort, pre}, [{tid, Tid}]),
case gb_trees:lookup(Tid, Participants) of
none ->
- verbose("Tried to abort a non participant transaction ~p: ~p~n",
+ verbose("Tried to abort a non participant transaction ~p: ~tp~n",
[Tid, Reason]),
mnesia_locker:release_tid(Tid),
doit_loop(State);
@@ -417,7 +417,7 @@ doit_loop(#state{coordinators=Coordinators,participants=Participants,supervisor=
{From, {unblock_me, Tab}} ->
case lists:member(Tab, State#state.blocked_tabs) of
false ->
- verbose("Wrong dirty Op blocked on ~p ~p ~p",
+ verbose("Wrong dirty Op blocked on ~p ~tp ~p",
[node(), Tab, From]),
reply(From, unblocked),
doit_loop(State);
@@ -466,11 +466,11 @@ doit_loop(#state{coordinators=Coordinators,participants=Participants,supervisor=
end;
{system, From, Msg} ->
- dbg_out("~p got {system, ~p, ~p}~n", [?MODULE, From, Msg]),
+ dbg_out("~p got {system, ~p, ~tp}~n", [?MODULE, From, Msg]),
sys:handle_system_msg(Msg, From, Sup, ?MODULE, [], State);
Msg ->
- verbose("** ERROR ** ~p got unexpected message: ~p~n", [?MODULE, Msg]),
+ verbose("** ERROR ** ~p got unexpected message: ~tp~n", [?MODULE, Msg]),
doit_loop(State)
end.
@@ -556,7 +556,7 @@ handle_exit(Pid, Reason, State) ->
%% We got exit from a local fool
doit_loop(State);
{P = #participant{}, _RestP} ->
- fatal("Participant ~p in transaction ~p died ~p~n",
+ fatal("Participant ~p in transaction ~p died ~tp~n",
[P#participant.pid, P#participant.tid, Reason]),
NewPs = gb_trees:delete(P#participant.tid,State#state.participants),
doit_loop(State#state{participants = NewPs})
@@ -598,7 +598,7 @@ recover_coordinator(Tid, Etabs) ->
ok %% to the new nested trans store.
end
catch _:Reason ->
- dbg_out("Recovery of coordinator ~p failed:~n",
+ dbg_out("Recovery of coordinator ~p failed: ~tp~n",
[Tid, {Reason, erlang:get_stacktrace()}]),
Protocol = asym_trans,
tell_outcome(Tid, Protocol, node(), CheckNodes, TellNodes)
@@ -941,7 +941,7 @@ decr(_X) -> 0.
return_abort(Fun, Args, Reason) ->
{_Mod, Tid, Ts} = get(mnesia_activity_state),
- dbg_out("Transaction ~p calling ~p with ~p failed: ~n ~p~n",
+ dbg_out("Transaction ~p calling ~tp with ~tp failed: ~n ~tp~n",
[Tid, Fun, Args, Reason]),
OldStore = Ts#tidstore.store,
Nodes = get_elements(nodes, OldStore),
@@ -1714,7 +1714,7 @@ commit_participant(Coord, Tid, Bin, C0, DiscNs, _RamNs) ->
mnesia_schema:undo_prepare_commit(Tid, C0);
Msg ->
- verbose("** ERROR ** commit_participant ~p, got unexpected msg: ~p~n",
+ verbose("** ERROR ** commit_participant ~p, got unexpected msg: ~tp~n",
[Tid, Msg])
end;
{Tid, {do_abort, Reason}} ->
@@ -1730,7 +1730,7 @@ commit_participant(Coord, Tid, Bin, C0, DiscNs, _RamNs) ->
Msg ->
reply(Coord, {do_abort, Tid, self(), {bad_commit,internal}}),
- verbose("** ERROR ** commit_participant ~p, got unexpected msg: ~p~n",
+ verbose("** ERROR ** commit_participant ~p, got unexpected msg: ~tp~n",
[Tid, Msg])
end
catch _:Reason ->
@@ -1804,7 +1804,7 @@ do_update(Tid, Storage, [Op | Ops], OldRes) ->
%% Determine actual storage type and try again.
%% BUGBUG: Updates may be lost if table is transformed.
ST = erlang:get_stacktrace(),
- verbose("do_update in ~w failed: ~p -> {'EXIT', ~p}~n",
+ verbose("do_update in ~w failed: ~tp -> {'EXIT', ~tp}~n",
[Tid, Op, {Reason, ST}]),
do_update(Tid, Storage, Ops, OldRes)
end;
@@ -1919,7 +1919,7 @@ do_snmp(Tid, [Head|Tail]) ->
%% deleted our local replica or recently deattached
%% the snmp table
ST = erlang:get_stacktrace(),
- verbose("do_snmp in ~w failed: ~p -> {'EXIT', ~p}~n",
+ verbose("do_snmp in ~w failed: ~tp -> {'EXIT', ~tp}~n",
[Tid, Head, {Reason, ST}])
end,
do_snmp(Tid, Tail).
@@ -2151,7 +2151,7 @@ pr_participant(Stream, P) ->
true -> Commit0
end,
pr_tid(Stream, P#participant.tid),
- io:format(Stream, "with participant objects ~p~n", [Commit]).
+ io:format(Stream, "with participant objects ~tp~n", [Commit]).
pr_tid(Stream, Tid) ->
@@ -2193,7 +2193,7 @@ search_pr_participant(S, [ P | Tail]) ->
true -> Commit0
end,
- io:format("~p~n", [Commit]),
+ io:format("~tp~n", [Commit]),
search_pr_participant(S,Tail); %% !!!!!
true ->
search_pr_participant(S, Tail)
@@ -2214,12 +2214,12 @@ display_pid_info(Pid) ->
Reds = fetch(reductions, Info),
LM = length(fetch(messages, Info)),
pformat(io_lib:format("~p", [Pid]),
- io_lib:format("~p", [Call]),
- io_lib:format("~p", [Curr]), Reds, LM)
+ io_lib:format("~tp", [Call]),
+ io_lib:format("~tp", [Curr]), Reds, LM)
end.
pformat(A1, A2, A3, A4, A5) ->
- io:format( "~-12s ~-21s ~-21s ~9w ~4w~n", [A1,A2,A3,A4,A5]).
+ io:format( "~-12s ~-21ts ~-21ts ~9w ~4w~n", [A1,A2,A3,A4,A5]).
fetch(Key, Info) ->
case lists:keysearch(Key, 1, Info) of
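
The pformat/5 change above keeps the fixed column layout and only switches the two string columns to the Unicode-aware directive (~-21s -> ~-21ts); field width and left justification are unchanged. A small sketch with made-up column values, illustrative only:

%% ~-21ts: left-justified, 21-character field, Unicode-aware string.
io:format("~-12s ~-21ts ~-21ts ~9w ~4w~n",
          ["<0.88.0>", "gen_server:loop/7", "mnesia_tm:doit_loop/1", 1234, 0]).
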
diff --git a/lib/observer/src/cdv_atom_cb.erl b/lib/observer/src/cdv_atom_cb.erl
index a123354c8f..86cdf2fd6d 100644
--- a/lib/observer/src/cdv_atom_cb.erl
+++ b/lib/observer/src/cdv_atom_cb.erl
@@ -42,7 +42,7 @@ get_info(_) ->
{Info,TW}.
format({Bin,q}) when is_binary(Bin) ->
- [$'|binary_to_list(Bin)];
+ [$'|lists:flatten(io_lib:format("~ts",[Bin]))];
format({Bin,nq}) when is_binary(Bin) ->
lists:flatten(io_lib:format("~ts",[Bin]));
format(D) ->
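
The cdv_atom_cb change above is needed because binary_to_list/1 only yields the raw bytes of the atom name, while formatting the binary with ~ts decodes it as UTF-8 first. A sketch with an assumed atom-name binary, not from a real dump:

%% Quoted-atom label: bytes versus decoded code points.
Bin = <<"lärare"/utf8>>,
Raw = [$'|binary_to_list(Bin)],                            %% "'l" ++ [195,164|"rare"] - raw bytes
Decoded = [$'|lists:flatten(io_lib:format("~ts",[Bin]))].  %% "'lärare" - code points
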
diff --git a/lib/observer/src/cdv_bin_cb.erl b/lib/observer/src/cdv_bin_cb.erl
index 5472d36a6f..5502869973 100644
--- a/lib/observer/src/cdv_bin_cb.erl
+++ b/lib/observer/src/cdv_bin_cb.erl
@@ -38,6 +38,7 @@ init_bin_page(Parent,{Type,Bin}) ->
[{"Format \~p",cdv_html_wx,{Type,format_bin_fun("~p",Bin)}},
{"Format \~tp",cdv_html_wx,{Type,format_bin_fun("~tp",Bin)}},
{"Format \~w",cdv_html_wx,{Type,format_bin_fun("~w",Bin)}},
+ {"Format \~tw",cdv_html_wx,{Type,format_bin_fun("~tw",Bin)}},
{"Format \~s",cdv_html_wx,{Type,format_bin_fun("~s",Bin)}},
{"Format \~ts",cdv_html_wx,{Type,format_bin_fun("~ts",Bin)}},
{"Hex",cdv_html_wx,{Type,hex_binary_fun(Bin)}},
@@ -56,7 +57,7 @@ format_bin_fun(Format,Bin) ->
binary_to_term_fun(Bin) ->
fun() ->
try binary_to_term(Bin) of
- Term -> plain_html(io_lib:format("~p",[Term]))
+ Term -> plain_html(io_lib:format("~tp",[Term]))
catch error:badarg ->
Warning = "This binary can not be converted to an Erlang term",
observer_html_lib:warning(Warning)
diff --git a/lib/observer/src/cdv_detail_wx.erl b/lib/observer/src/cdv_detail_wx.erl
index 27057fd27f..4c26e447a6 100644
--- a/lib/observer/src/cdv_detail_wx.erl
+++ b/lib/observer/src/cdv_detail_wx.erl
@@ -133,7 +133,7 @@ handle_event(Event, _State) ->
error({unhandled_event, Event}).
handle_info(_Info, State) ->
- %% io:format("~p: ~p, Handle info: ~p~n", [?MODULE, ?LINE, _Info]),
+ %% io:format("~p: ~p, Handle info: ~tp~n", [?MODULE, ?LINE, _Info]),
{noreply, State}.
handle_call(Call, From, _State) ->
diff --git a/lib/observer/src/cdv_html_wx.erl b/lib/observer/src/cdv_html_wx.erl
index 0ab0ba4315..5158e95a65 100644
--- a/lib/observer/src/cdv_html_wx.erl
+++ b/lib/observer/src/cdv_html_wx.erl
@@ -62,7 +62,7 @@ handle_info(active, State) ->
{noreply, State};
handle_info(Info, State) ->
- io:format("~p:~p: Unhandled info: ~p~n", [?MODULE, ?LINE, Info]),
+ io:format("~p:~p: Unhandled info: ~tp~n", [?MODULE, ?LINE, Info]),
{noreply, State}.
terminate(_Reason, _State) ->
@@ -72,7 +72,7 @@ code_change(_, _, State) ->
{ok, State}.
handle_call(Msg, _From, State) ->
- io:format("~p~p: Unhandled Call ~p~n",[?MODULE, ?LINE, Msg]),
+ io:format("~p~p: Unhandled Call ~tp~n",[?MODULE, ?LINE, Msg]),
{reply, ok, State}.
handle_cast({detail_win_closed, Id},#state{expand_wins=Opened0}=State) ->
@@ -80,7 +80,7 @@ handle_cast({detail_win_closed, Id},#state{expand_wins=Opened0}=State) ->
{noreply, State#state{expand_wins=Opened}};
handle_cast(Msg, State) ->
- io:format("~p~p: Unhandled cast ~p~n",[?MODULE, ?LINE, Msg]),
+ io:format("~p~p: Unhandled cast ~tp~n",[?MODULE, ?LINE, Msg]),
{noreply, State}.
handle_event(#wx{event=#wxHtmlLink{type=command_html_link_clicked,
@@ -118,7 +118,7 @@ handle_event(#wx{event=#wxHtmlLink{type=command_html_link_clicked,
{noreply, NewState};
handle_event(Event, State) ->
- io:format("~p:~p: Unhandled event ~p\n", [?MODULE,?LINE,Event]),
+ io:format("~p:~p: Unhandled event ~tp\n", [?MODULE,?LINE,Event]),
{noreply, State}.
%%%-----------------------------------------------------------------
diff --git a/lib/observer/src/cdv_info_wx.erl b/lib/observer/src/cdv_info_wx.erl
index 01fe6b15f2..7e416dd11a 100644
--- a/lib/observer/src/cdv_info_wx.erl
+++ b/lib/observer/src/cdv_info_wx.erl
@@ -65,7 +65,7 @@ handle_info(active, State) ->
{noreply, State};
handle_info(Info, State) ->
- io:format("~p:~p: Unhandled info: ~p~n", [?MODULE, ?LINE, Info]),
+ io:format("~p:~p: Unhandled info: ~tp~n", [?MODULE, ?LINE, Info]),
{noreply, State}.
terminate(_Reason, _State) ->
@@ -88,11 +88,11 @@ handle_call(new_dump, _From, #state{callback=Callback,panel=Panel,
{reply, ok, State#state{fpanel=NewFPanel,trunc_warn=TW}};
handle_call(Msg, _From, State) ->
- io:format("~p~p: Unhandled Call ~p~n",[?MODULE, ?LINE, Msg]),
+ io:format("~p~p: Unhandled Call ~tp~n",[?MODULE, ?LINE, Msg]),
{reply, ok, State}.
handle_cast(Msg, State) ->
- io:format("~p~p: Unhandled cast ~p~n",[?MODULE, ?LINE, Msg]),
+ io:format("~p~p: Unhandled cast ~tp~n",[?MODULE, ?LINE, Msg]),
{noreply, State}.
handle_event(#wx{event=#wxMouse{type=left_down},userData=Target}, State) ->
@@ -108,7 +108,7 @@ handle_event(#wx{obj=Obj,event=#wxMouse{type=leave_window}},State) ->
{noreply, State};
handle_event(Event, State) ->
- io:format("~p:~p: Unhandled event ~p\n", [?MODULE,?LINE,Event]),
+ io:format("~p:~p: Unhandled event ~tp\n", [?MODULE,?LINE,Event]),
{noreply, State}.
%%%-----------------------------------------------------------------
diff --git a/lib/observer/src/cdv_multi_wx.erl b/lib/observer/src/cdv_multi_wx.erl
index b511503752..93f045b1da 100644
--- a/lib/observer/src/cdv_multi_wx.erl
+++ b/lib/observer/src/cdv_multi_wx.erl
@@ -94,7 +94,7 @@ handle_info(active, State) ->
{noreply, NewState};
handle_info(Info, State) ->
- io:format("~p:~p: Unhandled info: ~p~n", [?MODULE, ?LINE, Info]),
+ io:format("~p:~p: Unhandled info: ~tp~n", [?MODULE, ?LINE, Info]),
{noreply, State}.
terminate(_Reason, _State) ->
@@ -112,11 +112,11 @@ handle_call(new_dump, _From, State) ->
{reply, ok, NewState};
handle_call(Msg, _From, State) ->
- io:format("~p:~p: Unhandled Call ~p~n",[?MODULE, ?LINE, Msg]),
+ io:format("~p:~p: Unhandled Call ~tp~n",[?MODULE, ?LINE, Msg]),
{reply, ok, State}.
handle_cast(Msg, State) ->
- io:format("~p:~p: Unhandled cast ~p~n",[?MODULE, ?LINE, Msg]),
+ io:format("~p:~p: Unhandled cast ~tp~n",[?MODULE, ?LINE, Msg]),
{noreply, State}.
handle_event(#wx{event=#wxCommand{type=command_listbox_selected,
@@ -136,7 +136,7 @@ handle_event(#wx{event=#wxCommand{type=command_listbox_selected,
{noreply,NewState};
handle_event(Event, State) ->
- io:format("~p:~p: Unhandled event ~p\n", [?MODULE,?LINE,Event]),
+ io:format("~p:~p: Unhandled event ~tp\n", [?MODULE,?LINE,Event]),
{noreply, State}.
%%%%%%%%%%%%%%%%%%%%%%% Internal %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
diff --git a/lib/observer/src/cdv_proc_cb.erl b/lib/observer/src/cdv_proc_cb.erl
index 592150146b..f10650bbb7 100644
--- a/lib/observer/src/cdv_proc_cb.erl
+++ b/lib/observer/src/cdv_proc_cb.erl
@@ -71,7 +71,7 @@ get_details(Id, _) ->
Proplist0 =
crashdump_viewer:to_proplist(record_info(fields,proc),Info),
Proplist = [{expand_table,Tab}|Proplist0],
- Title = io_lib:format("~s (~s)",[Info#proc.name, Id]),
+ Title = io_lib:format("~ts (~s)",[Info#proc.name, Id]),
{ok,{Title,Proplist,TW}};
{error,{other_node,NodeId}} ->
Info = "The process you are searching for was residing on "
diff --git a/lib/observer/src/cdv_table_wx.erl b/lib/observer/src/cdv_table_wx.erl
index df16230b70..ba23758ea6 100644
--- a/lib/observer/src/cdv_table_wx.erl
+++ b/lib/observer/src/cdv_table_wx.erl
@@ -74,7 +74,7 @@ handle_info(active, State) ->
{noreply, State};
handle_info(Info, State) ->
- io:format("~p:~p: Unhandled info: ~p~n", [?MODULE, ?LINE, Info]),
+ io:format("~p:~p: Unhandled info: ~tp~n", [?MODULE, ?LINE, Info]),
{noreply, State}.
terminate(_Reason, _State) ->
@@ -84,15 +84,15 @@ code_change(_, _, State) ->
{ok, State}.
handle_call(Msg, _From, State) ->
- io:format("~p~p: Unhandled Call ~p~n",[?MODULE, ?LINE, Msg]),
+ io:format("~p~p: Unhandled Call ~tp~n",[?MODULE, ?LINE, Msg]),
{reply, ok, State}.
handle_cast(Msg, State) ->
- io:format("~p~p: Unhandled cast ~p~n",[?MODULE, ?LINE, Msg]),
+ io:format("~p~p: Unhandled cast ~tp~n",[?MODULE, ?LINE, Msg]),
{noreply, State}.
handle_event(Event, State) ->
- io:format("~p:~p: Unhandled event ~p\n", [?MODULE,?LINE,Event]),
+ io:format("~p:~p: Unhandled event ~tp\n", [?MODULE,?LINE,Event]),
{noreply, State}.
%%%%%%%%%%%%%%%%%%%%%%% Internal %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
diff --git a/lib/observer/src/cdv_term_cb.erl b/lib/observer/src/cdv_term_cb.erl
index f0d90dde7c..f206d7e4c9 100644
--- a/lib/observer/src/cdv_term_cb.erl
+++ b/lib/observer/src/cdv_term_cb.erl
@@ -57,13 +57,7 @@ expand(['#CDVBin',Offset,Size,Pos], true) ->
{ok,Bin} = crashdump_viewer:expand_binary({Offset,Size,Pos}),
Bin;
expand(Bin, Tab) when is_binary(Bin), not is_boolean(Tab) ->
- Size = byte_size(Bin),
- PrevSize = min(Size, 10) * 8,
- <<Preview:PrevSize, _/binary>> = Bin,
- Hash = erlang:phash2(Bin),
- Key = {Preview, Size, Hash},
- ets:insert(Tab, {Key,Bin}),
- ['#OBSBin',Preview,Size,Hash];
+ observer_lib:make_obsbin(Bin, Tab);
expand([H|T], Expand) ->
case expand(T, Expand) of
ET when is_list(ET) ->
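
cdv_term_cb now delegates to observer_lib:make_obsbin/2 instead of building the '#OBSBin' marker inline. Judging only from the removed lines above, the helper is assumed to do roughly the following; this is a hedged reconstruction, not the actual observer_lib source:

make_obsbin(Bin, Tab) ->
    Size = byte_size(Bin),
    PrevSize = min(Size, 10) * 8,
    <<Preview:PrevSize, _/binary>> = Bin,
    Hash = erlang:phash2(Bin),
    Key = {Preview, Size, Hash},
    ets:insert(Tab, {Key, Bin}),           %% cache the full binary for later expansion
    ['#OBSBin', Preview, Size, Hash].      %% compact marker kept in the displayed term
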
diff --git a/lib/observer/src/cdv_virtual_list_wx.erl b/lib/observer/src/cdv_virtual_list_wx.erl
index ebf58865e9..f3daae8e4d 100644
--- a/lib/observer/src/cdv_virtual_list_wx.erl
+++ b/lib/observer/src/cdv_virtual_list_wx.erl
@@ -73,7 +73,7 @@ start_detail_win(Id) ->
"#Port"++_ ->
start_detail_win(Id, port);
_ ->
- io:format("cdv: unknown identifier: ~p~n",[Id]),
+ io:format("cdv: unknown identifier: ~tp~n",[Id]),
ignore
end.
@@ -195,7 +195,7 @@ call(Holder, What) when is_pid(Holder) ->
erlang:demonitor(Ref),
Res
after 5000 ->
- io:format("Hanging call ~p~n",[What]),
+ io:format("Hanging call ~tp~n",[What]),
""
end;
call(_,_) ->
@@ -214,7 +214,7 @@ handle_info(active, State) ->
{noreply, State};
handle_info(Info, State) ->
- io:format("~p:~p, Unexpected info: ~p~n", [?MODULE, ?LINE, Info]),
+ io:format("~p:~p, Unexpected info: ~tp~n", [?MODULE, ?LINE, Info]),
{noreply, State}.
terminate(_Reason, #state{holder=Holder}) ->
@@ -236,7 +236,7 @@ handle_call(new_dump, _From,
{reply, ok, State#state{detail_wins=[],holder=NewHolder,trunc_warn=TW}};
handle_call(Msg, _From, State) ->
- io:format("~p:~p: Unhandled call ~p~n",[?MODULE, ?LINE, Msg]),
+ io:format("~p:~p: Unhandled call ~tp~n",[?MODULE, ?LINE, Msg]),
{reply, ok, State}.
handle_cast({start_detail_win,Id}, State) ->
@@ -248,7 +248,7 @@ handle_cast({detail_win_closed, Id},#state{detail_wins=Opened}=State) ->
{noreply, State#state{detail_wins=Opened2}};
handle_cast(Msg, State) ->
- io:format("~p:~p: Unhandled cast ~p~n", [?MODULE, ?LINE, Msg]),
+ io:format("~p:~p: Unhandled cast ~tp~n", [?MODULE, ?LINE, Msg]),
{noreply, State}.
%%%%%%%%%%%%%%%%%%%%LOOP%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -322,7 +322,7 @@ handle_event(#wx{event=#wxList{type=command_list_item_activated,
{noreply, State};
handle_event(Event, State) ->
- io:format("~p:~p: handle event ~p\n", [?MODULE, ?LINE, Event]),
+ io:format("~p:~p: handle event ~tp\n", [?MODULE, ?LINE, Event]),
{noreply, State}.
@@ -382,7 +382,7 @@ table_holder(#holder{callback=Callback, attrs=Attrs, info=Info}=S0) ->
stop ->
ok;
What ->
- io:format("Table holder got ~p~n",[What]),
+ io:format("Table holder got ~tp~n",[What]),
table_holder(S0)
end.
diff --git a/lib/observer/src/cdv_wx.erl b/lib/observer/src/cdv_wx.erl
index 1e3fb6289e..898f39ded2 100644
--- a/lib/observer/src/cdv_wx.erl
+++ b/lib/observer/src/cdv_wx.erl
@@ -306,7 +306,7 @@ handle_info({'EXIT', Pid, normal}, #state{server=Pid}=State) ->
{stop, normal, State};
handle_info({'EXIT', Pid, _Reason}, State) ->
- io:format("Child (~s) crashed exiting: ~p ~p~n",
+ io:format("Child (~s) crashed exiting: ~p ~tp~n",
[pid2panel(Pid, State), Pid,_Reason]),
{stop, normal, State};
diff --git a/lib/observer/src/crashdump_viewer.erl b/lib/observer/src/crashdump_viewer.erl
index 8d70f5e2c8..fd8b3b9e67 100644
--- a/lib/observer/src/crashdump_viewer.erl
+++ b/lib/observer/src/crashdump_viewer.erl
@@ -200,13 +200,13 @@ do_script_start(StartFun) ->
{'EXIT', Pid, normal} ->
ok;
{'EXIT', Pid, Reason} ->
- io:format("\ncdv crash: ~p\n",[Reason])
+ io:format("\ncdv crash: ~tp\n",[Reason])
end;
_ ->
io:format("\ncdv crash: ~p\n",[unknown_reason])
end;
Error ->
- io:format("\ncdv start failed: ~p\n",[Error])
+ io:format("\ncdv start failed: ~tp\n",[Error])
end.
usage() ->
@@ -340,7 +340,7 @@ handle_call(general_info,_From,State=#state{file=File}) ->
handle_call({expand_binary,{Offset,Size,Pos}},_From,State=#state{file=File}) ->
Fd = open(File),
pos_bof(Fd,Pos),
- {Bin,_Line} = get_binary(Offset,Size,val(Fd)),
+ {Bin,_Line} = get_binary(Offset,Size,bytes(Fd)),
close(Fd),
{reply,{ok,Bin},State};
handle_call(procs_summary,_From,State=#state{file=File,wordsize=WS}) ->
@@ -512,9 +512,9 @@ unexpected(_Fd,{eof,_LastLine},_Where) ->
ok; % truncated file
unexpected(Fd,{part,What},Where) ->
skip_rest_of_line(Fd),
- io:format("WARNING: Found unexpected line in ~s:~n~s ...~n",[Where,What]);
+ io:format("WARNING: Found unexpected line in ~ts:~n~ts ...~n",[Where,What]);
unexpected(_Fd,What,Where) ->
- io:format("WARNING: Found unexpected line in ~s:~n~s~n",[Where,What]).
+ io:format("WARNING: Found unexpected line in ~ts:~n~ts~n",[Where,What]).
truncated_warning([]) ->
[];
@@ -701,9 +701,24 @@ skip(Fd,<<>>) ->
end.
-val(Fd) ->
- val(Fd, "-1").
-val(Fd, NoExist) ->
+string(Fd) ->
+ string(Fd, "-1").
+string(Fd,NoExist) ->
+ case bytes(Fd,noexist) of
+ noexist -> NoExist;
+ Val -> byte_list_to_string(Val)
+ end.
+
+byte_list_to_string(ByteList) ->
+ Bin = list_to_binary(ByteList),
+ case unicode:characters_to_list(Bin) of
+ Str when is_list(Str) -> Str;
+ _ -> ByteList
+ end.
+
+bytes(Fd) ->
+ bytes(Fd, "-1").
+bytes(Fd, NoExist) ->
case get_rest_of_line(Fd) of
{eof,[]} -> NoExist;
[] -> NoExist;
@@ -742,7 +757,7 @@ get_lines_to_empty(Fd,<<$\n:8,Bin/binary>>,[],Lines) ->
put_chunk(Fd,Bin),
lists:reverse(Lines);
get_lines_to_empty(Fd,<<$\n:8,Bin/binary>>,Acc,Lines) ->
- get_lines_to_empty(Fd,Bin,[],[lists:reverse(Acc)|Lines]);
+ get_lines_to_empty(Fd,Bin,[],[byte_list_to_string(lists:reverse(Acc))|Lines]);
get_lines_to_empty(Fd,<<$\r:8,Bin/binary>>,Acc,Lines) ->
get_lines_to_empty(Fd,Bin,Acc,Lines);
get_lines_to_empty(Fd,<<$\s:8,Bin/binary>>,[],Lines) ->
@@ -754,7 +769,7 @@ get_lines_to_empty(Fd,<<>>,Acc,Lines) ->
{ok,Bin} ->
get_lines_to_empty(Fd,Bin,Acc,Lines);
eof ->
- lists:reverse(Lines,[lists:reverse(Acc)])
+ lists:reverse(Lines,[byte_list_to_string(lists:reverse(Acc))])
end.
split(Str) ->
@@ -816,7 +831,7 @@ do_read_file(File) ->
{ok,Binaries,DumpVsn};
_Other ->
R = io_lib:format(
- "~s is not an Erlang crash dump~n",
+ "~ts is not an Erlang crash dump~n",
[File]),
close(Fd),
{error,R}
@@ -824,20 +839,20 @@ do_read_file(File) ->
{ok,<<"<Erlang crash dump>",_Rest/binary>>} ->
%% old version - no longer supported
R = io_lib:format(
- "The crashdump ~s is in the pre-R10B format, "
+ "The crashdump ~ts is in the pre-R10B format, "
"which is no longer supported.~n",
[File]),
close(Fd),
{error,R};
_Other ->
R = io_lib:format(
- "~s is not an Erlang crash dump~n",
+ "~ts is not an Erlang crash dump~n",
[File]),
close(Fd),
{error,R}
end;
_other ->
- R = io_lib:format("~s is not an Erlang crash dump~n",[File]),
+ R = io_lib:format("~ts is not an Erlang crash dump~n",[File]),
{error,R}
end.
@@ -986,7 +1001,7 @@ general_info(File) ->
instr_info=InstrInfo}.
get_slogan_and_sysvsn(Fd,Acc) ->
- case val(Fd,eof) of
+ case string(Fd,eof) of
"Slogan: " ++ SloganPart when Acc==[] ->
get_slogan_and_sysvsn(Fd,[SloganPart]);
"System version: " ++ SystemVsn ->
@@ -1000,14 +1015,14 @@ get_slogan_and_sysvsn(Fd,Acc) ->
get_general_info(Fd,GenInfo) ->
case line_head(Fd) of
"Compiled" ->
- get_general_info(Fd,GenInfo#general_info{compile_time=val(Fd)});
+ get_general_info(Fd,GenInfo#general_info{compile_time=bytes(Fd)});
"Taints" ->
- Val = case val(Fd) of "-1" -> "(none)"; Line -> Line end,
+ Val = case string(Fd) of "-1" -> "(none)"; Line -> Line end,
get_general_info(Fd,GenInfo#general_info{taints=Val});
"Atoms" ->
- get_general_info(Fd,GenInfo#general_info{num_atoms=val(Fd)});
+ get_general_info(Fd,GenInfo#general_info{num_atoms=bytes(Fd)});
"Calling Thread" ->
- get_general_info(Fd,GenInfo#general_info{thread=val(Fd)});
+ get_general_info(Fd,GenInfo#general_info{thread=bytes(Fd)});
"=" ++ _next_tag ->
GenInfo;
Other ->
@@ -1068,15 +1083,15 @@ get_proc_details(File,Pid,WS,DumpVsn,Binaries) ->
get_procinfo(Fd,Fun,Proc,WS) ->
case line_head(Fd) of
"State" ->
- State = case val(Fd) of
+ State = case bytes(Fd) of
"Garbing" -> "Garbing\n(limited info)";
State0 -> State0
end,
get_procinfo(Fd,Fun,Proc#proc{state=State},WS);
"Name" ->
- get_procinfo(Fd,Fun,Proc#proc{name=val(Fd)},WS);
+ get_procinfo(Fd,Fun,Proc#proc{name=string(Fd)},WS);
"Spawned as" ->
- IF = val(Fd),
+ IF = string(Fd),
case Proc#proc.name of
undefined ->
get_procinfo(Fd,Fun,Proc#proc{name=IF,init_func=IF},WS);
@@ -1085,17 +1100,17 @@ get_procinfo(Fd,Fun,Proc,WS) ->
end;
"Message queue length" ->
%% stored as integer so we can sort on it
- get_procinfo(Fd,Fun,Proc#proc{msg_q_len=list_to_integer(val(Fd))},WS);
+ get_procinfo(Fd,Fun,Proc#proc{msg_q_len=list_to_integer(bytes(Fd))},WS);
"Reductions" ->
%% stored as integer so we can sort on it
- get_procinfo(Fd,Fun,Proc#proc{reds=list_to_integer(val(Fd))},WS);
+ get_procinfo(Fd,Fun,Proc#proc{reds=list_to_integer(bytes(Fd))},WS);
"Stack+heap" ->
%% stored as integer so we can sort on it
get_procinfo(Fd,Fun,Proc#proc{stack_heap=
- list_to_integer(val(Fd))*WS},WS);
+ list_to_integer(bytes(Fd))*WS},WS);
"Memory" ->
%% stored as integer so we can sort on it
- get_procinfo(Fd,Fun,Proc#proc{memory=list_to_integer(val(Fd))},WS);
+ get_procinfo(Fd,Fun,Proc#proc{memory=list_to_integer(bytes(Fd))},WS);
{eof,_} ->
Proc; % truncated file
Other ->
@@ -1117,67 +1132,67 @@ all_procinfo(Fd,Fun,Proc,WS,LineHead) ->
case LineHead of
%% - START - moved from get_procinfo -
"Spawned by" ->
- case val(Fd) of
+ case bytes(Fd) of
"[]" ->
get_procinfo(Fd,Fun,Proc,WS);
Parent ->
get_procinfo(Fd,Fun,Proc#proc{parent=Parent},WS)
end;
"Started" ->
- get_procinfo(Fd,Fun,Proc#proc{start_time=val(Fd)},WS);
+ get_procinfo(Fd,Fun,Proc#proc{start_time=bytes(Fd)},WS);
"Last scheduled in for" ->
get_procinfo(Fd,Fun,Proc#proc{current_func=
{"Last scheduled in for",
- val(Fd)}},WS);
+ string(Fd)}},WS);
"Current call" ->
get_procinfo(Fd,Fun,Proc#proc{current_func={"Current call",
- val(Fd)}},WS);
+ string(Fd)}},WS);
"Number of heap fragments" ->
- get_procinfo(Fd,Fun,Proc#proc{num_heap_frag=val(Fd)},WS);
+ get_procinfo(Fd,Fun,Proc#proc{num_heap_frag=bytes(Fd)},WS);
"Heap fragment data" ->
- get_procinfo(Fd,Fun,Proc#proc{heap_frag_data=val(Fd)},WS);
+ get_procinfo(Fd,Fun,Proc#proc{heap_frag_data=bytes(Fd)},WS);
"OldHeap" ->
- Bytes = list_to_integer(val(Fd))*WS,
+ Bytes = list_to_integer(bytes(Fd))*WS,
get_procinfo(Fd,Fun,Proc#proc{old_heap=Bytes},WS);
"Heap unused" ->
- Bytes = list_to_integer(val(Fd))*WS,
+ Bytes = list_to_integer(bytes(Fd))*WS,
get_procinfo(Fd,Fun,Proc#proc{heap_unused=Bytes},WS);
"OldHeap unused" ->
- Bytes = list_to_integer(val(Fd))*WS,
+ Bytes = list_to_integer(bytes(Fd))*WS,
get_procinfo(Fd,Fun,Proc#proc{old_heap_unused=Bytes},WS);
"New heap start" ->
- get_procinfo(Fd,Fun,Proc#proc{new_heap_start=val(Fd)},WS);
+ get_procinfo(Fd,Fun,Proc#proc{new_heap_start=bytes(Fd)},WS);
"New heap top" ->
- get_procinfo(Fd,Fun,Proc#proc{new_heap_top=val(Fd)},WS);
+ get_procinfo(Fd,Fun,Proc#proc{new_heap_top=bytes(Fd)},WS);
"Stack top" ->
- get_procinfo(Fd,Fun,Proc#proc{stack_top=val(Fd)},WS);
+ get_procinfo(Fd,Fun,Proc#proc{stack_top=bytes(Fd)},WS);
"Stack end" ->
- get_procinfo(Fd,Fun,Proc#proc{stack_end=val(Fd)},WS);
+ get_procinfo(Fd,Fun,Proc#proc{stack_end=bytes(Fd)},WS);
"Old heap start" ->
- get_procinfo(Fd,Fun,Proc#proc{old_heap_start=val(Fd)},WS);
+ get_procinfo(Fd,Fun,Proc#proc{old_heap_start=bytes(Fd)},WS);
"Old heap top" ->
- get_procinfo(Fd,Fun,Proc#proc{old_heap_top=val(Fd)},WS);
+ get_procinfo(Fd,Fun,Proc#proc{old_heap_top=bytes(Fd)},WS);
"Old heap end" ->
- get_procinfo(Fd,Fun,Proc#proc{old_heap_end=val(Fd)},WS);
+ get_procinfo(Fd,Fun,Proc#proc{old_heap_end=bytes(Fd)},WS);
%% - END - moved from get_procinfo -
"Last calls" ->
get_procinfo(Fd,Fun,Proc#proc{last_calls=get_lines_to_empty(Fd)},WS);
"Link list" ->
- {Links,Monitors,MonitoredBy} = parse_link_list(val(Fd),[],[],[]),
+ {Links,Monitors,MonitoredBy} = parse_link_list(bytes(Fd),[],[],[]),
get_procinfo(Fd,Fun,Proc#proc{links=Links,
monitors=Monitors,
mon_by=MonitoredBy},WS);
"Program counter" ->
- get_procinfo(Fd,Fun,Proc#proc{prog_count=val(Fd)},WS);
+ get_procinfo(Fd,Fun,Proc#proc{prog_count=string(Fd)},WS);
"CP" ->
- get_procinfo(Fd,Fun,Proc#proc{cp=val(Fd)},WS);
+ get_procinfo(Fd,Fun,Proc#proc{cp=string(Fd)},WS);
"arity = " ++ Arity ->
%%! Temporary workaround
get_procinfo(Fd,Fun,Proc#proc{arity=Arity--"\r\n"},WS);
"Run queue" ->
- get_procinfo(Fd,Fun,Proc#proc{run_queue=val(Fd)},WS);
+ get_procinfo(Fd,Fun,Proc#proc{run_queue=string(Fd)},WS);
"Internal State" ->
- get_procinfo(Fd,Fun,Proc#proc{int_state=val(Fd)},WS);
+ get_procinfo(Fd,Fun,Proc#proc{int_state=string(Fd)},WS);
"=" ++ _next_tag ->
Proc;
Other ->
@@ -1204,7 +1219,7 @@ parse_link_list(", "++Rest,Links,Monitors,MonitoredBy) ->
parse_link_list([],Links,Monitors,MonitoredBy) ->
{lists:reverse(Links),lists:reverse(Monitors),lists:reverse(MonitoredBy)};
parse_link_list(Unexpected,Links,Monitors,MonitoredBy) ->
- io:format("WARNING: found unexpected data in link list:~n~s~n",[Unexpected]),
+ io:format("WARNING: found unexpected data in link list:~n~ts~n",[Unexpected]),
parse_link_list([],Links,Monitors,MonitoredBy).
@@ -1363,7 +1378,7 @@ read_stack_dump(Fd,Pid,BinAddrAdj,Dict) ->
end.
read_stack_dump1(Fd,BinAddrAdj,Dict,Acc) ->
%% This function is never called if the dump is truncated in {?proc_heap,Pid}
- case val(Fd) of
+ case bytes(Fd) of
"=" ++ _next_tag ->
lists:reverse(Acc);
Line ->
@@ -1391,7 +1406,7 @@ read_messages(Fd,Pid,BinAddrAdj,Dict) ->
end.
read_messages1(Fd,BinAddrAdj,Dict,Acc) ->
%% This function is never called if the dump is truncated in {?proc_heap,Pid}
- case val(Fd) of
+ case bytes(Fd) of
"=" ++ _next_tag ->
lists:reverse(Acc);
Line ->
@@ -1419,7 +1434,7 @@ read_dictionary(Fd,Pid,BinAddrAdj,Dict) ->
end.
read_dictionary1(Fd,BinAddrAdj,Dict,Acc) ->
%% This function is never called if the dump is truncated in {?proc_heap,Pid}
- case val(Fd) of
+ case bytes(Fd) of
"=" ++ _next_tag ->
lists:reverse(Acc);
Line ->
@@ -1451,7 +1466,7 @@ read_heap(BinAddrAdj,Dict0) ->
end_of_heap ->
Dict0;
Fd ->
- case val(Fd) of
+ case bytes(Fd) of
"=" ++ _next_tag ->
put(fd, end_of_heap),
Dict0;
@@ -1498,42 +1513,42 @@ get_portinfo(Fd,Port) ->
case line_head(Fd) of
"Slot" ->
%% stored as integer so we can sort on it
- get_portinfo(Fd,Port#port{slot=list_to_integer(val(Fd))});
+ get_portinfo(Fd,Port#port{slot=list_to_integer(bytes(Fd))});
"Connected" ->
%% stored as pid so we can sort on it
- Connected0 = val(Fd),
+ Connected0 = bytes(Fd),
Connected =
try list_to_pid(Connected0)
catch error:badarg -> Connected0
end,
get_portinfo(Fd,Port#port{connected=Connected});
"Links" ->
- Pids = split_pid_list_no_space(val(Fd)),
+ Pids = split_pid_list_no_space(bytes(Fd)),
Links = [{Pid,Pid} || Pid <- Pids],
get_portinfo(Fd,Port#port{links=Links});
"Registered as" ->
- get_portinfo(Fd,Port#port{name=val(Fd)});
+ get_portinfo(Fd,Port#port{name=string(Fd)});
"Monitors" ->
- Monitors0 = string:tokens(val(Fd),"()"),
+ Monitors0 = string:tokens(bytes(Fd),"()"),
Monitors = [begin
[Pid,Ref] = string:tokens(Mon,","),
{Pid,Pid++" ("++Ref++")"}
end || Mon <- Monitors0],
get_portinfo(Fd,Port#port{monitors=Monitors});
"Port controls linked-in driver" ->
- Str = lists:flatten(["Linked in driver: " | val(Fd)]),
+ Str = lists:flatten(["Linked in driver: " | string(Fd)]),
get_portinfo(Fd,Port#port{controls=Str});
"Port controls forker process" ->
- Str = lists:flatten(["Forker process: " | val(Fd)]),
+ Str = lists:flatten(["Forker process: " | string(Fd)]),
get_portinfo(Fd,Port#port{controls=Str});
"Port controls external process" ->
- Str = lists:flatten(["External proc: " | val(Fd)]),
+ Str = lists:flatten(["External proc: " | string(Fd)]),
get_portinfo(Fd,Port#port{controls=Str});
"Port is a file" ->
- Str = lists:flatten(["File: "| val(Fd)]),
+ Str = lists:flatten(["File: "| string(Fd)]),
get_portinfo(Fd,Port#port{controls=Str});
"Port is UNIX fd not opened by emulator" ->
- Str = lists:flatten(["UNIX fd not opened by emulator: "| val(Fd)]),
+ Str = lists:flatten(["UNIX fd not opened by emulator: "| string(Fd)]),
get_portinfo(Fd,Port#port{controls=Str});
"=" ++ _next_tag ->
Port;
@@ -1566,23 +1581,23 @@ tab_is_named(#ets_table{}) -> "no".
get_etsinfo(Fd,EtsTable = #ets_table{details=Ds},WS) ->
case line_head(Fd) of
"Slot" ->
- get_etsinfo(Fd,EtsTable#ets_table{slot=list_to_integer(val(Fd))},WS);
+ get_etsinfo(Fd,EtsTable#ets_table{slot=list_to_integer(bytes(Fd))},WS);
"Table" ->
- get_etsinfo(Fd,EtsTable#ets_table{id=val(Fd)},WS);
+ get_etsinfo(Fd,EtsTable#ets_table{id=string(Fd)},WS);
"Name" ->
- get_etsinfo(Fd,EtsTable#ets_table{name=val(Fd)},WS);
+ get_etsinfo(Fd,EtsTable#ets_table{name=string(Fd)},WS);
"Ordered set (AVL tree), Elements" ->
skip_rest_of_line(Fd),
get_etsinfo(Fd,EtsTable#ets_table{data_type="tree"},WS);
"Buckets" ->
%% A bug in erl_db_hash.c prints a space after the buckets
%% - need to strip the string to make list_to_integer/1 happy.
- Buckets = list_to_integer(string:strip(val(Fd))),
+ Buckets = list_to_integer(string:strip(bytes(Fd))),
get_etsinfo(Fd,EtsTable#ets_table{buckets=Buckets},WS);
"Objects" ->
- get_etsinfo(Fd,EtsTable#ets_table{size=list_to_integer(val(Fd))},WS);
+ get_etsinfo(Fd,EtsTable#ets_table{size=list_to_integer(bytes(Fd))},WS);
"Words" ->
- Words = list_to_integer(val(Fd)),
+ Words = list_to_integer(bytes(Fd)),
Bytes =
case Words of
-1 -> -1; % probably truncated
@@ -1592,37 +1607,37 @@ get_etsinfo(Fd,EtsTable = #ets_table{details=Ds},WS) ->
"=" ++ _next_tag ->
EtsTable;
"Chain Length Min" ->
- Val = val(Fd),
+ Val = bytes(Fd),
get_etsinfo(Fd,EtsTable#ets_table{details=Ds#{chain_min=>Val}},WS);
"Chain Length Avg" ->
- Val = try list_to_float(string:strip(val(Fd))) catch _:_ -> "-" end,
+ Val = try list_to_float(string:strip(bytes(Fd))) catch _:_ -> "-" end,
get_etsinfo(Fd,EtsTable#ets_table{details=Ds#{chain_avg=>Val}},WS);
"Chain Length Max" ->
- Val = val(Fd),
+ Val = bytes(Fd),
get_etsinfo(Fd,EtsTable#ets_table{details=Ds#{chain_max=>Val}},WS);
"Chain Length Std Dev" ->
- Val = val(Fd),
+ Val = bytes(Fd),
get_etsinfo(Fd,EtsTable#ets_table{details=Ds#{chain_stddev=>Val}},WS);
"Chain Length Expected Std Dev" ->
- Val = val(Fd),
+ Val = bytes(Fd),
get_etsinfo(Fd,EtsTable#ets_table{details=Ds#{chain_exp_stddev=>Val}},WS);
"Fixed" ->
- Val = val(Fd),
+ Val = bytes(Fd),
get_etsinfo(Fd,EtsTable#ets_table{details=Ds#{fixed=>Val}},WS);
"Type" ->
- Val = val(Fd),
+ Val = bytes(Fd),
get_etsinfo(Fd,EtsTable#ets_table{data_type=Val},WS);
"Protection" ->
- Val = val(Fd),
+ Val = bytes(Fd),
get_etsinfo(Fd,EtsTable#ets_table{details=Ds#{protection=>Val}},WS);
"Compressed" ->
- Val = val(Fd),
+ Val = bytes(Fd),
get_etsinfo(Fd,EtsTable#ets_table{details=Ds#{compressed=>Val}},WS);
"Write Concurrency" ->
- Val = val(Fd),
+ Val = bytes(Fd),
get_etsinfo(Fd,EtsTable#ets_table{details=Ds#{write_c=>Val}},WS);
"Read Concurrency" ->
- Val = val(Fd),
+ Val = bytes(Fd),
get_etsinfo(Fd,EtsTable#ets_table{details=Ds#{read_c=>Val}},WS);
Other ->
unexpected(Fd,Other,"ETS info"),
@@ -1672,9 +1687,9 @@ get_timerinfo(Fd,Id) ->
get_timerinfo_1(Fd,Timer) ->
case line_head(Fd) of
"Message" ->
- get_timerinfo_1(Fd,Timer#timer{msg=val(Fd)});
+ get_timerinfo_1(Fd,Timer#timer{msg=string(Fd)});
"Time left" ->
- TimeLeft = list_to_integer(val(Fd) -- " ms"),
+ TimeLeft = list_to_integer(bytes(Fd) -- " ms"),
get_timerinfo_1(Fd,Timer#timer{time=TimeLeft});
"=" ++ _next_tag ->
Timer;
@@ -1743,37 +1758,37 @@ get_nodeinfo(Fd,Channel,Type,Start) ->
get_nodeinfo(Fd,Nod) ->
case line_head(Fd) of
"Name" ->
- get_nodeinfo(Fd,Nod#nod{name=val(Fd)});
+ get_nodeinfo(Fd,Nod#nod{name=bytes(Fd)});
"Controller" ->
- get_nodeinfo(Fd,Nod#nod{controller=val(Fd)});
+ get_nodeinfo(Fd,Nod#nod{controller=bytes(Fd)});
"Creation" ->
%% Throwing away elements like "(refc=1)", which might be
%% printed from a debug compiled emulator.
Creations = lists:flatmap(fun(C) -> try [list_to_integer(C)]
catch error:badarg -> []
end
- end, string:tokens(val(Fd)," ")),
+ end, string:tokens(bytes(Fd)," ")),
get_nodeinfo(Fd,Nod#nod{creation={creations,Creations}});
"Remote link" ->
- Procs = val(Fd), % e.g. "<0.31.0> <4322.54.0>"
+ Procs = bytes(Fd), % e.g. "<0.31.0> <4322.54.0>"
{Local,Remote} = split(Procs),
Str = Local++" <-> "++Remote,
NewRemLinks = [{Local,Str} | Nod#nod.remote_links],
get_nodeinfo(Fd,Nod#nod{remote_links=NewRemLinks});
"Remote monitoring" ->
- Procs = val(Fd), % e.g. "<0.31.0> <4322.54.0>"
+ Procs = bytes(Fd), % e.g. "<0.31.0> <4322.54.0>"
{Local,Remote} = split(Procs),
Str = Local++" -> "++Remote,
NewRemMon = [{Local,Str} | Nod#nod.remote_mon],
get_nodeinfo(Fd,Nod#nod{remote_mon=NewRemMon});
"Remotely monitored by" ->
- Procs = val(Fd), % e.g. "<0.31.0> <4322.54.0>"
+ Procs = bytes(Fd), % e.g. "<0.31.0> <4322.54.0>"
{Local,Remote} = split(Procs),
Str = Local++" <- "++Remote,
NewRemMonBy = [{Local,Str} | Nod#nod.remote_mon_by],
get_nodeinfo(Fd,Nod#nod{remote_mon_by=NewRemMonBy});
"Error" ->
- get_nodeinfo(Fd,Nod#nod{error="ERROR: "++val(Fd)});
+ get_nodeinfo(Fd,Nod#nod{error="ERROR: "++string(Fd)});
"=" ++ _next_tag ->
Nod;
Other ->
@@ -1817,9 +1832,9 @@ loaded_mods(File) ->
get_loaded_mod_totals(Fd,{CC,OC}) ->
case line_head(Fd) of
"Current code" ->
- get_loaded_mod_totals(Fd,{val(Fd),OC});
+ get_loaded_mod_totals(Fd,{bytes(Fd),OC});
"Old code" ->
- get_loaded_mod_totals(Fd,{CC,val(Fd)});
+ get_loaded_mod_totals(Fd,{CC,bytes(Fd)});
"=" ++ _next_tag ->
{CC,OC};
Other ->
@@ -1830,10 +1845,10 @@ get_loaded_mod_totals(Fd,{CC,OC}) ->
get_loaded_mod_info(Fd,LM,Fun) ->
case line_head(Fd) of
"Current size" ->
- CS = list_to_integer(val(Fd)),
+ CS = list_to_integer(bytes(Fd)),
get_loaded_mod_info(Fd,LM#loaded_mod{current_size=CS},Fun);
"Old size" ->
- OS = list_to_integer(val(Fd)),
+ OS = list_to_integer(bytes(Fd)),
get_loaded_mod_info(Fd,LM#loaded_mod{old_size=OS},Fun);
"=" ++ _next_tag ->
LM;
@@ -1849,16 +1864,16 @@ main_modinfo(_Fd,LM,_LineHead) ->
all_modinfo(Fd,LM,LineHead) ->
case LineHead of
"Current attributes" ->
- Str = hex_to_str(val(Fd,"")),
+ Str = hex_to_str(bytes(Fd,"")),
LM#loaded_mod{current_attrib=Str};
"Current compilation info" ->
- Str = hex_to_str(val(Fd,"")),
+ Str = hex_to_str(bytes(Fd,"")),
LM#loaded_mod{current_comp_info=Str};
"Old attributes" ->
- Str = hex_to_str(val(Fd,"")),
+ Str = hex_to_str(bytes(Fd,"")),
LM#loaded_mod{old_attrib=Str};
"Old compilation info" ->
- Str = hex_to_str(val(Fd,"")),
+ Str = hex_to_str(bytes(Fd,"")),
LM#loaded_mod{old_comp_info=Str};
Other ->
unexpected(Fd,Other,"loaded modules info"),
@@ -1868,7 +1883,7 @@ all_modinfo(Fd,LM,LineHead) ->
hex_to_str(Hex) ->
Term = hex_to_term(Hex,[]),
- io_lib:format("~p~n",[Term]).
+ io_lib:format("~tp~n",[Term]).
hex_to_term([X,Y|Hex],Acc) ->
MS = hex_to_dec([X]),
@@ -1909,17 +1924,17 @@ funs(File) ->
get_funinfo(Fd,Fu) ->
case line_head(Fd) of
"Module" ->
- get_funinfo(Fd,Fu#fu{module=val(Fd)});
+ get_funinfo(Fd,Fu#fu{module=bytes(Fd)});
"Uniq" ->
- get_funinfo(Fd,Fu#fu{uniq=list_to_integer(val(Fd))});
+ get_funinfo(Fd,Fu#fu{uniq=list_to_integer(bytes(Fd))});
"Index" ->
- get_funinfo(Fd,Fu#fu{index=list_to_integer(val(Fd))});
+ get_funinfo(Fd,Fu#fu{index=list_to_integer(bytes(Fd))});
"Address" ->
- get_funinfo(Fd,Fu#fu{address=val(Fd)});
+ get_funinfo(Fd,Fu#fu{address=bytes(Fd)});
"Native_address" ->
- get_funinfo(Fd,Fu#fu{native_address=val(Fd)});
+ get_funinfo(Fd,Fu#fu{native_address=bytes(Fd)});
"Refc" ->
- get_funinfo(Fd,Fu#fu{refc=list_to_integer(val(Fd))});
+ get_funinfo(Fd,Fu#fu{refc=list_to_integer(bytes(Fd))});
"=" ++ _next_tag ->
Fu;
Other ->
@@ -1999,7 +2014,7 @@ get_meminfo(Fd,Acc) ->
{eof,_last_line} ->
lists:reverse(Acc);
Key ->
- get_meminfo(Fd,[{list_to_atom(Key),val(Fd)}|Acc])
+ get_meminfo(Fd,[{list_to_atom(Key),bytes(Fd)}|Acc])
end.
%%-----------------------------------------------------------------
@@ -2023,7 +2038,7 @@ get_allocareainfo(Fd,Acc) ->
{eof,_last_line} ->
lists:reverse(Acc);
Key ->
- Val = val(Fd),
+ Val = bytes(Fd),
AllocInfo =
case split(Val) of
{Alloc,[]} ->
@@ -2061,7 +2076,7 @@ get_allocatorinfo1(Fd,Acc,Max) ->
{eof,_last_line} ->
pad_and_reverse(Acc,Max,[]);
Key ->
- Values = get_all_vals(val(Fd),[]),
+ Values = get_all_vals(bytes(Fd),[]),
L = length(Values),
Max1 = if L > Max -> L; true -> Max end,
get_allocatorinfo1(Fd,[{Key,Values}|Acc],Max1)
@@ -2316,13 +2331,13 @@ get_hashtableinfo(Fd,Name,Start) ->
get_hashtableinfo1(Fd,HashTable) ->
case line_head(Fd) of
"size" ->
- get_hashtableinfo1(Fd,HashTable#hash_table{size=val(Fd)});
+ get_hashtableinfo1(Fd,HashTable#hash_table{size=bytes(Fd)});
"used" ->
- get_hashtableinfo1(Fd,HashTable#hash_table{used=val(Fd)});
+ get_hashtableinfo1(Fd,HashTable#hash_table{used=bytes(Fd)});
"objs" ->
- get_hashtableinfo1(Fd,HashTable#hash_table{objs=val(Fd)});
+ get_hashtableinfo1(Fd,HashTable#hash_table{objs=bytes(Fd)});
"depth" ->
- get_hashtableinfo1(Fd,HashTable#hash_table{depth=val(Fd)});
+ get_hashtableinfo1(Fd,HashTable#hash_table{depth=bytes(Fd)});
"=" ++ _next_tag ->
HashTable;
Other ->
@@ -2353,15 +2368,15 @@ get_indextableinfo(Fd,Name,Start) ->
get_indextableinfo1(Fd,IndexTable) ->
case line_head(Fd) of
"size" ->
- get_indextableinfo1(Fd,IndexTable#index_table{size=val(Fd)});
+ get_indextableinfo1(Fd,IndexTable#index_table{size=bytes(Fd)});
"used" ->
- get_indextableinfo1(Fd,IndexTable#index_table{used=val(Fd)});
+ get_indextableinfo1(Fd,IndexTable#index_table{used=bytes(Fd)});
"limit" ->
- get_indextableinfo1(Fd,IndexTable#index_table{limit=val(Fd)});
+ get_indextableinfo1(Fd,IndexTable#index_table{limit=bytes(Fd)});
"rate" ->
- get_indextableinfo1(Fd,IndexTable#index_table{rate=val(Fd)});
+ get_indextableinfo1(Fd,IndexTable#index_table{rate=bytes(Fd)});
"entries" ->
- get_indextableinfo1(Fd,IndexTable#index_table{entries=val(Fd)});
+ get_indextableinfo1(Fd,IndexTable#index_table{entries=bytes(Fd)});
"=" ++ _next_tag ->
IndexTable;
Other ->
@@ -2393,45 +2408,45 @@ get_schedulerinfo(Fd,Name,Start) ->
get_schedulerinfo1(Fd,Sched=#sched{details=Ds}) ->
case line_head(Fd) of
"Current Process" ->
- get_schedulerinfo1(Fd,Sched#sched{process=val(Fd, "None")});
+ get_schedulerinfo1(Fd,Sched#sched{process=bytes(Fd, "None")});
"Current Port" ->
- get_schedulerinfo1(Fd,Sched#sched{port=val(Fd, "None")});
+ get_schedulerinfo1(Fd,Sched#sched{port=bytes(Fd, "None")});
"Run Queue Max Length" ->
- RQMax = list_to_integer(val(Fd)),
+ RQMax = list_to_integer(bytes(Fd)),
RQ = RQMax + Sched#sched.run_q,
get_schedulerinfo1(Fd,Sched#sched{run_q=RQ, details=Ds#{runq_max=>RQMax}});
"Run Queue High Length" ->
- RQHigh = list_to_integer(val(Fd)),
+ RQHigh = list_to_integer(bytes(Fd)),
RQ = RQHigh + Sched#sched.run_q,
get_schedulerinfo1(Fd,Sched#sched{run_q=RQ, details=Ds#{runq_high=>RQHigh}});
"Run Queue Normal Length" ->
- RQNorm = list_to_integer(val(Fd)),
+ RQNorm = list_to_integer(bytes(Fd)),
RQ = RQNorm + Sched#sched.run_q,
get_schedulerinfo1(Fd,Sched#sched{run_q=RQ, details=Ds#{runq_norm=>RQNorm}});
"Run Queue Low Length" ->
- RQLow = list_to_integer(val(Fd)),
+ RQLow = list_to_integer(bytes(Fd)),
RQ = RQLow + Sched#sched.run_q,
get_schedulerinfo1(Fd,Sched#sched{run_q=RQ, details=Ds#{runq_low=>RQLow}});
"Run Queue Port Length" ->
- RQ = list_to_integer(val(Fd)),
+ RQ = list_to_integer(bytes(Fd)),
get_schedulerinfo1(Fd,Sched#sched{port_q=RQ});
"Scheduler Sleep Info Flags" ->
- get_schedulerinfo1(Fd,Sched#sched{details=Ds#{sleep_info=>val(Fd, "None")}});
+ get_schedulerinfo1(Fd,Sched#sched{details=Ds#{sleep_info=>bytes(Fd, "None")}});
"Scheduler Sleep Info Aux Work" ->
- get_schedulerinfo1(Fd,Sched#sched{details=Ds#{sleep_aux=>val(Fd, "None")}});
+ get_schedulerinfo1(Fd,Sched#sched{details=Ds#{sleep_aux=>bytes(Fd, "None")}});
"Run Queue Flags" ->
- get_schedulerinfo1(Fd,Sched#sched{details=Ds#{runq_flags=>val(Fd, "None")}});
+ get_schedulerinfo1(Fd,Sched#sched{details=Ds#{runq_flags=>bytes(Fd, "None")}});
"Current Process State" ->
- get_schedulerinfo1(Fd,Sched#sched{details=Ds#{currp_state=>val(Fd)}});
+ get_schedulerinfo1(Fd,Sched#sched{details=Ds#{currp_state=>bytes(Fd)}});
"Current Process Internal State" ->
- get_schedulerinfo1(Fd,Sched#sched{details=Ds#{currp_int_state=>val(Fd)}});
+ get_schedulerinfo1(Fd,Sched#sched{details=Ds#{currp_int_state=>bytes(Fd)}});
"Current Process Program counter" ->
- get_schedulerinfo1(Fd,Sched#sched{details=Ds#{currp_prg_cnt=>val(Fd)}});
+ get_schedulerinfo1(Fd,Sched#sched{details=Ds#{currp_prg_cnt=>string(Fd)}});
"Current Process CP" ->
- get_schedulerinfo1(Fd,Sched#sched{details=Ds#{currp_cp=>val(Fd)}});
+ get_schedulerinfo1(Fd,Sched#sched{details=Ds#{currp_cp=>string(Fd)}});
"Current Process Limited Stack Trace" ->
%% If there shall be last in scheduler information block
Sched#sched{details=get_limited_stack(Fd, 0, Ds)};
@@ -2443,7 +2458,7 @@ get_schedulerinfo1(Fd,Sched=#sched{details=Ds}) ->
end.
get_limited_stack(Fd, N, Ds) ->
- case val(Fd) of
+ case string(Fd) of
Addr = "0x" ++ _ ->
get_limited_stack(Fd, N+1, Ds#{{currp_stack, N} => Addr});
"=" ++ _next_tag ->
@@ -2595,7 +2610,7 @@ skip_dist_ext([C|Cs], KeptCs) ->
parse_atom([$A|Line0], D) ->
{N,":"++Line1} = get_hex(Line0),
{Chars, Line} = get_chars(N, Line1),
- {list_to_atom(Chars), Line, D}.
+ {binary_to_atom(list_to_binary(Chars),utf8), Line, D}.
parse_atom_translation_table(0, Line0, As) ->
{list_to_tuple(lists:reverse(As)), Line0};
@@ -2614,7 +2629,7 @@ deref_ptr(Ptr, Line, BinAddrAdj, D0) ->
end_of_heap ->
{['#CDVIncompleteHeap'],Line,D0};
Fd ->
- case val(Fd) of
+ case bytes(Fd) of
"="++_ ->
put(fd, end_of_heap),
deref_ptr(Ptr, Line, BinAddrAdj, D0);
@@ -2759,7 +2774,7 @@ tag_to_atom("scheduler") -> ?scheduler;
tag_to_atom("timer") -> ?timer;
tag_to_atom("visible_node") -> ?visible_node;
tag_to_atom(UnknownTag) ->
- io:format("WARNING: Found unexpected tag:~s~n",[UnknownTag]),
+ io:format("WARNING: Found unexpected tag:~ts~n",[UnknownTag]),
list_to_atom(UnknownTag).
%%%-----------------------------------------------------------------
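
The crashdump_viewer changes above split the old val/1,2 into bytes/1,2, which returns the raw byte list read from the dump, and string/1,2, which additionally decodes the bytes as UTF-8 via byte_list_to_string/1 and falls back to the untouched byte list when the data is not valid UTF-8. Illustrative calls with assumed input, not from a real dump:

%% What byte_list_to_string/1 (added above) builds on:
unicode:characters_to_list(list_to_binary([104,195,164,114])),  %% "här" - valid UTF-8 is decoded
unicode:characters_to_list(list_to_binary([255,216,0])),        %% {error,...} - helper keeps [255,216,0]
%% parse_atom/2 applies the same idea to atom names:
binary_to_atom(<<"här"/utf8>>, utf8).   %% 'här' (list_to_atom on the raw bytes would give 'hÃ¤r')
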
diff --git a/lib/observer/src/etop_tr.erl b/lib/observer/src/etop_tr.erl
index 8e43f8bb35..1e48fefca4 100644
--- a/lib/observer/src/etop_tr.erl
+++ b/lib/observer/src/etop_tr.erl
@@ -89,14 +89,14 @@ handle_data(Last, {_, Pid, out, _, Time2} = G, Store) ->
end,
New;
false ->
- io:format("Erlang top got garbage ~p~n", [G]),
+ io:format("Erlang top got garbage ~tp~n", [G]),
Last
end;
handle_data(_W, {drop, D}, _) -> %% Error case we are missing data here!
io:format("Erlang top dropped data ~p~n", [D]),
[];
handle_data(Last, G, _) ->
- io:format("Erlang top got garbage ~p~n", [G]),
+ io:format("Erlang top got garbage ~tp~n", [G]),
Last.
elapsed({Me1, S1, Mi1}, {Me2, S2, Mi2}) ->
diff --git a/lib/observer/src/etop_txt.erl b/lib/observer/src/etop_txt.erl
index 183641119a..cd3ec62c13 100644
--- a/lib/observer/src/etop_txt.erl
+++ b/lib/observer/src/etop_txt.erl
@@ -48,7 +48,6 @@ do_update(Prev,Config) ->
do_update(standard_io,Info,Prev,Config).
do_update(Fd,Info,Prev,Config) ->
- Encoding = encoding(Fd),
{Cpu,NProcs,RQ,Clock} = loadinfo(Info,Prev),
io:nl(Fd),
writedoubleline(Fd),
@@ -72,7 +71,7 @@ do_update(Fd,Info,Prev,Config) ->
io:nl(Fd),
writepinfo_header(Fd),
writesingleline(Fd),
- writepinfo(Fd,Info#etop_info.procinfo,Encoding),
+ writepinfo(Fd,Info#etop_info.procinfo,modifier(Fd)),
writedoubleline(Fd),
io:nl(Fd),
Info.
@@ -93,26 +92,27 @@ writepinfo(Fd,[#etop_proc_info{pid=Pid,
cf=MFA,
mq=MQ}
|T],
- Encoding) ->
- io:fwrite(Fd,proc_format(Encoding),
- [Pid,to_list(Name,Encoding),Time,Reds,Mem,MQ,
- formatmfa(MFA,Encoding)]),
- writepinfo(Fd,T,Encoding);
+ Modifier) ->
+ io:fwrite(Fd,proc_format(Modifier),
+ [Pid,to_string(Name,Modifier),Time,Reds,Mem,MQ,
+ to_string(MFA,Modifier)]),
+ writepinfo(Fd,T,Modifier);
writepinfo(_Fd,[],_) ->
ok.
+proc_format(Modifier) ->
+ "~-15w~-20"++Modifier++"s~8w~8w~8w~8w ~-20"++Modifier++"s~n".
-formatmfa({M, F, A},latin1) ->
- io_lib:format("~w:~w/~w",[M, F, A]);
-formatmfa({M, F, A},_) ->
- io_lib:format("~w:~tw/~w",[M, F, A]);
-formatmfa(Other,_) ->
- %% E.g. when running hipe - the current_function for some
- %% processes will be 'undefined'
- io_lib:format("~w",[Other]).
+to_string({M,F,A},Modifier) ->
+ io_lib:format("~w:~"++Modifier++"w/~w",[M,F,A]);
+to_string(Other,Modifier) ->
+ io_lib:format("~"++Modifier++"w",[Other]).
-to_list(Name,_) when is_atom(Name) -> atom_to_list(Name);
-to_list({_M,_F,_A}=MFA,Encoding) -> formatmfa(MFA,Encoding).
+modifier(Device) ->
+ case encoding(Device) of
+ latin1 -> "";
+ _ -> "t"
+ end.
encoding(Device) ->
case io:getopts(Device) of
@@ -122,7 +122,3 @@ encoding(Device) ->
latin1
end.
-proc_format(latin1) ->
- "~-15w~-20s~8w~8w~8w~8w ~-20s~n";
-proc_format(_) ->
- "~-15w~-20ts~8w~8w~8w~8w ~-20ts~n".
diff --git a/lib/observer/src/multitrace.erl b/lib/observer/src/multitrace.erl
index a01eeec6ae..82aec05e0d 100644
--- a/lib/observer/src/multitrace.erl
+++ b/lib/observer/src/multitrace.erl
@@ -103,16 +103,16 @@ print_func(Out,{trace_ts,P,call,{M,F,A},C,Ts},N) ->
io:format(Out,
"~w: ~s~n"
"Process : ~w~n"
- "Call : ~w:~w/~w~n"
- "Arguments : ~p~n"
- "Caller : ~w~n~n",
+ "Call : ~w:~tw/~w~n"
+ "Arguments : ~tp~n"
+ "Caller : ~tw~n~n",
[N,ts(Ts),P,M,F,length(A),A,C]);
print_func(Out,{trace_ts,P,return_from,{M,F,A},R,Ts},N) ->
io:format(Out,
"~w: ~s~n"
"Process : ~w~n"
- "Return from : ~w:~w/~w~n"
- "Return value : ~p~n~n",
+ "Return from : ~w:~tw/~w~n"
+ "Return value : ~tp~n~n",
[N,ts(Ts),P,M,F,A,R]).
@@ -181,7 +181,7 @@ handle_schedule(Out,{trace_ts,P,out,Info,Ts},_TI,S) ->
"out:~n"
"Process : ~w~n"
"Time : ~s~n"
- "Function : ~w~n~n",[P,ts(Ts),Info]),
+ "Function : ~tw~n~n",[P,ts(Ts),Info]),
case lists:keysearch(P,1,S) of
{value,{P,List}} ->
lists:keyreplace(P,1,S,{P,[{out,Ts}|List]});
@@ -193,7 +193,7 @@ handle_schedule(Out,{trace_ts,P,in,Info,Ts},_TI,S) ->
"in:~n"
"Process : ~w~n"
"Time : ~s~n"
- "Function : ~w~n~n",[P,ts(Ts),Info]),
+ "Function : ~tw~n~n",[P,ts(Ts),Info]),
case lists:keysearch(P,1,S) of
{value,{P,List}} ->
lists:keyreplace(P,1,S,{P,[{in,Ts}|List]});
diff --git a/lib/observer/src/observer_alloc_wx.erl b/lib/observer/src/observer_alloc_wx.erl
index ef425f0874..7f4b3dd484 100644
--- a/lib/observer/src/observer_alloc_wx.erl
+++ b/lib/observer/src/observer_alloc_wx.erl
@@ -80,7 +80,7 @@ init([Notebook, Parent, Config]) ->
}
}
catch _:Err ->
- io:format("~p crashed ~p: ~p~n",[?MODULE, Err, erlang:get_stacktrace()]),
+ io:format("~p crashed ~tp: ~tp~n",[?MODULE, Err, erlang:get_stacktrace()]),
{stop, Err}
end.
@@ -183,7 +183,7 @@ handle_info({'EXIT', Old, _}, State = #state{appmon=Old}) ->
{noreply, State#state{active=false, appmon=undefined}};
handle_info(_Event, State) ->
- %% io:format("~p:~p: ~p~n",[?MODULE,?LINE,_Event]),
+ %% io:format("~p:~p: ~tp~n",[?MODULE,?LINE,_Event]),
{noreply, State}.
terminate(_Event, #state{}) ->
diff --git a/lib/observer/src/observer_app_wx.erl b/lib/observer/src/observer_app_wx.erl
index bc4f1fe117..2a481966da 100644
--- a/lib/observer/src/observer_app_wx.erl
+++ b/lib/observer/src/observer_app_wx.erl
@@ -320,7 +320,7 @@ handle_info({'EXIT', _, noconnection}, State) ->
handle_info({'EXIT', _, normal}, State) ->
{noreply, State};
handle_info(_Event, State) ->
- %% io:format("~p:~p: ~p~n",[?MODULE,?LINE,_Event]),
+ %% io:format("~p:~p: ~tp~n",[?MODULE,?LINE,_Event]),
{noreply, State}.
%%%%%%%%%%
diff --git a/lib/observer/src/observer_html_lib.erl b/lib/observer/src/observer_html_lib.erl
index 1f1306c370..3dfcc42ada 100644
--- a/lib/observer/src/observer_html_lib.erl
+++ b/lib/observer/src/observer_html_lib.erl
@@ -142,13 +142,13 @@ dict_table(Tab,{Key0,Value0}, Even) ->
tr(color(Even), [td("VALIGN=center",pre(Key)), td(pre(Value))]).
proc_state(Tab,{Key0,Value0}, Even) ->
- Key = lists:flatten(io_lib:format("~s",[Key0])),
+ Key = lists:flatten(io_lib:format("~ts",[Key0])),
Value = all_or_expand(Tab,Value0),
tr(color(Even), [td("VALIGN=center",Key), td(pre(Value))]).
all_or_expand(Tab,Term) ->
- Preview = io_lib:format("~P",[Term,8]),
- Check = io_lib:format("~P",[Term,100]),
+ Preview = io_lib:format("~tP",[Term,8]),
+ Check = io_lib:format("~tP",[Term,100]),
Exp = Preview=/=Check,
all_or_expand(Tab,Term,Preview,Exp).
all_or_expand(_Tab,Term,Str,false)
@@ -166,13 +166,8 @@ all_or_expand(Tab,Term,Preview,true)
"Click to expand above term")];
all_or_expand(Tab,Bin,_PreviewStr,_Expand)
when is_binary(Bin) ->
- Size = byte_size(Bin),
- PrevSize = min(Size, 10) * 8,
- <<Preview:PrevSize, _/binary>> = Bin,
- Hash = erlang:phash2(Bin),
- Key = {Preview, Size, Hash},
- ets:insert(Tab,{Key,Bin}),
- Term = io_lib:format("~p", [['#OBSBin',Preview,Size,Hash]]),
+ OBSBin = observer_lib:make_obsbin(Bin,Tab),
+ Term = io_lib:format("~tp", [OBSBin]),
href_proc_port(lists:flatten(Term), true).
color(true) -> io_lib:format("BGCOLOR=\"#~2.16.0B~2.16.0B~2.16.0B\"", tuple_to_list(?BG_EVEN));
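The preview/expand decision above relies on the depth-limited ~tP directive, which takes the print depth as an extra argument; comparing a depth-8 and a depth-100 rendering tells whether an expand link is needed. A short sketch with an illustrative term:

    Term = lists:seq(1, 50),
    Preview = io_lib:format("~tP", [Term, 8]),    % "[1,2,3,4,5,6,7|...]"
    Check   = io_lib:format("~tP", [Term, 100]),  % the whole list
    Exp = Preview =/= Check.                      % true -> offer an expand link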
@@ -339,9 +334,11 @@ href_proc_bin(From, T, Acc, LTB) ->
BinStr =
case string:tokens(OffsetSizePos,",.| \n") of
[Offset,SizeStr,Pos] when From =:= cdv ->
- Id = {list_to_integer(Offset),10,list_to_integer(Pos)},
+ Size = list_to_integer(SizeStr),
+ PreviewSize = min(Size,10),
+ Id = {list_to_integer(Offset),PreviewSize,list_to_integer(Pos)},
{ok,PreviewBin} = crashdump_viewer:expand_binary(Id),
- PreviewStr = preview_string(list_to_integer(SizeStr), PreviewBin),
+ PreviewStr = preview_string(Size, PreviewBin),
if LTB ->
href("TARGET=\"expanded\"",
["#Binary?offset="++Offset++
@@ -351,14 +348,14 @@ href_proc_bin(From, T, Acc, LTB) ->
true ->
PreviewStr
end;
- [Preview,SizeStr,Md5] when From =:= obs ->
+ [PreviewIntStr,SizeStr,Md5] when From =:= obs ->
Size = list_to_integer(SizeStr),
- PrevSize = min(Size, 10) * 8,
- PreviewStr = preview_string(Size,
- <<(list_to_integer(Preview)):PrevSize>>),
+ PreviewInt = list_to_integer(PreviewIntStr),
+ PrevSize = (trunc(math:log2(PreviewInt)/8)+1)*8,
+ PreviewStr = preview_string(Size,<<PreviewInt:PrevSize>>),
if LTB ->
href("TARGET=\"expanded\"",
- ["#OBSBinary?key1="++Preview++
+ ["#OBSBinary?key1="++PreviewIntStr++
"&key2="++SizeStr++
"&key3="++Md5],
PreviewStr);
@@ -372,14 +369,14 @@ href_proc_bin(From, T, Acc, LTB) ->
preview_string(Size, PreviewBin) when Size > 10 ->
["&lt;&lt;",
- remove_lgt(io_lib:format("~p",[PreviewBin])),
+ remove_lgt(io_lib:format("~tp",[PreviewBin])),
"...(",
observer_lib:to_str({bytes,Size}),
")",
"&gt;&gt"];
preview_string(_, PreviewBin) ->
["&lt;&lt;",
- remove_lgt(io_lib:format("~p",[PreviewBin])),
+ remove_lgt(io_lib:format("~tp",[PreviewBin])),
"&gt;&gt"].
remove_lgt(Deep) ->
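Since the preview integer embedded in an #OBSBinary link is no longer always 10 bytes wide, href_proc_bin recomputes its byte-aligned bit width with (trunc(math:log2(PreviewInt)/8)+1)*8 before rebuilding the binary. A worked example with an illustrative three-byte preview value:

    PreviewInt = 16#A1B2C3,                                 % log2 is about 23.3
    PrevSize = (trunc(math:log2(PreviewInt) / 8) + 1) * 8,  % -> 24 bits
    <<16#A1, 16#B2, 16#C3>> = <<PreviewInt:PrevSize>>.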
diff --git a/lib/observer/src/observer_lib.erl b/lib/observer/src/observer_lib.erl
index c7ee294719..463fb5b8ef 100644
--- a/lib/observer/src/observer_lib.erl
+++ b/lib/observer/src/observer_lib.erl
@@ -30,7 +30,8 @@
create_attrs/0,
set_listctrl_col_size/2,
create_status_bar/1,
- html_window/1, html_window/2
+ html_window/1, html_window/2,
+ make_obsbin/2
]).
-include_lib("wx/include/wx.hrl").
@@ -297,8 +298,10 @@ to_str(No) when is_integer(No) ->
integer_to_list(No);
to_str(Float) when is_float(Float) ->
io_lib:format("~.3f", [Float]);
+to_str({trunc, Float}) when is_float(Float) ->
+ float_to_list(Float, [{decimals,0}]);
to_str(Term) ->
- io_lib:format("~w", [Term]).
+ io_lib:format("~tw", [Term]).
create_menus([], _MenuBar, _Type) -> ok;
create_menus(Menus, MenuBar, Type) ->
@@ -518,7 +521,7 @@ link_entry2(Panel,{Target,Str},Cursor) ->
TC.
to_link(RegName={Name, Node}) when is_atom(Name), is_atom(Node) ->
- Str = io_lib:format("{~p,~p}", [Name, Node]),
+ Str = io_lib:format("{~tp,~p}", [Name, Node]),
{RegName, Str};
to_link(TI = {_Target, _Identifier}) ->
TI;
@@ -639,11 +642,11 @@ parse_string(Str) ->
Tokens = case erl_scan:string(Str, 1, [text]) of
{ok, Ts, _} -> Ts;
{error, {_SLine, SMod, SError}, _} ->
- throw(io_lib:format("~s", [SMod:format_error(SError)]))
+ throw(io_lib:format("~ts", [SMod:format_error(SError)]))
end,
case lib:extended_parse_term(Tokens) of
{error, {_PLine, PMod, PError}} ->
- throw(io_lib:format("~s", [PMod:format_error(PError)]));
+ throw(io_lib:format("~ts", [PMod:format_error(PError)]));
Res -> Res
end
catch
@@ -767,3 +770,26 @@ update_progress_text(PD,Text) ->
end.
finish_progress(PD) ->
wxProgressDialog:destroy(PD).
+
+make_obsbin(Bin,Tab) ->
+ Size = byte_size(Bin),
+ Preview =
+ try
+ %% The binary might be a unicode string, in which case we
+ %% don't want to split it in the middle of a grapheme
+ %% cluster - thus trying string:length and slice.
+ PL1 = min(string:length(Bin), 10),
+ PB1 = string:slice(Bin,0,PL1),
+ PS1 = byte_size(PB1) * 8,
+ <<P1:PS1>> = PB1,
+ P1
+ catch _:_ ->
+ %% Probably not a string, so just split anywhere
+ PS2 = min(Size, 10) * 8,
+ <<P2:PS2, _/binary>> = Bin,
+ P2
+ end,
+ Hash = erlang:phash2(Bin),
+ Key = {Preview, Size, Hash},
+ ets:insert(Tab, {Key,Bin}),
+ ['#OBSBin',Preview,Size,Hash].
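make_obsbin/2 deliberately tries string:length/1 and string:slice/3 first so that a UTF-8 preview is cut on a character boundary rather than in the middle of a multi-byte sequence. A small illustration (illustrative binary):

    Bin = <<"αβγδεζηθικλμ"/utf8>>,          % 12 characters, 24 bytes
    Len = min(string:length(Bin), 10),       % 10
    Preview = string:slice(Bin, 0, Len),     % first 10 characters
    20 = byte_size(Preview).                 % 10 two-byte characters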
diff --git a/lib/observer/src/observer_perf_wx.erl b/lib/observer/src/observer_perf_wx.erl
index fcc51310c8..5adfadb16e 100644
--- a/lib/observer/src/observer_perf_wx.erl
+++ b/lib/observer/src/observer_perf_wx.erl
@@ -87,7 +87,7 @@ init([Notebook, Parent, Config]) ->
},
{Panel, State0}
catch _:Err ->
- io:format("~p crashed ~p: ~p~n",[?MODULE, Err, erlang:get_stacktrace()]),
+ io:format("~p crashed ~tp: ~tp~n",[?MODULE, Err, erlang:get_stacktrace()]),
{stop, Err}
end.
@@ -235,7 +235,7 @@ handle_info({'EXIT', Old, _}, State = #state{appmon=Old}) ->
{noreply, State#state{active=false, appmon=undefined}};
handle_info(_Event, State) ->
- %% io:format("~p:~p: ~p~n",[?MODULE,?LINE,_Event]),
+ %% io:format("~p:~p: ~tp~n",[?MODULE,?LINE,_Event]),
{noreply, State}.
%%%%%%%%%%
diff --git a/lib/observer/src/observer_port_wx.erl b/lib/observer/src/observer_port_wx.erl
index 8339267659..5908e99e36 100644
--- a/lib/observer/src/observer_port_wx.erl
+++ b/lib/observer/src/observer_port_wx.erl
@@ -338,7 +338,7 @@ handle_info({info, {port_info_not_available,NodeName}},
{noreply, State};
handle_info({error, Error}, #state{panel=Panel} = State) ->
- Str = io_lib:format("ERROR: ~s~n",[Error]),
+ Str = io_lib:format("ERROR: ~ts~n",[Error]),
observer_lib:display_info_dialog(Panel, Str),
{noreply, State};
diff --git a/lib/observer/src/observer_pro_wx.erl b/lib/observer/src/observer_pro_wx.erl
index 3083297f31..2e5fe0bc1a 100644
--- a/lib/observer/src/observer_pro_wx.erl
+++ b/lib/observer/src/observer_pro_wx.erl
@@ -217,7 +217,7 @@ call(Holder, What) ->
erlang:demonitor(Ref),
Res
after 2000 ->
- io:format("Hanging call ~p~n",[What]),
+ io:format("Hanging call ~tp~n",[What]),
""
end.
@@ -256,7 +256,7 @@ handle_info(not_active, #state{timer=Timer0}=State) ->
{noreply, State#state{timer=Timer}};
handle_info(Info, State) ->
- io:format("~p:~p, Unexpected info: ~p~n", [?MODULE, ?LINE, Info]),
+ io:format("~p:~p, Unexpected info: ~tp~n", [?MODULE, ?LINE, Info]),
{noreply, State}.
terminate(_Reason, #state{holder=Holder}) ->
@@ -273,11 +273,11 @@ handle_call(get_config, _, #state{holder=Holder, timer=Timer}=State) ->
{reply, Conf#{acc=>Accum}, State};
handle_call(Msg, _From, State) ->
- io:format("~p:~p: Unhandled call ~p~n",[?MODULE, ?LINE, Msg]),
+ io:format("~p:~p: Unhandled call ~tp~n",[?MODULE, ?LINE, Msg]),
{reply, ok, State}.
handle_cast(Msg, State) ->
- io:format("~p:~p: Unhandled cast ~p~n", [?MODULE, ?LINE, Msg]),
+ io:format("~p:~p: Unhandled cast ~tp~n", [?MODULE, ?LINE, Msg]),
{noreply, State}.
%%%%%%%%%%%%%%%%%%%%LOOP%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -401,7 +401,7 @@ handle_event(#wx{event=#wxList{type=command_list_item_activated}},
{noreply, State#state{procinfo_menu_pids=Opened2}};
handle_event(Event, State) ->
- io:format("~p:~p: handle event ~p\n", [?MODULE, ?LINE, Event]),
+ io:format("~p:~p: handle event ~tp\n", [?MODULE, ?LINE, Event]),
{noreply, State}.
@@ -559,7 +559,7 @@ table_holder(#holder{info=Info, attrs=Attrs,
%% Node crashed will be noticed soon..
table_holder(S0#holder{backend_pid=undefined});
_What ->
- %% io:format("~p: Table holder got ~p~n",[?MODULE, _What]),
+ %% io:format("~p: Table holder got ~tp~n",[?MODULE, _What]),
table_holder(S0)
end.
diff --git a/lib/observer/src/observer_procinfo.erl b/lib/observer/src/observer_procinfo.erl
index 10decd8b62..963def958b 100644
--- a/lib/observer/src/observer_procinfo.erl
+++ b/lib/observer/src/observer_procinfo.erl
@@ -56,7 +56,7 @@ init([Pid, ParentFrame, Parent]) ->
Table = ets:new(observer_expand,[set,public]),
Title=case observer_wx:try_rpc(node(Pid), erlang, process_info, [Pid, registered_name]) of
[] -> io_lib:format("~p",[Pid]);
- {registered_name, Registered} -> io_lib:format("~p (~p)",[Registered, Pid]);
+ {registered_name, Registered} -> io_lib:format("~tp (~p)",[Registered, Pid]);
undefined -> throw(process_undefined)
end,
Frame=wxFrame:new(ParentFrame, ?wxID_ANY, [atom_to_list(node(Pid)), $:, Title],
@@ -171,7 +171,7 @@ handle_info({get_debug_info, From}, State = #state{notebook=Notebook}) ->
From ! {procinfo_debug, Notebook},
{noreply, State};
handle_info(_Info, State) ->
- %% io:format("~p: ~p, Handle info: ~p~n", [?MODULE, ?LINE, Info]),
+ %% io:format("~p: ~p, Handle info: ~tp~n", [?MODULE, ?LINE, Info]),
{noreply, State}.
handle_call(Call, From, _State) ->
@@ -263,7 +263,7 @@ init_stack_page(Parent, Pid) ->
wxListCtrl:setItem(LCtrl, Row, 0, observer_lib:to_str({M,F,A})),
FileLine = case Info of
[{file,File},{line,Line}] ->
- io_lib:format("~s:~w", [File,Line]);
+ io_lib:format("~ts:~w", [File,Line]);
_ ->
[]
end,
@@ -487,5 +487,5 @@ io_request({put_chars, Encoding, Module, Function, Args}, State) ->
{error, {error, Function}, State}
end;
io_request(_Req, State) ->
- %% io:format("~p: Unknown req: ~p ~n",[?LINE, _Req]),
+ %% io:format("~p: Unknown req: ~tp ~n",[?LINE, _Req]),
{ok, {error, request}, State}.
diff --git a/lib/observer/src/observer_sys_wx.erl b/lib/observer/src/observer_sys_wx.erl
index db86c05bed..8c2ffd77b4 100644
--- a/lib/observer/src/observer_sys_wx.erl
+++ b/lib/observer/src/observer_sys_wx.erl
@@ -48,7 +48,7 @@ start_link(Notebook, Parent, Config) ->
init([Notebook, Parent, Config]) ->
SysInfo = observer_backend:sys_info(),
- {Sys, Mem, Cpu, Stats} = info_fields(),
+ {Sys, Mem, Cpu, Stats, Limits} = info_fields(),
Panel = wxPanel:new(Notebook),
Sizer = wxBoxSizer:new(?wxVERTICAL),
HSizer0 = wxBoxSizer:new(?wxHORIZONTAL),
@@ -63,17 +63,26 @@ init([Notebook, Parent, Config]) ->
wxSizer:add(HSizer1, FPanel2, [{flag, ?wxEXPAND}, {proportion, 1}]),
wxSizer:add(HSizer1, FPanel3, [{flag, ?wxEXPAND}, {proportion, 1}]),
+ HSizer2 = wxBoxSizer:new(?wxHORIZONTAL),
+ {FPanel4, _FSizer4, Fields4} = observer_lib:display_info(Panel, observer_lib:fill_info(Limits, SysInfo)),
+ wxSizer:add(HSizer2, FPanel4, [{flag, ?wxEXPAND}, {proportion, 1}]),
+
+
BorderFlags = ?wxLEFT bor ?wxRIGHT,
wxSizer:add(Sizer, HSizer0, [{flag, ?wxEXPAND bor BorderFlags bor ?wxTOP},
{proportion, 0}, {border, 5}]),
wxSizer:add(Sizer, HSizer1, [{flag, ?wxEXPAND bor BorderFlags bor ?wxBOTTOM},
{proportion, 0}, {border, 5}]),
+ wxSizer:add(Sizer, HSizer2, [{flag, ?wxEXPAND bor BorderFlags bor ?wxBOTTOM},
+ {proportion, 0}, {border, 5}]),
+
wxPanel:setSizer(Panel, Sizer),
Timer = observer_lib:start_timer(Config, 10),
{Panel, #sys_wx_state{parent=Parent,
parent_notebook=Notebook,
panel=Panel, sizer=Sizer,
- timer=Timer, fields=Fields0 ++ Fields1++Fields2++Fields3}}.
+ timer=Timer, fields=Fields0 ++ Fields1++Fields2++Fields3++Fields4}}.
+
create_sys_menu(Parent) ->
View = {"View", [#create_menu{id = ?ID_REFRESH, text = "Refresh\tCtrl-R"},
@@ -83,14 +92,40 @@ create_sys_menu(Parent) ->
update_syspage(#sys_wx_state{node = undefined}) -> ignore;
update_syspage(#sys_wx_state{node = Node, fields=Fields, sizer=Sizer}) ->
SysInfo = observer_wx:try_rpc(Node, observer_backend, sys_info, []),
- {Sys, Mem, Cpu, Stats} = info_fields(),
+ {Sys, Mem, Cpu, Stats, Limits} = info_fields(),
observer_lib:update_info(Fields,
observer_lib:fill_info(Sys, SysInfo) ++
observer_lib:fill_info(Mem, SysInfo) ++
observer_lib:fill_info(Cpu, SysInfo) ++
- observer_lib:fill_info(Stats, SysInfo)),
+ observer_lib:fill_info(Stats, SysInfo)++
+ observer_lib:fill_info(Limits, SysInfo)),
+
wxSizer:layout(Sizer).
+
+maybe_convert(undefined) -> "Not available";
+maybe_convert(V) -> observer_lib:to_str(V).
+
+get_dist_buf_busy_limit_info() ->
+ fun(Data) ->
+ maybe_convert(proplists:get_value(dist_buf_busy_limit, Data))
+ end.
+
+get_limit_count_info(Count, Limit) ->
+ fun(Data) ->
+ C = proplists:get_value(Count, Data),
+ L = proplists:get_value(Limit, Data),
+ lists:flatten(
+ io_lib:format("~s / ~s ~s",
+ [maybe_convert(C), maybe_convert(L),
+ if
+ C =:= undefined -> "";
+ L =:= undefined -> "";
+ true -> io_lib:format("(~s % used)",[observer_lib:to_str({trunc, (C / L) *100})])
+ end]))
+ end.
+
+
info_fields() ->
Sys = [{"System and Architecture",
[{"System Version", otp_release},
@@ -122,14 +157,20 @@ info_fields() ->
]}],
Stats = [{"Statistics", right,
[{"Up time", {time_ms, uptime}},
- {"Max Processes", process_limit},
- {"Processes", process_count},
{"Run Queue", run_queue},
{"IO Input", {bytes, io_input}},
{"IO Output", {bytes, io_output}}
]}
],
- {Sys, Mem, Cpu, Stats}.
+ Limits = [{"System statistics / limit",
+ [{"Atoms", get_limit_count_info(atom_count, atom_limit)},
+ {"Processes", get_limit_count_info(process_count, process_limit)},
+ {"Ports", get_limit_count_info(port_count, port_limit)},
+ {"ETS", get_limit_count_info(ets_count, ets_limit)},
+ {"Distribution buffer busy limit", get_dist_buf_busy_limit_info()}
+ ]}],
+ {Sys, Mem, Cpu, Stats, Limits}.
+
%%%%%%%%%%%%%%%%%%%%%%% Callbacks %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -158,7 +199,7 @@ handle_info(not_active, #sys_wx_state{timer = Timer} = State) ->
{noreply, State#sys_wx_state{timer = observer_lib:stop_timer(Timer)}};
handle_info(Info, State) ->
- io:format("~p:~p: Unhandled info: ~p~n", [?MODULE, ?LINE, Info]),
+ io:format("~p:~p: Unhandled info: ~tp~n", [?MODULE, ?LINE, Info]),
{noreply, State}.
terminate(_Reason, _State) ->
@@ -171,11 +212,11 @@ handle_call(get_config, _, #sys_wx_state{timer=Timer}=State) ->
{reply, observer_lib:timer_config(Timer), State};
handle_call(Msg, _From, State) ->
- io:format("~p~p: Unhandled Call ~p~n",[?MODULE, ?LINE, Msg]),
+ io:format("~p~p: Unhandled Call ~tp~n",[?MODULE, ?LINE, Msg]),
{reply, ok, State}.
handle_cast(Msg, State) ->
- io:format("~p~p: Unhandled cast ~p~n",[?MODULE, ?LINE, Msg]),
+ io:format("~p~p: Unhandled cast ~tp~n",[?MODULE, ?LINE, Msg]),
{noreply, State}.
handle_event(#wx{id = ?ID_REFRESH, event = #wxCommand{type = command_menu_selected}},
@@ -194,5 +235,5 @@ handle_event(#wx{id = ?ID_REFRESH_INTERVAL,
{noreply, State#sys_wx_state{timer=Timer}};
handle_event(Event, State) ->
- io:format("~p:~p: Unhandled event ~p\n", [?MODULE,?LINE,Event]),
+ io:format("~p:~p: Unhandled event ~tp\n", [?MODULE,?LINE,Event]),
{noreply, State}.
diff --git a/lib/observer/src/observer_trace_wx.erl b/lib/observer/src/observer_trace_wx.erl
index b960c61ff0..8127248262 100644
--- a/lib/observer/src/observer_trace_wx.erl
+++ b/lib/observer/src/observer_trace_wx.erl
@@ -683,7 +683,7 @@ handle_event(#wx{id=?REMOVE_NODES}, #state{n_view=Nview, nodes=Ns0} = State) ->
{noreply, State#state{nodes = Ns}};
handle_event(#wx{id=ID, event = What}, State) ->
- io:format("~p:~p: Unhandled event: ~p, ~p ~n", [?MODULE, ?LINE, ID, What]),
+ io:format("~p:~p: Unhandled event: ~p, ~tp ~n", [?MODULE, ?LINE, ID, What]),
{noreply, State}.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -729,7 +729,7 @@ handle_info({update_ms, NewMs}, State) ->
{noreply, State#state{match_specs=NewMs}};
handle_info(Any, State) ->
- io:format("~p~p: received unexpected message: ~p\n", [?MODULE, self(), Any]),
+ io:format("~p~p: received unexpected message: ~tp\n", [?MODULE, self(), Any]),
{noreply, State}.
terminate(_Reason, #state{nodes=_Nodes}) ->
@@ -1046,33 +1046,33 @@ format_trace(Trace, Size, TS0={_,_,MS}) ->
case element(4, Trace) of
{dbg,ok} -> "";
Message ->
- io_lib:format("~s (~100p) << ~100p~n", [TS,From,Message])
+ io_lib:format("~s (~100p) << ~100tp~n", [TS,From,Message])
end;
'send' ->
Message = element(4, Trace),
To = element(5, Trace),
- io_lib:format("~s (~100p) ~100p ! ~100p~n", [TS,From,To,Message]);
+ io_lib:format("~s (~100p) ~100p ! ~100tp~n", [TS,From,To,Message]);
call ->
case element(4, Trace) of
MFA when Size == 5 ->
Message = element(5, Trace),
- io_lib:format("~s (~100p) call ~s (~100p) ~n", [TS,From,ffunc(MFA),Message]);
+ io_lib:format("~s (~100p) call ~ts (~100tp) ~n", [TS,From,ffunc(MFA),Message]);
MFA ->
- io_lib:format("~s (~100p) call ~s~n", [TS,From,ffunc(MFA)])
+ io_lib:format("~s (~100p) call ~ts~n", [TS,From,ffunc(MFA)])
end;
return_from ->
MFA = element(4, Trace),
Ret = element(5, Trace),
- io_lib:format("~s (~100p) returned from ~s -> ~100p~n", [TS,From,ffunc(MFA),Ret]);
+ io_lib:format("~s (~100p) returned from ~ts -> ~100tp~n", [TS,From,ffunc(MFA),Ret]);
return_to ->
MFA = element(4, Trace),
- io_lib:format("~s (~100p) returning to ~s~n", [TS,From,ffunc(MFA)]);
+ io_lib:format("~s (~100p) returning to ~ts~n", [TS,From,ffunc(MFA)]);
spawn when Size == 5 ->
Pid = element(4, Trace),
MFA = element(5, Trace),
- io_lib:format("~s (~100p) spawn ~100p as ~s~n", [TS,From,Pid,ffunc(MFA)]);
+ io_lib:format("~s (~100p) spawn ~100p as ~ts~n", [TS,From,Pid,ffunc(MFA)]);
Op ->
- io_lib:format("~s (~100p) ~100p ~s~n", [TS,From,Op,ftup(Trace,4,Size)])
+ io_lib:format("~s (~100p) ~100p ~ts~n", [TS,From,Op,ftup(Trace,4,Size)])
end.
%%% These f* functions returns non-flat strings
@@ -1080,24 +1080,24 @@ format_trace(Trace, Size, TS0={_,_,MS}) ->
%% {M,F,[A1, A2, ..., AN]} -> "M:F(A1, A2, ..., AN)"
%% {M,F,A} -> "M:F/A"
ffunc({M,F,Argl}) when is_list(Argl) ->
- io_lib:format("~100p:~100p(~s)", [M, F, fargs(Argl)]);
+ io_lib:format("~100p:~100tp(~ts)", [M, F, fargs(Argl)]);
ffunc({M,F,Arity}) ->
- io_lib:format("~100p:~100p/~100p", [M,F,Arity]);
-ffunc(X) -> io_lib:format("~100p", [X]).
+ io_lib:format("~100p:~100tp/~100p", [M,F,Arity]);
+ffunc(X) -> io_lib:format("~100tp", [X]).
%% Integer -> "Integer"
%% [A1, A2, ..., AN] -> "A1, A2, ..., AN"
fargs(Arity) when is_integer(Arity) -> integer_to_list(Arity);
fargs([]) -> [];
-fargs([A]) -> io_lib:format("~100p", [A]); %% last arg
-fargs([A|Args]) -> [io_lib:format("~100p,", [A]) | fargs(Args)];
-fargs(A) -> io_lib:format("~100p", [A]). % last or only arg
+fargs([A]) -> io_lib:format("~100tp", [A]); %% last arg
+fargs([A|Args]) -> [io_lib:format("~100tp,", [A]) | fargs(Args)];
+fargs(A) -> io_lib:format("~100tp", [A]). % last or only arg
%% {A_1, A_2, ..., A_N} -> "A_Index A_Index+1 ... A_Size"
ftup(Trace, Index, Index) ->
- io_lib:format("~100p", [element(Index, Trace)]);
+ io_lib:format("~100tp", [element(Index, Trace)]);
ftup(Trace, Index, Size) ->
- [io_lib:format("~100p ", [element(Index, Trace)])
+ [io_lib:format("~100tp ", [element(Index, Trace)])
| ftup(Trace, Index+1, Size)].
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -1125,18 +1125,19 @@ get_config(#state{def_proc_flags = ProcFlags,
write_file(Frame, Filename, Config) ->
Str =
- ["%%%\n%%% This file is generated by Observer\n",
+ ["%%% ",epp:encoding_to_string(utf8), "\n"
+ "%%%\n%%% This file is generated by Observer\n",
"%%%\n%%% DO NOT EDIT!\n%%%\n",
- [io_lib:format("~p.~n",[MSTerm]) ||
+ [io_lib:format("~tp.~n",[MSTerm]) ||
MSTerm <- proplists:get_value(match_specs, Config)],
io_lib:format("~p.~n",[lists:keyfind(procflags, 1, Config)]),
io_lib:format("~p.~n",[lists:keyfind(portflags, 1, Config)]),
- io_lib:format("~p.~n",[lists:keyfind(output, 1, Config)]),
- [io_lib:format("~p.~n",[ModuleTerm]) ||
+ io_lib:format("~tp.~n",[lists:keyfind(output, 1, Config)]),
+ [io_lib:format("~tp.~n",[ModuleTerm]) ||
ModuleTerm <- proplists:get_value(trace_p, Config)]
],
- case file:write_file(Filename, list_to_binary(Str)) of
+ case file:write_file(Filename, unicode:characters_to_binary(Str)) of
ok ->
success;
{error, Reason} ->
@@ -1200,7 +1201,7 @@ make_ms(MS) ->
make_ms(Name,Term,FunStr).
make_ms(Name, Term, FunStr) ->
- #match_spec{name=Name, term=Term, str=io_lib:format("~w", Term), func = FunStr}.
+ #match_spec{name=Name, term=Term, str=io_lib:format("~tw", Term), func = FunStr}.
parse_tp({tp, Mod, FAs}, State) ->
Patterns = [#tpattern{m=Mod,fa={F,A}, ms=make_ms(List)} ||
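write_file/3 now prepends an epp-style encoding comment and encodes the whole io-list as UTF-8, so saved configurations containing Unicode atoms can be read back with file:consult/1. A minimal sketch of the pattern, with a hypothetical file name and term, assuming a release with Unicode atom support:

    Term = {match_specs, ['spec_αβ']},
    Str = ["%%% ", epp:encoding_to_string(utf8), "\n",
           io_lib:format("~tp.~n", [Term])],
    ok = file:write_file("trace_settings.txt", unicode:characters_to_binary(Str)),
    {ok, [Term]} = file:consult("trace_settings.txt").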
diff --git a/lib/observer/src/observer_traceoptions_wx.erl b/lib/observer/src/observer_traceoptions_wx.erl
index 285c298c4b..4f46426cf6 100644
--- a/lib/observer/src/observer_traceoptions_wx.erl
+++ b/lib/observer/src/observer_traceoptions_wx.erl
@@ -487,7 +487,7 @@ edit_ms(TextCtrl, Label0, Parent) ->
_ -> Label0
end,
#match_spec{name=Label, term=MatchSpec,
- str=io_lib:format("~w",[MatchSpec]),
+ str=io_lib:format("~tw",[MatchSpec]),
func=Str}
catch
throw:cancel ->
@@ -511,18 +511,18 @@ ms_from_string(Str) ->
Tokens = case erl_scan:string(Str) of
{ok, Ts, _} -> Ts;
{error, {SLine, SMod, SError}, _} ->
- throw(io_lib:format("~w: ~s", [SLine,SMod:format_error(SError)]))
+ throw(io_lib:format("~w: ~ts", [SLine,SMod:format_error(SError)]))
end,
Exprs = case erl_parse:parse_exprs(Tokens) of
{ok, T} -> T;
{error, {PLine, PMod, PError}} ->
- throw(io_lib:format("~w: ~s", [PLine,PMod:format_error(PError)]))
+ throw(io_lib:format("~w: ~ts", [PLine,PMod:format_error(PError)]))
end,
Term = case Exprs of
[{'fun', _, {clauses, Clauses}}|_] ->
case ms_transform:transform_from_shell(dbg,Clauses,orddict:new()) of
{error, [{_,[{MSLine,Mod,MSInfo}]}],_} ->
- throw(io_lib:format("~w: ~p", [MSLine,Mod:format_error(MSInfo)]));
+ throw(io_lib:format("~w: ~tp", [MSLine,Mod:format_error(MSInfo)]));
{error, _} ->
throw("Could not convert fun() to match spec");
Ms ->
@@ -536,7 +536,7 @@ ms_from_string(Str) ->
{error, List} -> throw([[Error, $\n] || {_, Error} <- List])
end
catch error:_Reason ->
- %% io:format("Bad term: ~s~n ~p in ~p~n", [Str, _Reason, erlang:get_stacktrace()]),
+ %% io:format("Bad term: ~ts~n ~tp in ~tp~n", [Str, _Reason, erlang:get_stacktrace()]),
throw("Invalid term")
end.
@@ -556,7 +556,8 @@ filter_listbox_data(Input, Data, ListBox) ->
filter_listbox_data(Input, Data, ListBox, true).
filter_listbox_data(Input, Data, ListBox, AddClientData) ->
- FilteredData = [X || X = {Str, _} <- Data, re:run(Str, Input) =/= nomatch],
+ FilteredData = [X || X = {Str, _} <- Data,
+ re:run(Str, Input, [unicode]) =/= nomatch],
wxListBox:clear(ListBox),
wxListBox:appendStrings(ListBox, [Str || {Str,_} <- FilteredData]),
AddClientData andalso
@@ -648,9 +649,9 @@ parse_function_names(Choices) ->
parse_function_names([], Acc) ->
lists:reverse(Acc);
parse_function_names([{H, Term}|T], Acc) ->
- IsFun = re:run(H, ".*-fun-\\d*?-"),
- IsLc = re:run(H, ".*-lc\\$\\^\\d*?/\\d*?-\\d*?-"),
- IsLbc = re:run(H, ".*-lbc\\$\\^\\d*?/\\d*?-\\d*?-"),
+ IsFun = re:run(H, ".*-fun-\\d*?-", [unicode,ucp]),
+ IsLc = re:run(H, ".*-lc\\$\\^\\d*?/\\d*?-\\d*?-", [unicode,ucp]),
+ IsLbc = re:run(H, ".*-lbc\\$\\^\\d*?/\\d*?-\\d*?-", [unicode,ucp]),
Parsed =
if IsFun =/= nomatch -> "Fun: " ++ H;
IsLc =/= nomatch -> "List comprehension: " ++ H;
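Both the listbox filter and the fun/LC name classification above now pass the unicode (and, for the patterns, ucp) options to re, since function names may be UTF-8 strings. A short sketch of why the option matters (illustrative name):

    Name = "observer_αβ:handle_event/2",
    {match, _} = re:run(Name, "αβ", [unicode]),
    %% without unicode, code points above 255 in the subject or
    %% pattern are rejected outright:
    {'EXIT', {badarg, _}} = (catch re:run(Name, "αβ")).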
diff --git a/lib/observer/src/observer_tv_table.erl b/lib/observer/src/observer_tv_table.erl
index 789e060cfb..d6dcee2cda 100644
--- a/lib/observer/src/observer_tv_table.erl
+++ b/lib/observer/src/observer_tv_table.erl
@@ -258,7 +258,7 @@ handle_event(#wx{event=#wxList{type=command_list_item_selected, itemIndex=Index}
State = #state{pid=Pid, grid=Grid, status=StatusBar}) ->
N = wxListCtrl:getItemCount(Grid),
Str = get_row(Pid, Index, all),
- wxStatusBar:setStatusText(StatusBar, io_lib:format("Objects: ~w: ~s",[N, Str])),
+ wxStatusBar:setStatusText(StatusBar, io_lib:format("Objects: ~w: ~ts",[N, Str])),
{noreply, State#state{selected=Index}};
handle_event(#wx{event=#wxList{type=command_list_item_activated, itemIndex=Index}},
@@ -278,7 +278,7 @@ handle_event(#wx{id=?ID_DELETE},
State = #state{grid=Grid, pid=Pid, status=StatusBar, selected=Index}) ->
Str = get_row(Pid, Index, all),
Pid ! {delete, Index},
- wxStatusBar:setStatusText(StatusBar, io_lib:format("Deleted object: ~s",[Str])),
+ wxStatusBar:setStatusText(StatusBar, io_lib:format("Deleted object: ~ts",[Str])),
wxListCtrl:setItemState(Grid, Index, 0, ?wxLIST_STATE_FOCUSED),
{noreply, State#state{selected=undefined}};
@@ -338,7 +338,7 @@ handle_event(#wx{id=?SEARCH_ENTRY, event=#wxCommand{type=command_text_enter,cmdS
Pid ! {mark_search_hit, false},
case search(Pid, Str, Pos, Dir, Case) of
false ->
- wxStatusBar:setStatusText(SB, io_lib:format("Not found (regexp): ~s",[Str])),
+ wxStatusBar:setStatusText(SB, io_lib:format("Not found (regexp): ~ts",[Str])),
Pid ! {mark_search_hit, Find#find.start},
wxListCtrl:refreshItem(Grid, Find#find.start),
{noreply, State#state{search=Search#search{find=Find#find{found=false}}}};
@@ -372,7 +372,7 @@ handle_event(#wx{id=?SEARCH_ENTRY, event=#wxCommand{cmdString=Str}},
Pid ! {mark_search_hit, false},
case search(Pid, Str, Cont#find.start, Dir, Case) of
false ->
- wxStatusBar:setStatusText(SB, io_lib:format("Not found (regexp): ~s",[Str])),
+ wxStatusBar:setStatusText(SB, io_lib:format("Not found (regexp): ~ts",[Str])),
{noreply, State};
Row ->
wxListCtrl:ensureVisible(Grid, Row),
@@ -395,19 +395,19 @@ handle_event(#wx{id=?ID_REFRESH_INTERVAL},
{noreply, State#state{timer=Timer}};
handle_event(_Event, State) ->
- %io:format("~p:~p, handle event ~p\n", [?MODULE, ?LINE, Event]),
+ %io:format("~p:~p, handle event ~tp\n", [?MODULE, ?LINE, Event]),
{noreply, State}.
handle_sync_event(_Event, _Obj, _State) ->
- %io:format("~p:~p, handle sync_event ~p\n", [?MODULE, ?LINE, Event]),
+ %io:format("~p:~p, handle sync_event ~tp\n", [?MODULE, ?LINE, Event]),
ok.
handle_call(_Event, _From, State) ->
- %io:format("~p:~p, handle call (~p) ~p\n", [?MODULE, ?LINE, From, Event]),
+ %io:format("~p:~p, handle call (~p) ~tp\n", [?MODULE, ?LINE, From, Event]),
{noreply, State}.
handle_cast(_Event, State) ->
- %io:format("~p:~p, handle cast ~p\n", [?MODULE, ?LINE, Event]),
+ %io:format("~p:~p, handle cast ~tp\n", [?MODULE, ?LINE, Event]),
{noreply, State}.
handle_info({no_rows, N}, State = #state{grid=Grid, status=StatusBar}) ->
@@ -433,7 +433,7 @@ handle_info(refresh_interval, State = #state{pid=Pid}) ->
handle_info({error, Error}, State = #state{frame=Frame}) ->
ErrorStr =
try io_lib:format("~ts", [Error]), Error
- catch _:_ -> io_lib:format("~p", [Error])
+ catch _:_ -> io_lib:format("~tp", [Error])
end,
Dlg = wxMessageDialog:new(Frame, ErrorStr),
wxMessageDialog:showModal(Dlg),
@@ -441,7 +441,7 @@ handle_info({error, Error}, State = #state{frame=Frame}) ->
{noreply, State};
handle_info(_Event, State) ->
- %% io:format("~p:~p, handle info ~p\n", [?MODULE, ?LINE, _Event]),
+ %% io:format("~p:~p, handle info ~tp\n", [?MODULE, ?LINE, _Event]),
{noreply, State}.
terminate(_Event, #state{pid=Pid, attrs=Attrs}) ->
@@ -554,7 +554,7 @@ table_holder(S0 = #holder{parent=Parent, pid=Pid, table=Table}) ->
edit_row(Row, Term, S0),
table_holder(S0);
What ->
- io:format("Table holder got ~p~n",[What]),
+ io:format("Table holder got ~tp~n",[What]),
Parent ! {refresh, 0, S0#holder.n-1},
table_holder(S0)
end.
@@ -641,7 +641,7 @@ search([Str, Row, Dir0, CaseSens],
true -> 1;
false -> -1
end,
- Res = case re:compile(Str, Opt) of
+ Res = case re:compile(Str, [unicode|Opt]) of
{ok, Re} -> re_search(Row, Dir, N, Re, Table);
{error, _} -> false
end,
@@ -665,7 +665,7 @@ get_row(From, Row, Col, Table) ->
[Object|_] when Col =:= all ->
From ! {self(), format(Object)};
[Object|_] when Col =:= all_multiline ->
- From ! {self(), io_lib:format("~p", [Object])};
+ From ! {self(), io_lib:format("~tp", [Object])};
[Object|_] when Col =:= term ->
From ! {self(), Object};
[Object|_] when tuple_size(Object) >= Col ->
@@ -801,7 +801,7 @@ format(Bin) when is_binary(Bin), byte_size(Bin) > 100 ->
io_lib:format("<<#Bin:~w>>", [byte_size(Bin)]);
format(Bin) when is_binary(Bin) ->
try
- true = printable_list(unicode:characters_to_list(Bin)),
+ true = io_lib:printable_list(unicode:characters_to_list(Bin)),
io_lib:format("<<\"~ts\">>", [Bin])
catch _:_ ->
io_lib:format("~w", [Bin])
@@ -809,7 +809,7 @@ format(Bin) when is_binary(Bin) ->
format(Float) when is_float(Float) ->
io_lib:format("~.3g", [Float]);
format(Term) ->
- io_lib:format("~w", [Term]).
+ io_lib:format("~tw", [Term]).
format_tuple(Tuple, I, Max) when I < Max ->
[format(element(I, Tuple)), $,|format_tuple(Tuple, I+1, Max)];
@@ -820,7 +820,7 @@ format_tuple(_Tuple, 1, 0) ->
format_list([]) -> "[]";
format_list(List) ->
- case printable_list(List) of
+ case io_lib:printable_list(List) of
true -> io_lib:format("\"~ts\"", [map_printable_list(List)]);
false -> [$[ | make_list(List)]
end.
@@ -849,26 +849,3 @@ map_printable_list([$\e|Cs]) ->
map_printable_list([]) -> [];
map_printable_list([C|Cs]) ->
[C|map_printable_list(Cs)].
-
-%% printable_list([Char]) -> bool()
-%% Return true if CharList is a list of printable characters, else
-%% false.
-
-printable_list([C|Cs]) when is_integer(C), C >= $ , C =< 255 ->
- printable_list(Cs);
-printable_list([$\n|Cs]) ->
- printable_list(Cs);
-printable_list([$\r|Cs]) ->
- printable_list(Cs);
-printable_list([$\t|Cs]) ->
- printable_list(Cs);
-printable_list([$\v|Cs]) ->
- printable_list(Cs);
-printable_list([$\b|Cs]) ->
- printable_list(Cs);
-printable_list([$\f|Cs]) ->
- printable_list(Cs);
-printable_list([$\e|Cs]) ->
- printable_list(Cs);
-printable_list([]) -> true;
-printable_list(_Other) -> false. %Everything else is false
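The hand-rolled printable_list/1 above is removed in favour of io_lib:printable_list/1, which also honours the runtime's printable character range (the +pc flag). A quick sketch:

    true  = io_lib:printable_list("plain text\n"),
    false = io_lib:printable_list([0, 1, 2]),
    %% whether characters above 255 count as printable depends on
    %% io:printable_range(), i.e. the +pc emulator flag
    io:printable_range().    % latin1 | unicode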
diff --git a/lib/observer/src/observer_tv_wx.erl b/lib/observer/src/observer_tv_wx.erl
index 9564bdfa1c..e16f3cab6b 100644
--- a/lib/observer/src/observer_tv_wx.erl
+++ b/lib/observer/src/observer_tv_wx.erl
@@ -252,7 +252,7 @@ handle_info(not_active, State = #state{timer = Timer0}) ->
{noreply, State#state{timer=Timer}};
handle_info({error, Error}, #state{panel=Panel,opt=Opt}=State) ->
- Str = io_lib:format("ERROR: ~s~n",[Error]),
+ Str = io_lib:format("ERROR: ~ts~n",[Error]),
observer_lib:display_info_dialog(Panel,Str),
case Opt#opt.type of
mnesia -> wxMenuBar:check(observer_wx:get_menubar(), ?ID_ETS, true);
diff --git a/lib/observer/src/observer_wx.erl b/lib/observer/src/observer_wx.erl
index 9b9e80f479..be93b1d5f1 100644
--- a/lib/observer/src/observer_wx.erl
+++ b/lib/observer/src/observer_wx.erl
@@ -459,7 +459,7 @@ handle_info({'EXIT', Pid, Reason}, State) ->
normal ->
{noreply, State};
_ ->
- io:format("Observer: Child (~s) crashed exiting: ~p ~p~n",
+ io:format("Observer: Child (~s) crashed exiting: ~p ~tp~n",
[pid2panel(Pid, State), Pid, Reason]),
{stop, normal, State}
end;
@@ -504,7 +504,7 @@ save_config(Panels) ->
File = config_file(),
case filelib:ensure_dir(File) of
ok ->
- Format = [io_lib:format("~p.~n",[Conf]) || Conf <- Configs],
+ Format = [io_lib:format("~tp.~n",[Conf]) || Conf <- Configs],
_ = file:write_file(File, Format);
_ ->
ignore
diff --git a/lib/observer/src/ttb.erl b/lib/observer/src/ttb.erl
index 09b0bc6710..940fdc9818 100644
--- a/lib/observer/src/ttb.erl
+++ b/lib/observer/src/ttb.erl
@@ -635,7 +635,7 @@ stop(Opts) when is_list(Opts) ->
ok;
{_, {stopped, _}} ->
%% Printout moved out of the ttb loop to avoid occasional deadlock
- io:format("Stored logs in ~s~n", [element(2, Result)]);
+ io:format("Stored logs in ~ts~n", [element(2, Result)]);
{_, _} ->
ok
end,
@@ -792,7 +792,7 @@ do_stop({FetchOrFormat, UserDir}, Sender, NodeInfo, SessionInfo) ->
write_config(?last_config, all),
Localhost = host(node()),
Dir = get_fetch_dir(UserDir, proplists:get_value(logfile, SessionInfo)),
- file:make_dir(Dir),
+ ok = filelib:ensure_dir(filename:join(Dir,"*")),
%% The nodes are traversed twice here because
%% the meta tracing in observer_backend must be
%% stopped before dbg is stopped, and dbg must
@@ -900,21 +900,29 @@ fetch_report(Localhost, Dir, Node, MetaFile) ->
fetch(Localhost,Dir,Node,MetaFile) ->
case (host(Node) == Localhost) orelse is_local(MetaFile) of
- true -> % same host, just move the files
+ true -> % same host, just move the files
Files = get_filenames(Node,MetaFile),
lists:foreach(
- fun(File0) ->
- Dest = filename:join(Dir,filename:basename(File0)),
- file:rename(File0, Dest)
- end,
- Files);
+ fun(File0) ->
+ Dest = filename:join(Dir,filename:basename(File0)),
+ file:rename(File0, Dest)
+ end,
+ Files);
false ->
{ok, LSock} = gen_tcp:listen(0, [binary,{packet,2},{active,false}]),
{ok,Port} = inet:port(LSock),
- rpc:cast(Node,observer_backend,ttb_fetch,
- [MetaFile,{Port,Localhost}]),
+ Enc = file:native_name_encoding(),
+ Args =
+ case rpc:call(Node,erlang,function_exported,
+ [observer_backend,ttb_fetch,3]) of
+ true ->
+ [MetaFile,{Port,Localhost},Enc];
+ false ->
+ [MetaFile,{Port,Localhost}]
+ end,
+ rpc:cast(Node,observer_backend,ttb_fetch,Args),
{ok, Sock} = gen_tcp:accept(LSock),
- receive_files(Dir,Sock,undefined),
+ receive_files(Dir,Sock,undefined,Enc),
ok = gen_tcp:close(LSock),
ok = gen_tcp:close(Sock)
end.
@@ -929,25 +937,48 @@ get_filenames(_N, {local,F,_}) ->
get_filenames(N, F) ->
rpc:call(N, observer_backend,ttb_get_filenames,[F]).
-receive_files(Dir,Sock,Fd) ->
+receive_files(Dir,Sock,Fd,Enc) ->
case gen_tcp:recv(Sock, 0) of
{ok, <<0,Bin/binary>>} ->
file:write(Fd,Bin),
- receive_files(Dir,Sock,Fd);
- {ok, <<1,Bin/binary>>} ->
- File0 = binary_to_list(Bin),
+ receive_files(Dir,Sock,Fd,Enc);
+ {ok, <<Code,Bin/binary>>} when Code==1; Code==2; Code==3 ->
+ File0 = decode_filename(Code,Bin,Enc),
File = filename:join(Dir,File0),
{ok,Fd1} = file:open(File,[raw,write]),
- receive_files(Dir,Sock,Fd1);
+ receive_files(Dir,Sock,Fd1,Enc);
{error, closed} ->
ok = file:close(Fd)
end.
+decode_filename(1,Bin,_Enc) ->
+ %% Old version of observer_backend - filename encoded with
+ %% list_to_binary
+ binary_to_list(Bin);
+decode_filename(2,Bin,Enc) ->
+ %% Successfully encoded filename with correct encoding
+ unicode:characters_to_list(Bin,Enc);
+decode_filename(3,Bin,latin1) ->
+ %% Filename encoded with a mismatched encoding. This can only
+ %% happen when the remote node uses utf8 and this node latin1,
+ %% and the filename contains characters outside the latin1 range.
+ %% Write an escaped variant of the filename and warn about it.
+ File0 = unicode:characters_to_list(Bin,utf8),
+ File = [ case X of
+ High when High > 255 ->
+ ["\\\\x{",erlang:integer_to_list(X, 16),$}];
+ Low ->
+ Low
+ end || X <- File0 ],
+ io:format("Warning: fetching file with faulty filename encoding ~ts~n"
+ "Will be written as ~ts~n",
+ [File0,File]),
+ File.
+
host(Node) ->
[_name,Host] = string:tokens(atom_to_list(Node),"@"),
Host.
-
wait_for_fetch([]) ->
ok;
wait_for_fetch(Nodes) ->
@@ -1087,7 +1118,7 @@ read_traci(File) ->
{ok,B} ->
interpret_binary(B,dict:new(),[]);
_ ->
- io:format("Warning: no meta data file: ~s~n",[MetaFile]),
+ io:format("Warning: no meta data file: ~ts~n",[MetaFile]),
{dict:new(),[]}
end.
@@ -1303,7 +1334,7 @@ get_term(B) ->
end.
display_warning(Item,Warning) ->
- io:format("Warning: {~w,~w}~n",[Warning,Item]).
+ io:format("Warning: {~tw,~tw}~n",[Warning,Item]).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
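When an old remote observer_backend reports a filename in an encoding this node cannot represent (code 3 above), the fetch keeps the data but rewrites code points above 255 as \x{...} escapes so a file can still be created locally. Roughly, for an illustrative name:

    File0 = "log_αβ.ttb",
    File = [case X of
                High when High > 255 ->
                    ["\\x{", erlang:integer_to_list(X, 16), $}];
                Low ->
                    Low
            end || X <- File0],
    %% α (16#3B1) and β (16#3B2) become the literal text \x{3B1} and \x{3B2}
    io:format("Will be written as ~ts~n", [File]).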
diff --git a/lib/observer/src/ttb_et.erl b/lib/observer/src/ttb_et.erl
index 95e8e9aa07..1b828eebc0 100644
--- a/lib/observer/src/ttb_et.erl
+++ b/lib/observer/src/ttb_et.erl
@@ -137,13 +137,13 @@ processes(E0) ->
E = label(E0),
{{FromProc,FromNode},{ToProc,ToNode}} =
get_actors(E#event.from,E#event.to),
- {true,E#event{from = io_lib:format("~w~n~w",[FromProc,FromNode]),
- to = io_lib:format("~w~n~w",[ToProc,ToNode])}}.
+ {true,E#event{from = io_lib:format("~tw~n~w",[FromProc,FromNode]),
+ to = io_lib:format("~tw~n~w",[ToProc,ToNode])}}.
mods_and_procs(E) ->
ActorFun = fun({M,_F,_A},{Proc,Node}) ->
- io_lib:format("~w~n~w~n~w",[M,Proc,Node])
+ io_lib:format("~w~n~tw~n~w",[M,Proc,Node])
end,
calltrace_filter(E,ActorFun).
@@ -155,13 +155,13 @@ modules(E) ->
funcs_and_procs(E) ->
ActorFun = fun({M,F,A},{Proc,Node}) ->
- io_lib:format("~s~n~w~n~w",[mfa(M,F,A),Proc,Node])
+ io_lib:format("~ts~n~tw~n~w",[mfa(M,F,A),Proc,Node])
end,
calltrace_filter(E,ActorFun).
functions(E) ->
ActorFun = fun({M,F,A},{_Proc,Node}) ->
- io_lib:format("~s~n~w",[mfa(M,F,A),Node])
+ io_lib:format("~ts~n~w",[mfa(M,F,A),Node])
end,
calltrace_filter(E,ActorFun).
@@ -221,7 +221,7 @@ label(Event=#event{label=L,contents=C}) ->
false -> Event
end.
label(L,{M,F,A}) -> label(L,M,F,A);
-label(L,Other) -> io_lib:format("~w ~w",[L,Other]).
+label(L,Other) -> io_lib:format("~w ~tw",[L,Other]).
label(call,M,F,A) -> "call " ++ mfa(M,F,A);
label(return_from,M,F,A) -> "return_from " ++ mfa(M,F,A);
label(return_to,M,F,A) -> "return_to " ++ mfa(M,F,A);
diff --git a/lib/observer/test/Makefile b/lib/observer/test/Makefile
index 6100af5e17..fcb1b73911 100644
--- a/lib/observer/test/Makefile
+++ b/lib/observer/test/Makefile
@@ -27,7 +27,8 @@ MODULES = \
ttb_SUITE \
client \
server \
- crashdump_helper
+ crashdump_helper \
+ crashdump_helper_unicode
ERL_FILES= $(MODULES:%=%.erl)
@@ -46,7 +47,7 @@ RELSYSDIR = $(RELEASE_PATH)/observer_test
# FLAGS
# ----------------------------------------------------
ERL_MAKE_FLAGS +=
-ERL_COMPILE_FLAGS +=
+ERL_COMPILE_FLAGS += +nowarn_export_all
EBIN = .
diff --git a/lib/observer/test/crashdump_helper_unicode.erl b/lib/observer/test/crashdump_helper_unicode.erl
new file mode 100644
index 0000000000..60c3d20315
--- /dev/null
+++ b/lib/observer/test/crashdump_helper_unicode.erl
@@ -0,0 +1,22 @@
+-module(crashdump_helper_unicode).
+-behaviour(gen_server).
+-export([start/0, init/1, handle_call/3, handle_cast/2]).
+-record(state, {s,a,b,lb}).
+
+start() ->
+ gen_server:start({local, 'unicode_reg_name_αβ'}, ?MODULE, [], []).
+
+init([]) ->
+ process_flag(trap_exit, true),
+ ets:new('tab_αβ',[set,named_table]),
+ Bin = <<"bin αβ"/utf8>>,
+ LongBin = <<"long bin αβ - a utf8 binary which can be expanded αβ"/utf8>>,
+ {ok, #state{s = "unicode_string_αβ",
+ a = 'unicode_atom_αβ',
+ b = Bin,
+ lb = LongBin}}.
+
+handle_call(_Info, _From, State) ->
+ {reply, ok, State}.
+handle_cast(_Info, State) ->
+ {noreply, State}.
diff --git a/lib/observer/test/crashdump_viewer_SUITE.erl b/lib/observer/test/crashdump_viewer_SUITE.erl
index 1fd94ffb3c..77cf086d4b 100644
--- a/lib/observer/test/crashdump_viewer_SUITE.erl
+++ b/lib/observer/test/crashdump_viewer_SUITE.erl
@@ -427,6 +427,18 @@ special(File,Procs) ->
{ok,_,[TW]} = crashdump_viewer:general_info(),
{match,_} = re:run(TW,"CRASH DUMP SIZE LIMIT REACHED"),
ok;
+ ".unicode" ->
+ #proc{pid=Pid0} =
+ lists:keyfind("'unicode_reg_name_αβ'",#proc.name,Procs),
+ Pid = pid_to_list(Pid0),
+ {ok,#proc{},[]} = crashdump_viewer:proc_details(Pid),
+ io:format(" unicode registered name ok",[]),
+
+ {ok,[#ets_table{id="'tab_αβ'",name="'tab_αβ'"}],[]} =
+ crashdump_viewer:ets_tables(Pid),
+ io:format(" unicode table name ok",[]),
+
+ ok;
_ ->
ok
end,
@@ -492,7 +504,8 @@ do_create_dumps(DataDir,Rel) ->
CD5 = dump_with_args(DataDir,Rel,"trunc.bytes",
"-env ERL_CRASH_DUMP_BYTES " ++
integer_to_list(Bytes)),
- {[CD1,CD2,CD3,CD4,CD5], DosDump};
+ CD6 = dump_with_unicode_atoms(DataDir,Rel,"unicode"),
+ {[CD1,CD2,CD3,CD4,CD5,CD6], DosDump};
_ ->
{[CD1,CD2], DosDump}
end.
@@ -573,6 +586,16 @@ dump_with_strange_module_name(DataDir,Rel,DumpName) ->
?t:stop_node(n1),
CD.
+dump_with_unicode_atoms(DataDir,Rel,DumpName) ->
+ Opt = rel_opt(Rel),
+ Pz = "-pz \"" ++ filename:dirname(code:which(?MODULE)) ++ "\"",
+ PzOpt = [{args,Pz}],
+ {ok,N1} = ?t:start_node(n1,peer,Opt ++ PzOpt),
+ {ok,_Pid} = rpc:call(N1,crashdump_helper_unicode,start,[]),
+ CD = dump(N1,DataDir,Rel,DumpName),
+ ?t:stop_node(n1),
+ CD.
+
dump(Node,DataDir,Rel,DumpName) ->
Crashdump = filename:join(DataDir, dump_prefix(Rel)++DumpName),
rpc:call(Node,os,putenv,["ERL_CRASH_DUMP",Crashdump]),
diff --git a/lib/os_mon/src/disksup.erl b/lib/os_mon/src/disksup.erl
index 492e4814da..044604b000 100644
--- a/lib/os_mon/src/disksup.erl
+++ b/lib/os_mon/src/disksup.erl
@@ -285,7 +285,7 @@ check_disk_space({unix, sunos4}, Port, Threshold) ->
Result = my_cmd("df", Port),
check_disks_solaris(skip_to_eol(Result), Threshold);
check_disk_space({unix, darwin}, Port, Threshold) ->
- Result = my_cmd("/bin/df -i -k -t ufs,hfs", Port),
+ Result = my_cmd("/bin/df -i -k -t ufs,hfs,apfs", Port),
check_disks_susv3(skip_to_eol(Result), Threshold).
% This code works for Linux and FreeBSD as well
diff --git a/lib/public_key/doc/src/public_key.xml b/lib/public_key/doc/src/public_key.xml
index 04966ffb9c..942203bd12 100644
--- a/lib/public_key/doc/src/public_key.xml
+++ b/lib/public_key/doc/src/public_key.xml
@@ -129,18 +129,31 @@
<p><c>| 'rsa_no_padding'</c></p>
</item>
+ <tag><c>public_sign_options() =</c></tag>
+ <item><p><c>[{rsa_pad, rsa_sign_padding()} | {rsa_pss_saltlen, integer()}]</c></p></item>
+
+ <tag><c>rsa_sign_padding() =</c></tag>
+ <item>
+ <p><c>'rsa_pkcs1_padding'</c></p>
+ <p><c>| 'rsa_pkcs1_pss_padding'</c></p>
+ </item>
+
<tag><c>digest_type() = </c></tag>
<item><p>Union of <c>rsa_digest_type()</c>, <c>dss_digest_type()</c>,
and <c>ecdsa_digest_type()</c>.</p></item>
<tag><c>rsa_digest_type() = </c></tag>
- <item><p><c>'md5' | 'sha' | 'sha224' | 'sha256' | 'sha384' | 'sha512'</c></p></item>
+ <item><p><c>'md5' | 'ripemd160' | 'sha' | 'sha224' | 'sha256' | 'sha384' | 'sha512'</c></p></item>
<tag><c>dss_digest_type() = </c></tag>
- <item><p><c>'sha'</c></p></item>
+ <item><p><c>'sha' | 'sha224' | 'sha256' | 'sha384' | 'sha512'</c></p>
+ <p>Note that the set of supported dss_digest_type values depends on the underlying crypto library.
+ With OpenSSL version >= 1.0.1 all of the listed digests are supported, with 1.0.0 only
+ sha, sha224 and sha256 are supported, and with 0.9.8 only sha is supported.</p>
+ </item>
<tag><c>ecdsa_digest_type() = </c></tag>
- <item><p><c>'sha'| 'sha224' | 'sha256' | 'sha384' | 'sha512'</c></p></item>
+ <item><p><c>'sha' | 'sha224' | 'sha256' | 'sha384' | 'sha512'</c></p></item>
<tag><c>crl_reason() = </c></tag>
<item>
@@ -621,8 +634,8 @@ fun(OtpCert :: #'OTPCertificate'{},
<v>OTPCertificate = #'OTPCertificate'{}</v>
<v>DPAndCRLs = [{DP::#'DistributionPoint'{}, {DerCRL::der_encoded(), CRL::#'CertificateList'{}}}] </v>
<v>Options = proplists:proplist()</v>
- <v>CRLStatus() = valid | {bad_cert, revocation_status_undetermined} |
- {bad_cert, {revoked, crl_reason()}}</v>
+ <v>CRLStatus() = valid | {bad_cert, revocation_status_undetermined} | {bad_cert, {revocation_status_undetermined,
+ {bad_crls, Details::term()}}} | {bad_cert, {revoked, crl_reason()}}</v>
</type>
<desc>
<p>Performs CRL validation. It is intended to be called from
@@ -650,7 +663,7 @@ fun(OtpCert :: #'OTPCertificate'{},
<tag>{issuer_fun, fun()}</tag>
<item>
<p>The fun has the following type specification:</p>
-
+
<code>
fun(#'DistributionPoint'{}, #'CertificateList'{},
{rdnSequence,[#'AttributeTypeAndValue'{}]}, term()) ->
@@ -660,7 +673,15 @@ fun(#'DistributionPoint'{}, #'CertificateList'{},
that has signed the CRL.
</p>
<code> fun(DP, CRL, Issuer, UserState) -> {ok, RootCert, CertChain}</code>
- </item>
+ </item>
+
+ <tag>{undetermined_details, boolean()}</tag>
+ <item>
+ <p>Defaults to false. When the revocation status cannot be
+ determined and this option is set to true, details of why no
+ CRLs were accepted are included in the return value.</p>
+ </item>
+
</taglist>
</desc>
</func>
@@ -795,6 +816,7 @@ fun(#'DistributionPoint'{}, #'CertificateList'{},
<func>
<name>sign(Msg, DigestType, Key) -> binary()</name>
+ <name>sign(Msg, DigestType, Key, Options) -> binary()</name>
<fsummary>Creates a digital signature.</fsummary>
<type>
<v>Msg = binary() | {digest,binary()}</v>
@@ -803,6 +825,7 @@ fun(#'DistributionPoint'{}, #'CertificateList'{},
digest.</d>
<v>DigestType = rsa_digest_type() | dss_digest_type() | ecdsa_digest_type()</v>
<v>Key = rsa_private_key() | dsa_private_key() | ec_private_key()</v>
+ <v>Options = public_sign_options()</v>
</type>
<desc>
<p>Creates a digital signature.</p>
@@ -895,6 +918,7 @@ fun(#'DistributionPoint'{}, #'CertificateList'{},
<func>
<name>verify(Msg, DigestType, Signature, Key) -> boolean()</name>
+ <name>verify(Msg, DigestType, Signature, Key, Options) -> boolean()</name>
<fsummary>Verifies a digital signature.</fsummary>
<type>
<v>Msg = binary() | {digest,binary()}</v>
@@ -903,6 +927,7 @@ fun(#'DistributionPoint'{}, #'CertificateList'{},
<v>DigestType = rsa_digest_type() | dss_digest_type() | ecdsa_digest_type()</v>
<v>Signature = binary()</v>
<v>Key = rsa_public_key() | dsa_public_key() | ec_public_key()</v>
+ <v>Options = public_sign_options()</v>
</type>
<desc>
<p>Verifies a digital signature.</p>
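The new Options argument on sign/verify forwards RSA signing options to crypto, so RSASSA-PSS signatures can be produced and checked through public_key. A hedged sketch (parameters are illustrative; PSS support depends on the underlying OpenSSL version):

    Key = public_key:generate_key({rsa, 2048, 65537}),
    Msg = <<"pss signed message">>,
    Opts = [{rsa_pad, rsa_pkcs1_pss_padding}, {rsa_pss_saltlen, 32}],
    Signature = public_key:sign(Msg, sha256, Key, Opts),
    true = public_key:verify(Msg, sha256, Signature, Key, Opts).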
diff --git a/lib/public_key/include/public_key.hrl b/lib/public_key/include/public_key.hrl
index a1e7dd31bc..663e1856ac 100644
--- a/lib/public_key/include/public_key.hrl
+++ b/lib/public_key/include/public_key.hrl
@@ -70,7 +70,8 @@
reasons_mask,
cert_status,
interim_reasons_mask,
- valid_ext
+ valid_ext,
+ details
}).
-record('ECPoint', {
diff --git a/lib/public_key/src/pubkey_crl.erl b/lib/public_key/src/pubkey_crl.erl
index 33bef91827..3621e9c0da 100644
--- a/lib/public_key/src/pubkey_crl.erl
+++ b/lib/public_key/src/pubkey_crl.erl
@@ -58,7 +58,8 @@ validate(OtpCert, OtherDPCRLs, DP, {DerCRL, CRL}, {DerDeltaCRL, DeltaCRL},
init_revokation_state() ->
#revoke_state{reasons_mask = sets:new(),
interim_reasons_mask = sets:new(),
- cert_status = unrevoked}.
+ cert_status = unrevoked,
+ details = []}.
fresh_crl(_, {undefined, undefined}, _) ->
%% Typically happens when there is no delta CRL that covers a CRL
@@ -152,9 +153,10 @@ verify_crl(OtpCert, DP, CRL, DerCRL, DeltaCRL, DerDeltaCRL, OtherDPCRLs,
RevokedState,
CRL, DerCRL, DeltaCRL, DerDeltaCRL,
IssuerFun, TrustedOtpCert, Path, OtherDPCRLs, IDP);
- _ ->
- {invalid, State0#revoke_state{valid_ext = ValidExt}}
- end;
+ _ ->
+ Details = RevokedState#revoke_state.details,
+ {invalid, RevokedState#revoke_state{valid_ext = ValidExt, details = [{{bad_crl, no_issuer_cert_chain}, CRL} | Details]}}
+ end;
{error, issuer_not_found} ->
case Fun(DP, CRL, issuer_not_found, AdditionalArgs) of
{ok, TrustedOtpCert, Path} ->
@@ -163,13 +165,16 @@ verify_crl(OtpCert, DP, CRL, DerCRL, DeltaCRL, DerDeltaCRL, OtherDPCRLs,
DerDeltaCRL, IssuerFun,
TrustedOtpCert, Path, OtherDPCRLs, IDP);
_ ->
- {invalid, {skip, State0}}
- end
+ Details = State0#revoke_state.details,
+ {invalid, {skip, State0#revoke_state{details = [{{bad_crl, no_issuer_cert_chain}, CRL} | Details] }}}
+ end
catch
- throw:{bad_crl, invalid_issuer} ->
- {invalid, {skip, State0}};
- throw:_ ->
- {invalid, State0#revoke_state{valid_ext = ValidExt}}
+ throw:{bad_crl, invalid_issuer} = Reason ->
+ Details = RevokedState#revoke_state.details,
+ {invalid, {skip, RevokedState#revoke_state{details = [{Reason, CRL} | Details]}}};
+ throw:Reason ->
+ Details = RevokedState#revoke_state.details,
+ {invalid, RevokedState#revoke_state{details = [{Reason, CRL} | Details]}}
end.
verify_mask_and_signatures(Revoked, DeltaRevoked, RevokedState, CRL, DerCRL, DeltaCRL, DerDeltaCRL,
@@ -183,10 +188,12 @@ verify_mask_and_signatures(Revoked, DeltaRevoked, RevokedState, CRL, DerCRL, Del
TrustedOtpCert, Path, IssuerFun, OtherDPCRLs, IDP),
{valid, Revoked, DeltaRevoked, RevokedState#revoke_state{reasons_mask = ReasonsMask}, IDP}
catch
- throw:_ ->
- {invalid, RevokedState};
+ throw:Reason ->
+ Details = RevokedState#revoke_state.details,
+ {invalid, RevokedState#revoke_state{details = [{Reason, CRL} | Details]}};
error:{badmatch, _} ->
- {invalid, RevokedState}
+ Details = RevokedState#revoke_state.details,
+ {invalid, RevokedState#revoke_state{details = [{{bad_crl, invalid_signature}, CRL} | Details]}}
end.
@@ -356,7 +363,7 @@ verify_scope(#'OTPCertificate'{tbsCertificate = TBSCert}, #'DistributionPoint'{c
verify_scope(DPName, IDPName, Names, TBSCert, IDP).
verify_scope(asn1_NOVALUE, _, asn1_NOVALUE, _, _) ->
- throw({bad_crl, scope_error1});
+ throw({bad_crl, scope_error});
verify_scope(asn1_NOVALUE, IDPName, DPIssuerNames, TBSCert, IDP) ->
verify_dp_name(IDPName, DPIssuerNames),
verify_dp_bools(TBSCert, IDP);
diff --git a/lib/public_key/src/pubkey_ssh.erl b/lib/public_key/src/pubkey_ssh.erl
index 9bda76d670..75c1880655 100644
--- a/lib/public_key/src/pubkey_ssh.erl
+++ b/lib/public_key/src/pubkey_ssh.erl
@@ -79,7 +79,9 @@ dh_gex_group(Min, N, Max, undefined) ->
dh_gex_group(Min, N, Max, Groups) ->
case select_by_keylen(Min-10, N, Max+10, Groups) of
{ok,{Sz,GPs}} ->
- {ok, {Sz,lists:nth(crypto:rand_uniform(1, 1+length(GPs)), GPs)}};
+ Rnd = rand:uniform( length(GPs) ),
+ %% 1 =< Rnd =< length(GPs)
+ {ok, {Sz, lists:nth(Rnd,GPs)}};
Other ->
Other
end.
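The deprecated crypto:rand_uniform/2 call is replaced with rand:uniform/1, which returns N with 1 =< N =< Max and therefore indexes lists:nth/2 directly; the pick is only among published safe-prime groups, so a non-cryptographic generator is acceptable here. A one-liner with placeholder group data:

    GPs = [gp1, gp2, gp3],                            % placeholder {G,P} entries
    Pick = lists:nth(rand:uniform(length(GPs)), GPs).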
diff --git a/lib/public_key/src/public_key.erl b/lib/public_key/src/public_key.erl
index 6651e9510e..c2060c144c 100644
--- a/lib/public_key/src/public_key.erl
+++ b/lib/public_key/src/public_key.erl
@@ -37,7 +37,7 @@
decrypt_public/2, decrypt_public/3,
dh_gex_group/4,
dh_gex_group_sizes/0,
- sign/3, verify/4,
+ sign/3, sign/4, verify/4, verify/5,
generate_key/1,
compute_key/2, compute_key/3,
pkix_sign/2, pkix_verify/2,
@@ -90,10 +90,12 @@
auth_keys.
-type rsa_padding() :: 'rsa_pkcs1_padding' | 'rsa_pkcs1_oaep_padding'
| 'rsa_no_padding'.
+-type rsa_sign_padding() :: 'rsa_pkcs1_padding' | 'rsa_pkcs1_pss_padding'.
-type public_crypt_options() :: [{rsa_pad, rsa_padding()}].
--type rsa_digest_type() :: 'md5' | 'sha'| 'sha224' | 'sha256' | 'sha384' | 'sha512'.
--type dss_digest_type() :: 'none' | 'sha'. %% None is for backwards compatibility
--type ecdsa_digest_type() :: 'sha'| 'sha224' | 'sha256' | 'sha384' | 'sha512'.
+-type rsa_digest_type() :: 'md5' | 'ripemd160' | 'sha' | 'sha224' | 'sha256' | 'sha384' | 'sha512'.
+-type dss_digest_type() :: 'none' | 'sha' | 'sha224' | 'sha256' | 'sha384' | 'sha512'. %% None is for backwards compatibility
+-type ecdsa_digest_type() :: 'sha' | 'sha224' | 'sha256' | 'sha384' | 'sha512'.
+-type public_sign_options() :: [{rsa_pad, rsa_sign_padding()} | {rsa_pss_saltlen, integer()}].
-type digest_type() :: rsa_digest_type() | dss_digest_type() | ecdsa_digest_type().
-type crl_reason() :: unspecified | keyCompromise | cACompromise | affiliationChanged | superseded
| cessationOfOperation | certificateHold | privilegeWithdrawn | aACompromise.
@@ -417,7 +419,7 @@ generate_key({rsa, ModulusSize, PublicExponent}) ->
{[E, N], [E, N, D, P, Q, D_mod_P_1, D_mod_Q_1, InvQ_mod_P]} ->
Nint = crypto:bytes_to_integer(N),
Eint = crypto:bytes_to_integer(E),
- #'RSAPrivateKey'{version = 0, % Two-factor (I guess since otherPrimeInfos is not given)
+ #'RSAPrivateKey'{version = 'two-prime', % two-prime, since otherPrimeInfos is not given
modulus = Nint,
publicExponent = Eint,
privateExponent = crypto:bytes_to_integer(D),
@@ -435,7 +437,7 @@ generate_key({rsa, ModulusSize, PublicExponent}) ->
% 1976.
Nint = crypto:bytes_to_integer(N),
Eint = crypto:bytes_to_integer(E),
- #'RSAPrivateKey'{version = 0, % Two-factor (I guess since otherPrimeInfos is not given)
+ #'RSAPrivateKey'{version = 'two-prime', % two-prime, since otherPrimeInfos is not given
modulus = Nint,
publicExponent = Eint,
privateExponent = crypto:bytes_to_integer(D),
@@ -498,35 +500,67 @@ pkix_sign_types(?'ecdsa-with-SHA512') ->
{sha512, ecdsa}.
%%--------------------------------------------------------------------
--spec sign(binary() | {digest, binary()}, rsa_digest_type() | dss_digest_type() | ecdsa_digest_type(),
- rsa_private_key() |
- dsa_private_key() | ec_private_key()) -> Signature :: binary().
-%% Description: Create digital signature.
-%%--------------------------------------------------------------------
-sign(DigestOrPlainText, DigestType, Key = #'RSAPrivateKey'{}) ->
- crypto:sign(rsa, DigestType, DigestOrPlainText, format_rsa_private_key(Key));
+-spec sign(binary() | {digest, binary()},
+ rsa_digest_type() | dss_digest_type() | ecdsa_digest_type(),
+ rsa_private_key() | dsa_private_key() | ec_private_key()
+ ) -> Signature :: binary().
-sign(DigestOrPlainText, sha, #'DSAPrivateKey'{p = P, q = Q, g = G, x = X}) ->
- crypto:sign(dss, sha, DigestOrPlainText, [P, Q, G, X]);
+-spec sign(binary() | {digest, binary()},
+ rsa_digest_type() | dss_digest_type() | ecdsa_digest_type(),
+ rsa_private_key() | dsa_private_key() | ec_private_key(),
+ public_sign_options()
+ ) -> Signature :: binary().
-sign(DigestOrPlainText, DigestType, #'ECPrivateKey'{privateKey = PrivKey,
- parameters = Param}) ->
- ECCurve = ec_curve_spec(Param),
- crypto:sign(ecdsa, DigestType, DigestOrPlainText, [PrivKey, ECCurve]);
+%% Description: Create digital signature.
+%%--------------------------------------------------------------------
+sign(DigestOrPlainText, DigestType, Key) ->
+ sign(DigestOrPlainText, DigestType, Key, []).
%% Backwards compatible
-sign(Digest, none, #'DSAPrivateKey'{} = Key) ->
- sign({digest,Digest}, sha, Key).
+sign(Digest, none, Key = #'DSAPrivateKey'{}, Options) when is_binary(Digest) ->
+ sign({digest, Digest}, sha, Key, Options);
+sign(DigestOrPlainText, DigestType, Key, Options) ->
+ case format_sign_key(Key) of
+ badarg ->
+ erlang:error(badarg, [DigestOrPlainText, DigestType, Key, Options]);
+ {Algorithm, CryptoKey} ->
+ crypto:sign(Algorithm, DigestType, DigestOrPlainText, CryptoKey, Options)
+ end.
%%--------------------------------------------------------------------
--spec verify(binary() | {digest, binary()}, rsa_digest_type() | dss_digest_type() | ecdsa_digest_type(),
- Signature :: binary(), rsa_public_key()
- | dsa_public_key() | ec_public_key()) -> boolean().
+-spec verify(binary() | {digest, binary()},
+ rsa_digest_type() | dss_digest_type() | ecdsa_digest_type(),
+ Signature :: binary(),
+ rsa_public_key() | dsa_public_key() | ec_public_key()
+ | rsa_private_key() | dsa_private_key() | ec_private_key()
+ ) -> boolean().
+
+-spec verify(binary() | {digest, binary()},
+ rsa_digest_type() | dss_digest_type() | ecdsa_digest_type(),
+ Signature :: binary(),
+ rsa_public_key() | dsa_public_key() | ec_public_key()
+ | rsa_private_key() | dsa_private_key() | ec_private_key(),
+ public_sign_options()
+ ) -> boolean().
+
%% Description: Verifies a digital signature.
%%--------------------------------------------------------------------
-verify(DigestOrPlainText, DigestType, Signature, Key) when is_binary(Signature) ->
- do_verify(DigestOrPlainText, DigestType, Signature, Key);
-verify(_,_,_,_) ->
+verify(DigestOrPlainText, DigestType, Signature, Key) ->
+ verify(DigestOrPlainText, DigestType, Signature, Key, []).
+
+%% Backwards compatible
+verify(Digest, none, Signature, Key = {_, #'Dss-Parms'{}}, Options) when is_binary(Digest) ->
+ verify({digest, Digest}, sha, Signature, Key, Options);
+verify(Digest, none, Signature, Key = #'DSAPrivateKey'{}, Options) when is_binary(Digest) ->
+ verify({digest, Digest}, sha, Signature, Key, Options);
+verify(DigestOrPlainText, DigestType, Signature, Key, Options) when is_binary(Signature) ->
+ case format_verify_key(Key) of
+ badarg ->
+ erlang:error(badarg, [DigestOrPlainText, DigestType, Signature, Key, Options]);
+ {Algorithm, CryptoKey} ->
+ crypto:verify(Algorithm, DigestType, DigestOrPlainText, Signature, CryptoKey, Options)
+ end;
+verify(_,_,_,_,_) ->
%% If Signature is a bitstring and not a binary we know already at this
%% point that the signature is invalid.
false.
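
A minimal usage sketch of the new arities, assuming Msg is a binary and Priv/Pub are a matching RSA key pair; the option names follow the public_sign_options() type introduced above, and the salt length is an arbitrary example value:

    sign_and_verify_pss(Msg, Priv, Pub) ->
        Opts = [{rsa_pad, rsa_pkcs1_pss_padding}, {rsa_pss_saltlen, 32}],
        Sig = public_key:sign(Msg, sha256, Priv, Opts),
        true = public_key:verify(Msg, sha256, Sig, Pub, Opts),
        Sig.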
@@ -789,8 +823,9 @@ pkix_path_validation(#'OTPCertificate'{} = TrustedCert, CertChain, Options)
%--------------------------------------------------------------------
-spec pkix_crls_validate(#'OTPCertificate'{},
[{DP::#'DistributionPoint'{}, {DerCRL::binary(), CRL::#'CertificateList'{}}}],
- Options :: proplists:proplist()) -> valid | {bad_cert, revocation_status_undetermined}
- | {bad_cert, {revoked, crl_reason()}}.
+ Options :: proplists:proplist()) -> valid | {bad_cert, revocation_status_undetermined} |
+ {bad_cert, {revocation_status_undetermined, Reason::term()}} |
+ {bad_cert, {revoked, crl_reason()}}.
%% Description: Performs a CRL validation according to RFC 5280.
%%--------------------------------------------------------------------
@@ -993,22 +1028,32 @@ short_name_hash({rdnSequence, _Attributes} = Name) ->
%%--------------------------------------------------------------------
%%% Internal functions
%%--------------------------------------------------------------------
-do_verify(DigestOrPlainText, DigestType, Signature,
- #'RSAPublicKey'{modulus = Mod, publicExponent = Exp}) ->
- crypto:verify(rsa, DigestType, DigestOrPlainText, Signature,
- [Exp, Mod]);
-
-do_verify(DigestOrPlaintext, DigestType, Signature, {#'ECPoint'{point = Point}, Param}) ->
- ECCurve = ec_curve_spec(Param),
- crypto:verify(ecdsa, DigestType, DigestOrPlaintext, Signature, [Point, ECCurve]);
-
-%% Backwards compatibility
-do_verify(Digest, none, Signature, {_, #'Dss-Parms'{}} = Key ) ->
- verify({digest,Digest}, sha, Signature, Key);
-
-do_verify(DigestOrPlainText, sha = DigestType, Signature, {Key, #'Dss-Parms'{p = P, q = Q, g = G}})
- when is_integer(Key), is_binary(Signature) ->
- crypto:verify(dss, DigestType, DigestOrPlainText, Signature, [P, Q, G, Key]).
+format_sign_key(Key = #'RSAPrivateKey'{}) ->
+ {rsa, format_rsa_private_key(Key)};
+format_sign_key(#'DSAPrivateKey'{p = P, q = Q, g = G, x = X}) ->
+ {dss, [P, Q, G, X]};
+format_sign_key(#'ECPrivateKey'{privateKey = PrivKey, parameters = Param}) ->
+ {ecdsa, [PrivKey, ec_curve_spec(Param)]};
+format_sign_key(_) ->
+ badarg.
+
+format_verify_key(#'RSAPublicKey'{modulus = Mod, publicExponent = Exp}) ->
+ {rsa, [Exp, Mod]};
+format_verify_key({#'ECPoint'{point = Point}, Param}) ->
+ {ecdsa, [Point, ec_curve_spec(Param)]};
+format_verify_key({Key, #'Dss-Parms'{p = P, q = Q, g = G}}) ->
+ {dss, [P, Q, G, Key]};
+%% Convert private keys to public keys
+format_verify_key(#'RSAPrivateKey'{modulus = Mod, publicExponent = Exp}) ->
+ format_verify_key(#'RSAPublicKey'{modulus = Mod, publicExponent = Exp});
+format_verify_key(#'ECPrivateKey'{parameters = Param, publicKey = {_, Point}}) ->
+ format_verify_key({#'ECPoint'{point = Point}, Param});
+format_verify_key(#'ECPrivateKey'{parameters = Param, publicKey = Point}) ->
+ format_verify_key({#'ECPoint'{point = Point}, Param});
+format_verify_key(#'DSAPrivateKey'{y=Y, p=P, q=Q, g=G}) ->
+ format_verify_key({Y, #'Dss-Parms'{p=P, q=Q, g=G}});
+format_verify_key(_) ->
+ badarg.
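
Because format_verify_key/1 now derives the public part from a private key, verify/4,5 also accepts private keys, which is convenient for self-tests. A hedged sketch:

    %% Sign and verify with the same #'RSAPrivateKey'{} record.
    self_check(Msg, Priv) ->
        Sig = public_key:sign(Msg, sha256, Priv),
        true = public_key:verify(Msg, sha256, Sig, Priv).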
do_pem_entry_encode(Asn1Type, Entity, CipherInfo, Password) ->
Der = der_encode(Asn1Type, Entity),
@@ -1121,8 +1166,13 @@ der_cert(#'OTPCertificate'{} = Cert) ->
der_cert(Der) when is_binary(Der) ->
Der.
-pkix_crls_validate(_, [],_, _, _) ->
- {bad_cert, revocation_status_undetermined};
+pkix_crls_validate(_, [],_, Options, #revoke_state{details = Details}) ->
+ case proplists:get_value(undetermined_details, Options, false) of
+ false ->
+ {bad_cert, revocation_status_undetermined};
+ true ->
+ {bad_cert, {revocation_status_undetermined, {bad_crls, format_details(Details)}}}
+ end;
pkix_crls_validate(OtpCert, [{DP, CRL, DeltaCRL} | Rest], All, Options, RevokedState0) ->
CallBack = proplists:get_value(update_crl, Options, fun(_, CurrCRL) ->
CurrCRL
@@ -1142,9 +1192,14 @@ pkix_crls_validate(OtpCert, [{DP, CRL, DeltaCRL} | Rest], All, Options, Revoked
do_pkix_crls_validate(OtpCert, [{DP, CRL, DeltaCRL} | Rest], All, Options, RevokedState0) ->
OtherDPCRLs = All -- [{DP, CRL, DeltaCRL}],
case pubkey_crl:validate(OtpCert, OtherDPCRLs, DP, CRL, DeltaCRL, Options, RevokedState0) of
- {undetermined, _, _} when Rest == []->
- {bad_cert, revocation_status_undetermined};
- {undetermined, _, RevokedState} when Rest =/= []->
+ {undetermined, unrevoked, #revoke_state{details = Details}} when Rest == []->
+ case proplists:get_value(undetermined_details, Options, false) of
+ false ->
+ {bad_cert, revocation_status_undetermined};
+ true ->
+ {bad_cert, {revocation_status_undetermined, {bad_crls, Details}}}
+ end;
+ {undetermined, unrevoked, RevokedState} when Rest =/= []->
pkix_crls_validate(OtpCert, Rest, All, Options, RevokedState);
{finished, unrevoked} ->
valid;
@@ -1417,3 +1472,7 @@ to_lower_ascii(C) -> C.
to_string(S) when is_list(S) -> S;
to_string(B) when is_binary(B) -> binary_to_list(B).
+format_details([]) ->
+ no_relevant_crls;
+format_details(Details) ->
+ Details.
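
A sketch of how a caller could opt in to the more detailed return value, assuming OtpCert and DPAndCRLs are already assembled by the caller; the option name matches the proplists lookups above:

    crl_status(OtpCert, DPAndCRLs) ->
        case public_key:pkix_crls_validate(OtpCert, DPAndCRLs,
                                           [{undetermined_details, true}]) of
            valid ->
                valid;
            {bad_cert, {revocation_status_undetermined, Reason}} ->
                {undetermined, Reason};
            {bad_cert, _} = Revoked ->
                Revoked
        end.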
diff --git a/lib/public_key/test/erl_make_certs.erl b/lib/public_key/test/erl_make_certs.erl
index e4118bab0d..e772ea1734 100644
--- a/lib/public_key/test/erl_make_certs.erl
+++ b/lib/public_key/test/erl_make_certs.erl
@@ -178,8 +178,9 @@ make_tbs(SubjectKey, Opts) ->
_ ->
subject(proplists:get_value(subject, Opts),false)
end,
-
- {#'OTPTBSCertificate'{serialNumber = trunc(random:uniform()*100000000)*10000 + 1,
+ Rnd = rand:uniform( 1000000000000 ),
+ %% 1 =< Rnd =< 1000000000000
+ {#'OTPTBSCertificate'{serialNumber = Rnd,
signature = SignAlgo,
issuer = Issuer,
validity = validity(Opts),
@@ -466,7 +467,8 @@ odd_rand(Size) ->
odd_rand(Min, Max).
odd_rand(Min,Max) ->
- Rand = crypto:rand_uniform(Min,Max),
+ %% Odd random number N such that Min =< N =< Max
+ Rand = (Min-1) + rand:uniform(Max-Min), % Min =< Rand < Max
case Rand rem 2 of
0 ->
Rand + 1;
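
A worked example of the bounds above: with Min = 5 and Max = 11, rand:uniform(Max - Min) yields 1..6, so Rand falls in 5..10; an even Rand is bumped by one, so the returned odd number always lies within 5..11.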
diff --git a/lib/reltool/src/reltool.erl b/lib/reltool/src/reltool.erl
index f6ce5578bc..feb6925044 100644
--- a/lib/reltool/src/reltool.erl
+++ b/lib/reltool/src/reltool.erl
@@ -80,7 +80,7 @@ get_server(WinPid) ->
{ok, _ServerPid} = OK ->
OK;
{error, Reason} ->
- {error, lists:flatten(io_lib:format("~p", [Reason]))}
+ {error, lists:flatten(io_lib:format("~tp", [Reason]))}
end.
%% Stop a server or window process
@@ -93,7 +93,7 @@ stop(Pid) when is_pid(Pid) ->
{'DOWN', Ref, _, _, shutdown} ->
ok;
{'DOWN', Ref, _, _, Reason} ->
- {error, lists:flatten(io_lib:format("~p", [Reason]))}
+ {error, lists:flatten(io_lib:format("~tp", [Reason]))}
end.
%% Internal library function
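
The repeated ~p to ~tp switch in this and the following reltool files is the same fix everywhere: the t modifier makes the formatter honour the device encoding, so Unicode atoms and strings are printed as characters rather than escaped code points. A minimal sketch:

    %% On a device that can encode it, ~tp prints the characters of 'αβ'
    %% as-is, whereas ~p escapes code points above Latin-1.
    log_term(Term) ->
        io:format("~tp~n", [Term]).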
diff --git a/lib/reltool/src/reltool.hrl b/lib/reltool/src/reltool.hrl
index 8b4898570b..9c8aae6b7e 100644
--- a/lib/reltool/src/reltool.hrl
+++ b/lib/reltool/src/reltool.hrl
@@ -119,7 +119,7 @@
| {archive, base_file(), [archive_opt()], [target_spec()]}
| {copy_file, base_file()}
| {copy_file, base_file(), top_file()}
- | {write_file, base_file(), iolist()}
+ | {write_file, base_file(), binary()}
| {strip_beam_file, base_file()}.
-type target_dir() :: dir().
-type incl_defaults() :: boolean().
diff --git a/lib/reltool/src/reltool_app_win.erl b/lib/reltool/src/reltool_app_win.erl
index 468ba297bb..663144861f 100644
--- a/lib/reltool/src/reltool_app_win.erl
+++ b/lib/reltool/src/reltool_app_win.erl
@@ -174,7 +174,7 @@ loop(#state{xref_pid = Xref, common = C, app = App} = S) ->
S#state.mod_wins)},
?MODULE:loop(S2);
Msg ->
- error_logger:format("~w~w got unexpected message:\n\t~p\n",
+ error_logger:format("~w~w got unexpected message:\n\t~tp\n",
[?MODULE, self(), Msg]),
?MODULE:loop(S)
end.
@@ -182,7 +182,7 @@ loop(#state{xref_pid = Xref, common = C, app = App} = S) ->
exit_warning({'EXIT', _Pid, shutdown}) ->
ok;
exit_warning({'EXIT', _Pid, _Reason} = Msg) ->
- error_logger:format("~w~w got unexpected message:\n\t~p\n",
+ error_logger:format("~w~w got unexpected message:\n\t~tp\n",
[?MODULE, self(), Msg]).
create_window(#state{app = App} = S) ->
@@ -629,7 +629,7 @@ handle_event(#state{sys = Sys, app = App} = S, Wx) ->
handle_mod_button(S, Items, Action);
_ ->
error_logger:format("~w~w got unexpected app event from "
- "wx:\n\t~p\n",
+ "wx:\n\t~tp\n",
[?MODULE, self(), Wx]),
S
end.
@@ -676,7 +676,7 @@ move_mod(App, {_ItemNo, ModStr}, Action) ->
undefined;
_ ->
error_logger:format("~w~w got unexpected mod "
- "button event: ~w\n\t ~p\n",
+ "button event: ~w\n\t ~tp\n",
[?MODULE, self(), ModName, Action]),
M#mod.incl_cond
end,
diff --git a/lib/reltool/src/reltool_fgraph_win.erl b/lib/reltool/src/reltool_fgraph_win.erl
index 915330794c..a10a2281db 100644
--- a/lib/reltool/src/reltool_fgraph_win.erl
+++ b/lib/reltool/src/reltool_fgraph_win.erl
@@ -526,7 +526,7 @@ loop(S, G) ->
exit(Reason);
Other ->
- error_logger:format("~w~w got unexpected message:\n\t~p\n",
+ error_logger:format("~w~w got unexpected message:\n\t~tp\n",
[?MODULE, self(), Other]),
loop(S, G)
end.
diff --git a/lib/reltool/src/reltool_mod_win.erl b/lib/reltool/src/reltool_mod_win.erl
index 8cd63bdda1..2d56d74563 100644
--- a/lib/reltool/src/reltool_mod_win.erl
+++ b/lib/reltool/src/reltool_mod_win.erl
@@ -171,7 +171,7 @@ loop(#state{xref_pid = Xref, common = C, mod = Mod} = S) ->
S2 = handle_event(S, Wx),
?MODULE:loop(S2);
_ ->
- error_logger:format("~w~w got unexpected message:\n\t~p\n",
+ error_logger:format("~w~w got unexpected message:\n\t~tp\n",
[?MODULE, self(), Msg]),
?MODULE:loop(S)
end
@@ -487,7 +487,7 @@ handle_event(#state{xref_pid = Xref} = S, Wx) ->
S;
_ ->
error_logger:format("~w~w got unexpected mod event from "
- "wx:\n\t~p\n",
+ "wx:\n\t~tp\n",
[?MODULE, self(), Wx]),
S
end.
@@ -667,7 +667,7 @@ goto_function(S, Editor) ->
wxStyledTextCtrl:setSelection(Editor, Left2, Right2),
Text = wxStyledTextCtrl:getSelectedText(Editor),
S2 = add_pos_to_history(S, CurrentPos),
- do_goto_function(S2, string:tokens(Text, ":"));
+ do_goto_function(S2, string:lexemes(Text, ":"));
_ ->
%% No function call
wxStyledTextCtrl:hideSelection(Editor, false),
diff --git a/lib/reltool/src/reltool_server.erl b/lib/reltool/src/reltool_server.erl
index 89e90670cf..853191c696 100644
--- a/lib/reltool/src/reltool_server.erl
+++ b/lib/reltool/src/reltool_server.erl
@@ -225,12 +225,12 @@ parse_options([{Key, Val} | KeyVals], S, C, Sys) ->
Sys2 = read_config(Sys, {sys, Val}),
parse_options(KeyVals, S, C, Sys2);
_ ->
- reltool_utils:throw_error("Illegal option: ~p", [{Key, Val}])
+ reltool_utils:throw_error("Illegal option: ~tp", [{Key, Val}])
end;
parse_options([], S, C, Sys) ->
S#state{common = C, sys = Sys};
parse_options(KeyVals, _S, _C, _Sys) ->
- reltool_utils:throw_error("Illegal option: ~p", [KeyVals]).
+ reltool_utils:throw_error("Illegal option: ~tp", [KeyVals]).
loop(#state{sys = Sys} = S) ->
receive
@@ -400,12 +400,12 @@ loop(#state{sys = Sys} = S) ->
{'EXIT', Pid, Reason} when Pid =:= S#state.parent_pid ->
exit(Reason);
{call, ReplyTo, Ref, Msg} when is_pid(ReplyTo), is_reference(Ref) ->
- error_logger:format("~w~w got unexpected call:\n\t~p\n",
+ error_logger:format("~w~w got unexpected call:\n\t~tp\n",
[?MODULE, self(), Msg]),
reltool_utils:reply(ReplyTo, Ref, {error, {invalid_call, Msg}}),
?MODULE:loop(S);
Msg ->
- error_logger:format("~w~w got unexpected message:\n\t~p\n",
+ error_logger:format("~w~w got unexpected message:\n\t~tp\n",
[?MODULE, self(), Msg]),
?MODULE:loop(S)
end.
@@ -1232,7 +1232,7 @@ parse_app_info(File, [{Key, Val} | KeyVals], AI, Status) ->
Status);
_ ->
Status2 =
- reltool_utils:add_warning("Unexpected item ~p in app file ~tp.",
+ reltool_utils:add_warning("Unexpected item ~tp in app file ~tp.",
[Key,File],
Status),
parse_app_info(File, KeyVals, AI, Status2)
@@ -1417,9 +1417,12 @@ shrink_app(A) ->
do_save_config(S, Filename, InclDef, InclDeriv) ->
{ok, Config} = do_get_config(S, InclDef, InclDeriv),
- IoList = io_lib:format("%% config generated at ~w ~w\n~p.\n\n",
- [date(), time(), Config]),
- file:write_file(Filename, IoList).
+ IoList = io_lib:format("%% ~s\n"
+ "%% config generated at ~w ~w\n"
+ "~tp.\n\n",
+ [epp:encoding_to_string(utf8),date(), time(), Config]),
+ Bin = unicode:characters_to_binary(IoList),
+ file:write_file(Filename, Bin).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
@@ -1455,7 +1458,7 @@ read_config(OldSys, {sys, KeyVals}) ->
[NewSys2#sys.boot_rel])
end;
read_config(_OldSys, BadConfig) ->
- reltool_utils:throw_error("Illegal content: ~p", [BadConfig]).
+ reltool_utils:throw_error("Illegal content: ~tp", [BadConfig]).
decode(#sys{apps = Apps} = Sys, [{erts = Name, AppKeyVals} | SysKeyVals])
when is_atom(Name), is_list(AppKeyVals) ->
@@ -1565,7 +1568,7 @@ decode(#sys{} = Sys, [{Key, Val} | KeyVals]) ->
debug_info when Val =:= keep; Val =:= strip ->
Sys#sys{debug_info = Val};
_ ->
- reltool_utils:throw_error("Illegal option: ~p", [{Key, Val}])
+ reltool_utils:throw_error("Illegal option: ~tp", [{Key, Val}])
end,
decode(Sys3, KeyVals);
decode(#app{} = App, [{Key, Val} | KeyVals]) ->
@@ -1620,14 +1623,14 @@ decode(#app{} = App, [{Key, Val} | KeyVals]) ->
active_dir = Dir,
sorted_dirs = [Dir]};
false ->
- reltool_utils:throw_error("Illegal lib dir for ~w: ~p",
+ reltool_utils:throw_error("Illegal lib dir for ~w: ~tp",
[App#app.name, Val])
end;
SelectVsn when SelectVsn=:=vsn; SelectVsn=:=lib_dir ->
reltool_utils:throw_error("Mutual exclusive options "
"'vsn' and 'lib_dir'",[]);
_ ->
- reltool_utils:throw_error("Illegal option: ~p", [{Key, Val}])
+ reltool_utils:throw_error("Illegal option: ~tp", [{Key, Val}])
end,
decode(App2, KeyVals);
decode(#app{mods = Mods} = App, [{mod, Name, ModKeyVals} | AppKeyVals]) ->
@@ -1641,7 +1644,7 @@ decode(#mod{} = Mod, [{Key, Val} | KeyVals]) ->
debug_info when Val =:= keep; Val =:= strip ->
Mod#mod{debug_info = Val};
_ ->
- reltool_utils:throw_error("Illegal option: ~p", [{Key, Val}])
+ reltool_utils:throw_error("Illegal option: ~tp", [{Key, Val}])
end,
decode(Mod2, KeyVals);
decode(#rel{rel_apps = RelApps} = Rel, [RelApp | KeyVals]) ->
@@ -1666,12 +1669,12 @@ decode(#rel{rel_apps = RelApps} = Rel, [RelApp | KeyVals]) ->
true ->
decode(Rel#rel{rel_apps = RelApps ++ [RA]}, KeyVals);
false ->
- reltool_utils:throw_error("Illegal option: ~p", [RelApp])
+ reltool_utils:throw_error("Illegal option: ~tp", [RelApp])
end;
decode(Acc, []) ->
Acc;
decode(_Acc, KeyVal) ->
- reltool_utils:throw_error("Illegal option: ~p", [KeyVal]).
+ reltool_utils:throw_error("Illegal option: ~tp", [KeyVal]).
is_type(Type) ->
case Type of
@@ -1866,7 +1869,7 @@ escripts_to_apps([Escript | Escripts], Apps, Status) ->
{ok, AF} ->
AF;
{error, Reason1} ->
- reltool_utils:throw_error("Illegal escript ~tp: ~p",
+ reltool_utils:throw_error("Illegal escript ~tp: ~tp",
[Escript,Reason1])
end,
@@ -1950,7 +1953,7 @@ escripts_to_apps([Escript | Escripts], Apps, Status) ->
Status2),
escripts_to_apps(Escripts, Apps2, Status3);
{error, Reason2} ->
- reltool_utils:throw_error("Illegal escript ~tp: ~p",
+ reltool_utils:throw_error("Illegal escript ~tp: ~tp",
[Escript,Reason2])
end;
escripts_to_apps([], Apps, Status) ->
@@ -2013,7 +2016,7 @@ init_escript_app(AppName, EscriptAppName, Dir, Info, Mods, Apps, Status) ->
case lists:keymember(AppName, #app.name, Apps) of
true ->
reltool_utils:throw_error(
- "~w: Application name clash. Escript ~tp contains application ~tp.",
+ "~w: Application name clash. Escript ~tp contains application ~w.",
[AppName,Dir,AppName]);
false ->
{App2, Status}
diff --git a/lib/reltool/src/reltool_sys_win.erl b/lib/reltool/src/reltool_sys_win.erl
index ba0d90ef5f..92df270752 100644
--- a/lib/reltool/src/reltool_sys_win.erl
+++ b/lib/reltool/src/reltool_sys_win.erl
@@ -136,7 +136,7 @@ init(Options) ->
do_init(Options)
catch
error:Reason ->
- io:format("~p: ~p~n",[Reason, erlang:get_stacktrace()]),
+ io:format("~tp: ~tp~n",[Reason, erlang:get_stacktrace()]),
exit({Reason, erlang:get_stacktrace()})
end.
@@ -182,7 +182,7 @@ do_init([{safe_config, Safe}, {parent, Parent} | Options]) ->
end.
restart_server_safe_config(true,Parent,Reason) ->
- io:format("~w(~w): <ERROR> ~p\n", [?MODULE, ?LINE, Reason]),
+ io:format("~w(~w): <ERROR> ~tp\n", [?MODULE, ?LINE, Reason]),
proc_lib:init_ack(Parent, {error,Reason});
restart_server_safe_config(false,Parent,Reason) ->
wx:new(),
@@ -199,7 +199,7 @@ restart_server_safe_config(false,Parent,Reason) ->
?wxID_OK ->
do_init([{safe_config,true},{parent,Parent},?safe_config]);
?wxID_CANCEL ->
- io:format("~w(~w): <ERROR> ~p\n", [?MODULE, ?LINE, Reason]),
+ io:format("~w(~w): <ERROR> ~tp\n", [?MODULE, ?LINE, Reason]),
proc_lib:init_ack(Parent,{error,Reason})
end.
@@ -251,7 +251,7 @@ loop(S) ->
?MODULE:loop(S#state{warning_wins = WWs2});
false ->
error_logger:format("~w~w got unexpected "
- "message:\n\t~p\n",
+ "message:\n\t~tp\n",
[?MODULE, self(), Msg]),
?MODULE:loop(S)
end
@@ -292,7 +292,7 @@ loop(S) ->
S#state.app_wins),
?MODULE:loop(S#state{fgraph_wins = FWs, app_wins = AWs});
Msg ->
- error_logger:format("~w~w got unexpected message:\n\t~p\n",
+ error_logger:format("~w~w got unexpected message:\n\t~tp\n",
[?MODULE, self(), Msg]),
?MODULE:loop(S)
end.
@@ -316,7 +316,7 @@ handle_child_exit({'EXIT', Pid, _Reason} = Exit, FWs, AWs) ->
msg_warning({'EXIT', _Pid, shutdown}, Type) when Type =/= unknown ->
ok;
msg_warning(Exit, Type) ->
- error_logger:format("~w~w got unexpected message (~w):\n\t~p\n",
+ error_logger:format("~w~w got unexpected message (~w):\n\t~tp\n",
[?MODULE, self(), Type, Exit]).
create_window(S) ->
@@ -1163,12 +1163,12 @@ handle_system_event(#state{sys = Sys} = S,
do_set_sys(S#state{sys = Sys2});
handle_system_event(S, Event, ObjRef, UserData) ->
error_logger:format("~w~w got unexpected wx sys event to ~p "
- "with user data: ~p\n\t ~p\n",
+ "with user data: ~tp\n\t ~tp\n",
[?MODULE, self(), ObjRef, UserData, Event]),
S.
handle_release_event(S, _Event, _ObjRef, UserData) ->
- io:format("Release data: ~p\n", [UserData]),
+ io:format("Release data: ~tp\n", [UserData]),
S.
handle_source_event(S,
@@ -1225,7 +1225,7 @@ handle_app_event(S,
handle_app_button(S, Items, Action);
handle_app_event(S, Event, ObjRef, UserData) ->
error_logger:format("~w~w got unexpected wx app event to "
- "~p with user data: ~p\n\t ~p\n",
+ "~p with user data: ~tp\n\t ~tp\n",
[?MODULE, self(), ObjRef, UserData, Event]),
S.
@@ -1269,7 +1269,7 @@ move_app(S, {_ItemNo, AppBase}, Action) ->
undefined;
_ ->
error_logger:format("~w~w got unexpected app "
- "button event: ~p ~p\n",
+ "button event: ~tp ~tp\n",
[?MODULE, self(), Action, AppBase]),
OldApp#app.incl_cond
end,
@@ -1543,7 +1543,7 @@ check_and_refresh(S, Status) ->
display_message(Reason, ?wxICON_ERROR),
false;
{error, Reason} ->
- Msg = lists:flatten(io_lib:format("Error:\n\n~p\n", [Reason])),
+ Msg = lists:flatten(io_lib:format("Error:\n\n~tp\n", [Reason])),
display_message(Msg, ?wxICON_ERROR),
false
end,
diff --git a/lib/reltool/src/reltool_target.erl b/lib/reltool/src/reltool_target.erl
index 1615a3e9b7..1b1461178e 100644
--- a/lib/reltool/src/reltool_target.erl
+++ b/lib/reltool/src/reltool_target.erl
@@ -787,16 +787,20 @@ do_spec_rel_files(#rel{name = RelName} = Rel, Sys) ->
{ok, BootBin} = gen_boot(Script),
Date = date(),
Time = time(),
- RelIoList = io_lib:format("%% rel generated at ~w ~w\n~p.\n\n",
+ RelIoList = io_lib:format("%% rel generated at ~w ~w\n~tp.\n\n",
[Date, Time, GenRel]),
- ScriptIoList = io_lib:format("%% script generated at ~w ~w\n~p.\n\n",
+ ScriptIoList = io_lib:format("%% script generated at ~w ~w\n~tp.\n\n",
[Date, Time, Script]),
[
- {write_file, RelFile, RelIoList},
- {write_file, ScriptFile, ScriptIoList},
+ {write_file, RelFile, to_utf8_bin_with_enc_comment(RelIoList)},
+ {write_file, ScriptFile, to_utf8_bin_with_enc_comment(ScriptIoList)},
{write_file, BootFile, BootBin}
].
+to_utf8_bin_with_enc_comment(IoList) when is_list(IoList) ->
+ unicode:characters_to_binary("%% " ++ epp:encoding_to_string(utf8) ++ "\n"
+ ++ IoList).
+
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%% Generate a complete target system
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
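
The to_utf8_bin_with_enc_comment/1 helper above captures the pattern used throughout this patch for files meant to be read back with file:consult/1: format with ~tp, prepend an encoding comment, and write a UTF-8 binary. A condensed sketch of the pattern (file name and term are examples, not taken from the patch):

    write_term(File, Term) ->
        IoList = io_lib:format("%% ~s\n~tp.\n",
                               [epp:encoding_to_string(utf8), Term]),
        file:write_file(File, unicode:characters_to_binary(IoList)).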
@@ -1001,7 +1005,8 @@ spec_start_file(#sys{boot_rel = BootRelName,
{value, Erts} = lists:keysearch(erts, #app.name, Apps),
{value, BootRel} = lists:keysearch(BootRelName, #rel.name, Rels),
Data = Erts#app.vsn ++ " " ++ BootRel#rel.vsn ++ "\n",
- {BootRel#rel.vsn, {write_file, "start_erl.data", Data}}.
+ {BootRel#rel.vsn, {write_file, "start_erl.data",
+ unicode:characters_to_binary(Data)}}.
lookup_spec(Prefix, Specs) ->
lists:filter(fun(S) -> lists:prefix(Prefix, element(2, S)) end, Specs).
@@ -1183,18 +1188,18 @@ spec_app_file(#app{name = Name,
Info#app_info.modules)],
App2 = App#app{info = Info#app_info{modules = ModNames}},
Contents = gen_app(App2),
- AppIoList = io_lib:format("%% app generated at ~w ~w\n~p.\n\n",
+ AppIoList = io_lib:format("%% app generated at ~w ~w\n~tp.\n\n",
[date(), time(), Contents]),
- [{write_file, AppFilename, AppIoList}];
+ [{write_file, AppFilename, to_utf8_bin_with_enc_comment(AppIoList)}];
all ->
%% Include all included modules
%% Generate new file
ModNames = [M#mod.name || M <- Mods, M#mod.is_included],
App2 = App#app{info = Info#app_info{modules = ModNames}},
Contents = gen_app(App2),
- AppIoList = io_lib:format("%% app generated at ~w ~w\n~p.\n\n",
+ AppIoList = io_lib:format("%% app generated at ~w ~w\n~tp.\n\n",
[date(), time(), Contents]),
- [{write_file, AppFilename, AppIoList}]
+ [{write_file, AppFilename, to_utf8_bin_with_enc_comment(AppIoList)}]
end.
@@ -1285,7 +1290,7 @@ do_eval_spec({archive, Archive, Options, Files},
{ok, _} ->
ok;
{error, Reason} ->
- reltool_utils:throw_error("create archive ~ts failed: ~p",
+ reltool_utils:throw_error("create archive ~ts failed: ~tp",
[ArchiveFile, Reason])
end;
do_eval_spec({copy_file, File}, _OrigSourceDir, SourceDir, TargetDir) ->
@@ -1299,12 +1304,12 @@ do_eval_spec({copy_file, File, OldFile},
SourceFile = filename:join([OrigSourceDir, OldFile]),
TargetFile = filename:join([TargetDir, File]),
reltool_utils:copy_file(SourceFile, TargetFile);
-do_eval_spec({write_file, File, IoList},
+do_eval_spec({write_file, File, Bin},
_OrigSourceDir,
_SourceDir,
TargetDir) ->
TargetFile = filename:join([TargetDir, File]),
- reltool_utils:write_file(TargetFile, IoList);
+ reltool_utils:write_file(TargetFile, Bin);
do_eval_spec({strip_beam, File}, _OrigSourceDir, SourceDir, TargetDir) ->
SourceFile = filename:join([SourceDir, File]),
TargetFile = filename:join([TargetDir, File]),
@@ -1336,7 +1341,7 @@ cleanup_spec({copy_file, File}, TargetDir) ->
cleanup_spec({copy_file, NewFile, _OldFile}, TargetDir) ->
TargetFile = filename:join([TargetDir, NewFile]),
file:delete(TargetFile);
-cleanup_spec({write_file, File, _IoList}, TargetDir) ->
+cleanup_spec({write_file, File, _}, TargetDir) ->
TargetFile = filename:join([TargetDir, File]),
file:delete(TargetFile);
cleanup_spec({strip_beam, File}, TargetDir) ->
@@ -1406,7 +1411,7 @@ do_filter_spec(Path,
ExclRegexps) ->
Path2 = opt_join(Path, NewFile),
match(Path2, InclRegexps, ExclRegexps);
-do_filter_spec(Path, {write_file, File, _IoList}, InclRegexps, ExclRegexps) ->
+do_filter_spec(Path, {write_file, File, _}, InclRegexps, ExclRegexps) ->
Path2 = opt_join(Path, File),
match(Path2, InclRegexps, ExclRegexps);
do_filter_spec(Path, {strip_beam, File}, InclRegexps, ExclRegexps) ->
@@ -1448,7 +1453,7 @@ do_install(RelName, TargetDir) ->
RelDir = filename:join([TargetDir2, "releases"]),
DataFile = filename:join([RelDir, "start_erl.data"]),
Bin = reltool_utils:read_file(DataFile),
- case string:tokens(binary_to_list(Bin), " \n") of
+ case string:lexemes(unicode:characters_to_list(Bin), " \n") of
[ErlVsn, RelVsn | _] ->
ErtsBinDir = filename:join([TargetDir2, "erts-" ++ ErlVsn, "bin"]),
BinDir = filename:join([TargetDir2, "bin"]),
@@ -1501,8 +1506,8 @@ subst_src_script(Script, SrcDir, DestDir, Vars, Opts) ->
subst_file(Src, Dest, Vars, Opts) ->
Bin = reltool_utils:read_file(Src),
- Chars = subst(binary_to_list(Bin), Vars),
- reltool_utils:write_file(Dest, Chars),
+ Chars = subst(unicode:characters_to_list(Bin), Vars),
+ reltool_utils:write_file(Dest, unicode:characters_to_binary(Chars)),
case lists:member(preserve, Opts) of
true ->
FileInfo = reltool_utils:read_file_info(Src),
diff --git a/lib/reltool/src/reltool_utils.erl b/lib/reltool/src/reltool_utils.erl
index 60edc9f3ca..3891b5ae4d 100644
--- a/lib/reltool/src/reltool_utils.erl
+++ b/lib/reltool/src/reltool_utils.erl
@@ -55,7 +55,7 @@ root_dir() ->
code:root_dir().
erl_libs() ->
- string:tokens(os:getenv("ERL_LIBS", ""), ":;").
+ string:lexemes(os:getenv("ERL_LIBS", ""), ":;").
lib_dirs(Dir) ->
case erl_prim_loader:list_dir(Dir) of
@@ -286,7 +286,7 @@ split_app_dir(Dir) ->
{Name, Vsn} = split_app_name(Base),
Vsn2 =
try
- [list_to_integer(N) || N <- string:tokens(Vsn, ".")]
+ [list_to_integer(N) || N <- string:lexemes(Vsn, ".")]
catch
_:_ ->
Vsn
@@ -427,7 +427,7 @@ scroll_size(ObjRef) ->
safe_keysearch(Key, Pos, List, Mod, Line) ->
case lists:keysearch(Key, Pos, List) of
false ->
- io:format("~w(~w): lists:keysearch(~p, ~w, ~p) -> false\n",
+ io:format("~w(~w): lists:keysearch(~tp, ~w, ~tp) -> false\n",
[Mod, Line, Key, Pos, List]),
erlang:error({Mod, Line, lists, keysearch, [Key, Pos, List]});
{value, Val} ->
@@ -498,8 +498,8 @@ read_file(File) ->
throw_error("read file ~ts: ~ts", [File, Text])
end.
-write_file(File, IoList) ->
- case file:write_file(File, IoList) of
+write_file(File, Bin) ->
+ case file:write_file(File, Bin) of
ok ->
ok;
{error, Reason} ->
@@ -601,7 +601,7 @@ do_decode_regexps(Key, [Regexp | Regexps], Acc) ->
Regexps,
[#regexp{source = Regexp, compiled = MP} | Acc]);
_ ->
- Text = lists:flatten(io_lib:format("~p", [{Key, Regexp}])),
+ Text = lists:flatten(io_lib:format("~tp", [{Key, Regexp}])),
throw({error, "Illegal option: " ++ Text})
end;
do_decode_regexps(_Key, [], Acc) ->
diff --git a/lib/runtime_tools/doc/src/notes.xml b/lib/runtime_tools/doc/src/notes.xml
index 2bfc174cae..d50994306b 100644
--- a/lib/runtime_tools/doc/src/notes.xml
+++ b/lib/runtime_tools/doc/src/notes.xml
@@ -32,6 +32,22 @@
<p>This document describes the changes made to the Runtime_Tools
application.</p>
+<section><title>Runtime_Tools 1.12.1</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ A faulty encoding comment was added when saving trace
+ patterns to file. This is now corrected.</p>
+ <p>
+ Own Id: OTP-14479</p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>Runtime_Tools 1.12</title>
<section><title>Fixed Bugs and Malfunctions</title>
diff --git a/lib/runtime_tools/src/dbg.erl b/lib/runtime_tools/src/dbg.erl
index e82f27896d..92938ed5c1 100644
--- a/lib/runtime_tools/src/dbg.erl
+++ b/lib/runtime_tools/src/dbg.erl
@@ -268,7 +268,7 @@ wtp(FileName) ->
{error, Reason} ->
{error, Reason};
{ok, File} ->
- io:put_chars(File, "%% coding: utf8\n"),
+ io:format(File, "%% ~s\n", [epp:encoding_to_string(utf8)]),
pt_doforall(fun ({_, Val}, _) when is_list(Val) ->
io:format(File, "~tp.~n", [Val]);
({_, _}, _) ->
diff --git a/lib/runtime_tools/src/observer_backend.erl b/lib/runtime_tools/src/observer_backend.erl
index d36af257ce..1b075a507d 100644
--- a/lib/runtime_tools/src/observer_backend.erl
+++ b/lib/runtime_tools/src/observer_backend.erl
@@ -36,6 +36,7 @@
ttb_write_binary/2,
ttb_stop/1,
ttb_fetch/2,
+ ttb_fetch/3,
ttb_resume_trace/0,
ttb_get_filenames/1]).
-define(CHUNKSIZE,8191). % 8 kbytes - 1 byte
@@ -63,9 +64,7 @@ sys_info() ->
end,
{{_,Input},{_,Output}} = erlang:statistics(io),
- [{process_count, erlang:system_info(process_count)},
- {process_limit, erlang:system_info(process_limit)},
- {uptime, element(1, erlang:statistics(wall_clock))},
+ [{uptime, element(1, erlang:statistics(wall_clock))},
{run_queue, erlang:statistics(run_queue)},
{io_input, Input},
{io_output, Output},
@@ -86,7 +85,17 @@ sys_info() ->
{thread_pool_size, erlang:system_info(thread_pool_size)},
{wordsize_internal, erlang:system_info({wordsize, internal})},
{wordsize_external, erlang:system_info({wordsize, external})},
- {alloc_info, alloc_info()}
+ {alloc_info, alloc_info()},
+ {process_count, erlang:system_info(process_count)},

+ {atom_limit, erlang:system_info(atom_limit)},
+ {atom_count, erlang:system_info(atom_count)},
+ {process_limit, erlang:system_info(process_limit)},
+ {process_count, erlang:system_info(process_count)},
+ {port_limit, erlang:system_info(port_limit)},
+ {port_count, erlang:system_info(port_count)},
+ {ets_limit, erlang:system_info(ets_limit)},
+ {ets_count, length(ets:all())},
+ {dist_buf_busy_limit, erlang:system_info(dist_buf_busy_limit)}
| MemInfo].
alloc_info() ->
@@ -650,22 +659,42 @@ stop_seq_trace() ->
%% Fetch ttb logs from remote node
ttb_fetch(MetaFile,{Port,Host}) ->
+ ttb_fetch(MetaFile,{Port,Host},undefined).
+ttb_fetch(MetaFile,{Port,Host},MasterEnc) ->
erlang:process_flag(priority,low),
Files = ttb_get_filenames(MetaFile),
{ok, Sock} = gen_tcp:connect(Host, Port, [binary, {packet, 2}]),
- send_files({Sock,Host},Files),
+ send_files({Sock,Host},Files,MasterEnc,file:native_name_encoding()),
ok = gen_tcp:close(Sock).
-send_files({Sock,Host},[File|Files]) ->
+send_files({Sock,Host},[File|Files],MasterEnc,MyEnc) ->
{ok,Fd} = file:open(File,[raw,read,binary]),
- ok = gen_tcp:send(Sock,<<1,(list_to_binary(filename:basename(File)))/binary>>),
+ Basename = filename:basename(File),
+ {Code,FilenameBin} = encode_filename(Basename,MasterEnc,MyEnc),
+ ok = gen_tcp:send(Sock,<<Code,FilenameBin/binary>>),
send_chunks(Sock,Fd),
ok = file:delete(File),
- send_files({Sock,Host},Files);
-send_files({_Sock,_Host},[]) ->
+ send_files({Sock,Host},Files,MasterEnc,MyEnc);
+send_files({_Sock,_Host},[],_MasterEnc,_MyEnc) ->
done.
+encode_filename(Basename,undefined,MyEnc) ->
+ %% Compatible with old version of ttb.erl, but no longer crashing
+ %% for code points > 255.
+ {1,unicode:characters_to_binary(Basename,MyEnc,MyEnc)};
+encode_filename(Basename,MasterEnc,MyEnc) ->
+ case unicode:characters_to_binary(Basename,MyEnc,MasterEnc) of
+ Bin when is_binary(Bin) ->
+ %% Encoding succeeded
+ {2,Bin};
+ _ ->
+ %% Can't convert Basename from my encoding to the master
+ %% node's encoding. Doing my best and hoping that master
+ %% node can fix it...
+ {3,unicode:characters_to_binary(Basename,MyEnc,MyEnc)}
+ end.
+
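
The is_binary/1 match in encode_filename/3 works because unicode:characters_to_binary/3 returns a plain binary on success and an {error, ...} or {incomplete, ...} tuple when the conversion fails. A hedged stand-alone sketch of the same guard:

    to_encoding(Name, FromEnc, ToEnc) ->
        case unicode:characters_to_binary(Name, FromEnc, ToEnc) of
            Bin when is_binary(Bin) -> {ok, Bin};
            _ -> {error, cannot_convert}
        end.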
send_chunks(Sock,Fd) ->
case file:read(Fd,?CHUNKSIZE) of
{ok,Bin} ->
diff --git a/lib/runtime_tools/vsn.mk b/lib/runtime_tools/vsn.mk
index 5ee39a25fe..7296221033 100644
--- a/lib/runtime_tools/vsn.mk
+++ b/lib/runtime_tools/vsn.mk
@@ -1 +1 @@
-RUNTIME_TOOLS_VSN = 1.12
+RUNTIME_TOOLS_VSN = 1.12.1
diff --git a/lib/sasl/doc/src/sasl_app.xml b/lib/sasl/doc/src/sasl_app.xml
index 0576397f9b..e0693fcb60 100644
--- a/lib/sasl/doc/src/sasl_app.xml
+++ b/lib/sasl/doc/src/sasl_app.xml
@@ -103,13 +103,16 @@
<tag><c>{file,FileName}</c></tag>
<item><p>Installs <c>sasl_report_file_h</c> in the error logger.
All reports go to file <c>FileName</c>, which is a
- string.</p></item>
+ string. The file is opened in <c>write</c> mode with encoding
+ <c>utf8</c>.</p></item>
<tag><c>{file,FileName,Modes}</c></tag>
<item><p>Same as <c>{file,FileName}</c>, except that <c>Modes</c>
allows you to specify the modes used for opening the <c>FileName</c>
given to the <seealso marker="kernel:file#open/2">file:open/2</seealso>
- call. When not specified, <c>Modes</c> defaults to <c>[write]</c>.
- Use <c>[append]</c> to have the <c>FileName</c> open in append mode.
+ call. By default, the file is opened in <c>write</c> mode
+ with encoding <c>utf8</c>. Use <c>[append]</c> to have
+ the <c>FileName</c> open in append mode. A different
+ encoding can also be specified.
<c>FileName</c> is a string.</p></item>
<tag><c>false</c></tag>
<item><p>No SASL error logger handler is installed.</p></item>
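
A minimal sys.config sketch of the sasl_error_logger setting documented above (path and modes are illustrative, not taken from the patch):

    [{sasl,
      [{sasl_error_logger,
        {file, "log/sasl.log", [append, {encoding, utf8}]}}]}].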
diff --git a/lib/sasl/src/release_handler.erl b/lib/sasl/src/release_handler.erl
index 1f3c6877d5..d0a7c7332d 100644
--- a/lib/sasl/src/release_handler.erl
+++ b/lib/sasl/src/release_handler.erl
@@ -1143,8 +1143,9 @@ new_emulator_make_hybrid_config(CurrentVsn,ToVsn,TmpVsn,RelDir,Masters) ->
Config2 = replace_config(stdlib,Config1,Stdlib),
Config3 = replace_config(sasl,Config2,Sasl),
- ConfigStr = io_lib:format("~p.~n",[Config3]),
- write_file(TmpFile,ConfigStr,Masters).
+ ConfigStr = io_lib:format("%% ~s~n~tp.~n",
+ [epp:encoding_to_string(utf8),Config3]),
+ write_file(TmpFile,unicode:characters_to_binary(ConfigStr),Masters).
%% Take the configuration for application App from the new config and
%% insert in the old config.
@@ -1874,9 +1875,10 @@ write_releases_1(Dir, NewReleases, Masters) ->
write_releases_m(Dir, NewReleases, Masters).
do_write_release(Dir, RELEASES, NewReleases) ->
- case file:open(filename:join(Dir, RELEASES), [write]) of
+ case file:open(filename:join(Dir, RELEASES), [write,{encoding,utf8}]) of
{ok, Fd} ->
- ok = io:format(Fd, "~p.~n", [NewReleases]),
+ ok = io:format(Fd, "%% ~s~n~tp.~n",
+ [epp:encoding_to_string(utf8),NewReleases]),
ok = file:close(Fd);
{error, Reason} ->
{error, Reason}
diff --git a/lib/sasl/src/sasl_report.erl b/lib/sasl/src/sasl_report.erl
index eb454155d5..e6556ec6ce 100644
--- a/lib/sasl/src/sasl_report.erl
+++ b/lib/sasl/src/sasl_report.erl
@@ -47,6 +47,7 @@ io_report(_IO, _Fd, _, _) ->
is_my_error_report(all, Type) -> is_my_error_report(Type);
is_my_error_report(error, Type) -> is_my_error_report(Type);
is_my_error_report(_, _Type) -> false.
+
is_my_error_report(supervisor_report) -> true;
is_my_error_report(crash_report) -> true;
is_my_error_report(_) -> false.
@@ -54,6 +55,7 @@ is_my_error_report(_) -> false.
is_my_info_report(all, Type) -> is_my_info_report(Type);
is_my_info_report(progress, Type) -> is_my_info_report(Type);
is_my_info_report(_, _Type) -> false.
+
is_my_info_report(progress) -> true;
is_my_info_report(_) -> false.
@@ -62,46 +64,65 @@ write_report2(IO, Fd, Head, supervisor_report, Report) ->
Context = sup_get(errorContext, Report),
Reason = sup_get(reason, Report),
Offender = sup_get(offender, Report),
- {FmtString,Args} = supervisor_format([Name,Context,Reason,Offender]),
- write_report_action(IO, Fd, Head, FmtString, Args);
+ Enc = encoding(Fd),
+ {FmtString,Args} = supervisor_format([Name,Context,Reason,Offender], Enc),
+ String = io_lib:format(FmtString, Args),
+ write_report_action(IO, Fd, Head, String);
write_report2(IO, Fd, Head, progress, Report) ->
- Format = format_key_val(Report),
- write_report_action(IO, Fd, Head, "~s", [Format]);
+ Encoding = encoding(Fd),
+ Depth = error_logger:get_format_depth(),
+ String = format_key_val(Report, Encoding, Depth),
+ write_report_action(IO, Fd, Head, String);
write_report2(IO, Fd, Head, crash_report, Report) ->
+ Encoding = encoding(Fd),
Depth = error_logger:get_format_depth(),
- Format = proc_lib:format(Report, latin1, Depth),
- write_report_action(IO, Fd, Head, "~s", [Format]).
-
-supervisor_format(Args0) ->
- case error_logger:get_format_depth() of
- unlimited ->
- {" Supervisor: ~p~n"
- " Context: ~p~n"
- " Reason: ~80.18p~n"
- " Offender: ~80.18p~n~n",
- Args0};
- Depth ->
- [A,B,C,D] = Args0,
- Args = [A,Depth,B,Depth,C,Depth,D,Depth],
- {" Supervisor: ~P~n"
- " Context: ~P~n"
- " Reason: ~80.18P~n"
- " Offender: ~80.18P~n~n",
- Args}
- end.
-
-write_report_action(IO, Fd, Head, Format, Args) ->
- S = [Head|io_lib:format(Format, Args)],
+ String = proc_lib:format(Report, Encoding, Depth),
+ write_report_action(IO, Fd, Head, String).
+
+supervisor_format(Args0, Encoding) ->
+ {P, Tl} = p(Encoding, error_logger:get_format_depth()),
+ [A,B,C,D] = Args0,
+ Args = [A|Tl] ++ [B|Tl] ++ [C|Tl] ++ [D|Tl],
+ {" Supervisor: ~" ++ P ++ "\n"
+ " Context: ~" ++ P ++ "\n"
+ " Reason: ~80.18" ++ P ++ "\n"
+ " Offender: ~80.18" ++ P ++ "\n~n",
+ Args}.
+
+write_report_action(IO, Fd, Head, String) ->
+ S = [Head|String],
case IO of
io -> io:put_chars(Fd, S);
io_lib -> S
end.
-format_key_val([{Tag,Data}|Rep]) ->
- io_lib:format(" ~16w: ~p~n",[Tag,Data]) ++ format_key_val(Rep);
-format_key_val(_) ->
+format_key_val(Rep, Encoding, Depth) ->
+ {P, Tl} = p(Encoding, Depth),
+ format_key_val1(Rep, P, Tl).
+
+format_key_val1([{Tag,Data}|Rep], P, Tl) ->
+ (io_lib:format(" ~16w: ~" ++ P ++ "\n", [Tag, Data|Tl]) ++
+ format_key_val1(Rep, P, Tl));
+format_key_val1(_, _, _) ->
[].
+p(Encoding, Depth) ->
+ {Letter, Tl} = case Depth of
+ unlimited -> {"p", []};
+ _ -> {"P", [Depth]}
+ end,
+ P = modifier(Encoding) ++ Letter,
+ {P, Tl}.
+
+encoding(IO) ->
+ case lists:keyfind(encoding, 1, io:getopts(IO)) of
+ false -> latin1;
+ {encoding, Enc} -> Enc
+ end.
+
+modifier(latin1) -> "";
+modifier(_) -> "t".
+
sup_get(Tag, Report) ->
case lists:keysearch(Tag, 1, Report) of
{value, {_, Value}} ->
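
The p/2 helper above builds the per-device format control: for a utf8 device with depth 30 it returns {"tP", [30]}, and for a latin1 device with unlimited depth it returns {"p", []}. A hedged illustration of how the pieces combine:

    %% With P = "tP" and Tl = [30], this is equivalent to
    %% io_lib:format("  Reason: ~80.18tP\n", [Reason, 30]).
    reason_line(Reason, P, Tl) ->
        io_lib:format("  Reason: ~80.18" ++ P ++ "\n", [Reason | Tl]).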
diff --git a/lib/sasl/src/sasl_report_file_h.erl b/lib/sasl/src/sasl_report_file_h.erl
index 21746839fa..d3b5c7dc0d 100644
--- a/lib/sasl/src/sasl_report_file_h.erl
+++ b/lib/sasl/src/sasl_report_file_h.erl
@@ -29,15 +29,27 @@
handle_event/2, handle_call/2, handle_info/2,
terminate/2]).
-init({File, Modes, Type}) when is_list(Modes) ->
+init({File, Modes0, Type}) when is_list(Modes0) ->
process_flag(trap_exit, true),
+ Modes1 =
+ case lists:keymember(encoding,1,Modes0) of
+ true -> Modes0;
+ false -> [{encoding,utf8}|Modes0]
+ end,
+ Modes =
+ case [M || M <- Modes1, lists:member(M,[write,append,exclusive])] of
+ [] ->
+ [write|Modes1];
+ _ ->
+ Modes1
+ end,
case file:open(File, Modes) of
{ok,Fd} ->
{ok, {Fd, File, Type}};
What ->
What
end.
-
+
handle_event({_Type, GL, _Msg}, State) when node(GL) /= node() ->
{ok, State};
handle_event(Event, {Fd, File, Type}) ->
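
Worked examples of the defaulting logic in init/1 above: a Modes0 of [] becomes [write, {encoding,utf8}], [append] becomes [{encoding,utf8}, append], and [write, {encoding,latin1}] is left untouched, so existing configurations keep their explicit open mode and encoding.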
diff --git a/lib/sasl/src/systools_make.erl b/lib/sasl/src/systools_make.erl
index b1523dcbb7..391b1fb5cc 100644
--- a/lib/sasl/src/systools_make.erl
+++ b/lib/sasl/src/systools_make.erl
@@ -1152,10 +1152,10 @@ generate_script(Output, Release, Appls, Flags) ->
},
ScriptFile = Output ++ ".script",
- case file:open(ScriptFile, [write]) of
+ case file:open(ScriptFile, [write,{encoding,utf8}]) of
{ok, Fd} ->
- io:format(Fd, "%% script generated at ~w ~w\n~p.\n",
- [date(), time(), Script]),
+ io:format(Fd, "%% ~s\n%% script generated at ~w ~w\n~tp.\n",
+ [epp:encoding_to_string(utf8), date(), time(), Script]),
case file:close(Fd) of
ok ->
BootFile = Output ++ ".boot",
diff --git a/lib/sasl/src/systools_relup.erl b/lib/sasl/src/systools_relup.erl
index 706ae7d631..e836d57670 100644
--- a/lib/sasl/src/systools_relup.erl
+++ b/lib/sasl/src/systools_relup.erl
@@ -535,9 +535,9 @@ to_list(X) when is_list(X) -> X.
write_relup_file(Relup, Opts) ->
Filename = filename:join(filename:absname(get_opt(outdir,Opts)),
"relup"),
- case file:open(Filename, [write]) of
+ case file:open(Filename, [write,{encoding,utf8}]) of
{ok, Fd} ->
- io:format(Fd, "~p.~n", [Relup]),
+ io:format(Fd, "%% ~s~n~tp.~n", [epp:encoding_to_string(utf8),Relup]),
case file:close(Fd) of
ok -> ok;
{error,Reason} ->
diff --git a/lib/sasl/test/release_handler_SUITE.erl b/lib/sasl/test/release_handler_SUITE.erl
index 7093158502..50932e89e4 100644
--- a/lib/sasl/test/release_handler_SUITE.erl
+++ b/lib/sasl/test/release_handler_SUITE.erl
@@ -66,7 +66,7 @@ cases() ->
supervisor_which_children_timeout,
release_handler_which_releases, install_release_syntax_check,
upgrade_supervisor, upgrade_supervisor_fail, otp_9864,
- otp_10463_upgrade_script_regexp, no_dot_erlang].
+ otp_10463_upgrade_script_regexp, no_dot_erlang, unicode_upgrade].
groups() ->
[{release,[],
@@ -1875,6 +1875,86 @@ no_dot_erlang(Conf) ->
ok
end.
+%%%-----------------------------------------------------------------
+%%% Test unicode handling. Make sure that release name, application
+%%% description, and application environment variables may contain
+%%% unicode characters.
+unicode_upgrade(Conf) ->
+ %% Set some paths
+ DataDir = ?config(data_dir, Conf),
+ PrivDir = priv_dir(Conf),
+ Dir = filename:join(PrivDir,"unicode"),
+ LibDir0 = filename:join(DataDir, "unicode"),
+ LibDir =
+ case {file:native_name_encoding(),os:type()} of
+ {utf8,{Os,_}} when Os =/= win32 ->
+ LD = filename:join(DataDir,"unicode_αβ"),
+ file:make_symlink("unicode",LD),
+ LD;
+ _ ->
+ LibDir0
+ end,
+
+ %% Create the releases
+ RelName = "unicode_rel_αβ",
+ Rel1 = create_and_install_fake_first_release(Dir,{RelName,"1"},
+ [{u,"1.0",LibDir}]),
+ Rel2 = create_fake_upgrade_release(Dir,
+ {RelName,"2"},
+ [{u,"1.1",LibDir}],
+ {[Rel1],[Rel1],[LibDir]}),
+ Rel1Dir = filename:dirname(Rel1),
+ Rel2Dir = filename:dirname(Rel2),
+
+ %% Start a slave node
+ {ok, Node} = t_start_node(unicode_upgrade, Rel1,
+ filename:join(Rel1Dir,"sys.config"), "+pc unicode"),
+
+ %% Check
+ Dir1 = filename:join([LibDir, "u-1.0"]),
+ Dir1 = rpc:call(Node, code, lib_dir, [u]),
+ UBeam1 = filename:join([Dir1,"ebin","u.beam"]),
+ UBeam1 = rpc:call(Node,code,which,[u]),
+ {RelName,"1"} = rpc:call(Node,init,script_id,[]),
+ {Env,state} = rpc:call(Node,u,u,[]),
+ 'val_αβ' = proplists:get_value('key_αβ',Env),
+ [{RelName,"1",_,permanent}|_] =
+ rpc:call(Node,release_handler,which_releases,[]),
+ {ok,ReleasesDir} = rpc:call(Node,application,get_env,[sasl,releases_dir]),
+ {ok,[[{release,RelName,"1",_,_,permanent}|_]]} =
+ file:consult(filename:join(ReleasesDir,"RELEASES")),
+
+ %% Install second release
+ {ok, RelVsn2} =
+ rpc:call(Node, release_handler, set_unpacked,
+ [Rel2++".rel", [{u,"1.1",LibDir}]]),
+ ok = rpc:call(Node, release_handler, install_file,
+ [RelVsn2, filename:join(Rel2Dir, "relup")]),
+ ok = rpc:call(Node, release_handler, install_file,
+ [RelVsn2, filename:join(Rel2Dir, "start.boot")]),
+ ok = rpc:call(Node, release_handler, install_file,
+ [RelVsn2, filename:join(Rel2Dir, "sys.config")]),
+
+ {ok, _RelVsn1, []} =
+ rpc:call(Node, release_handler, install_release, [RelVsn2]),
+
+ %% And check
+ Dir2 = filename:join([LibDir, "u-1.1"]),
+ Dir2 = rpc:call(Node, code, lib_dir, [u]),
+ UBeam2 = filename:join([Dir2,"ebin","u.beam"]),
+ {file,UBeam2} = rpc:call(Node,code,is_loaded,[u]),
+ {RelName,"1"} = rpc:call(Node,init,script_id,[]),
+ {Env,{state,'αβ'}} = rpc:call(Node,u,u,[]),
+ [{RelName,"2",_,current}|_] =
+ rpc:call(Node,release_handler,which_releases,[]),
+ {ok,ReleasesDir2} = rpc:call(Node,application,get_env,[sasl,releases_dir]),
+ {ok,<<"%% coding: utf-8\n[{release,\"unicode_rel_αβ\",\"2\""/utf8,_/binary>>}=
+ file:read_file(filename:join(ReleasesDir2,"RELEASES")),
+ ok.
+
+unicode_upgrade(cleanup,_Conf) ->
+ stop_node(node_name(unicode_upgrade)).
+
%%%=================================================================
%%% Miscellaneous functions
@@ -2002,6 +2082,8 @@ are_names_reg_gg(Node, Names, N) ->
t_start_node(Name, Boot, SysConfig) ->
+ t_start_node(Name, Boot, SysConfig, "").
+t_start_node(Name, Boot, SysConfig, ArgStr) ->
Args =
case Boot of
[] -> [];
@@ -2010,8 +2092,9 @@ t_start_node(Name, Boot, SysConfig) ->
case SysConfig of
[] -> [];
_ -> " -config " ++ SysConfig
- end,
- test_server:start_node(Name, slave, [{args, Args}]).
+ end ++
+ " " ++ ArgStr,
+ test_server:start_node(Name, peer, [{args, Args}]).
stop_node(Node) ->
?t:stop_node(Node).
@@ -2460,7 +2543,9 @@ create_rel_file(RelFile,RelName,RelVsn,Erts,ExtraApps) ->
%% Insert a term in a file, which can be read with file:consult/1.
write_term_file(File,Term) ->
- ok = file:write_file(File,io_lib:format("~p.~n",[Term])).
+ Str = io_lib:format("%% ~s~n~tp.~n",[epp:encoding_to_string(utf8),Term]),
+ Bin = unicode:characters_to_binary(Str),
+ ok = file:write_file(File,Bin).
%% Check that global group info is correct - try again for a maximum of 5 sec
@@ -2719,8 +2804,8 @@ cover_fun(Node,Func) ->
%% and possibly other applications if they are listed in AppDirs =
%% [{App,Vsn,LibDir}]
create_and_install_fake_first_release(Dir,AppDirs) ->
- %% Create the first release
- {RelName,RelVsn} = init:script_id(),
+ create_and_install_fake_first_release(Dir,init:script_id(),AppDirs).
+create_and_install_fake_first_release(Dir,{RelName,RelVsn},AppDirs) ->
{Rel,_} = create_fake_release(Dir,RelName,RelVsn,AppDirs),
ReleasesDir = filename:join(Dir, "releases"),
RelDir = filename:dirname(Rel),
@@ -2744,9 +2829,11 @@ create_and_install_fake_first_release(Dir,AppDirs) ->
%% be upgraded to from the release created by
%% create_and_install_fake_first_release/2. Unpack first by calls to
%% release_handler:set_unpacked and release_handler:install_file.
-create_fake_upgrade_release(Dir,RelVsn,AppDirs,{UpFrom,DownTo,ExtraLibs}) ->
- %% Create a new release
+create_fake_upgrade_release(Dir,RelVsn,AppDirs,UpgrInstr) when not is_tuple(RelVsn) ->
{RelName,_} = init:script_id(),
+ create_fake_upgrade_release(Dir,{RelName,RelVsn},AppDirs,UpgrInstr);
+create_fake_upgrade_release(Dir,{RelName,RelVsn},AppDirs,{UpFrom,DownTo,ExtraLibs}) ->
+ %% Create a new release
{Rel,Paths} = create_fake_release(Dir,RelName,RelVsn,AppDirs),
RelDir = filename:dirname(Rel),
diff --git a/lib/sasl/test/release_handler_SUITE_data/Makefile.src b/lib/sasl/test/release_handler_SUITE_data/Makefile.src
index b794aa0e6f..113d3e2290 100644
--- a/lib/sasl/test/release_handler_SUITE_data/Makefile.src
+++ b/lib/sasl/test/release_handler_SUITE_data/Makefile.src
@@ -76,7 +76,13 @@ SUP= \
release_handler_timeouts/dummy-0.1/ebin/dummy_sup.@EMULATOR@ \
release_handler_timeouts/dummy-0.1/ebin/dummy_sup_2.@EMULATOR@
-all: $(LIB) $(APP) $(OTP2740) $(C) $(SUP)
+UNICODE= \
+ unicode/u-1.0/ebin/u.@EMULATOR@ \
+ unicode/u-1.0/ebin/u_sup.@EMULATOR@ \
+ unicode/u-1.1/ebin/u.@EMULATOR@ \
+ unicode/u-1.1/ebin/u_sup.@EMULATOR@
+
+all: $(LIB) $(APP) $(OTP2740) $(C) $(SUP) $(UNICODE)
lib/a-1.0/ebin/a.@EMULATOR@: lib/a-1.0/src/a.erl
erlc $(EFLAGS) -olib/a-1.0/ebin lib/a-1.0/src/a.erl
@@ -236,3 +242,13 @@ release_handler_timeouts/dummy-0.1/ebin/dummy_sup.@EMULATOR@: release_handler_ti
erlc $(EFLAGS) -orelease_handler_timeouts/dummy-0.1/ebin release_handler_timeouts/dummy-0.1/src/dummy_sup.erl
release_handler_timeouts/dummy-0.1/ebin/dummy_sup_2.@EMULATOR@: release_handler_timeouts/dummy-0.1/src/dummy_sup_2.erl
erlc $(EFLAGS) -orelease_handler_timeouts/dummy-0.1/ebin release_handler_timeouts/dummy-0.1/src/dummy_sup_2.erl
+
+unicode/u-1.0/ebin/u.@EMULATOR@: unicode/u-1.0/src/u.erl
+ erlc $(EFLAGS) -ounicode/u-1.0/ebin unicode/u-1.0/src/u.erl
+unicode/u-1.0/ebin/u_sup.@EMULATOR@: unicode/u-1.0/src/u_sup.erl
+ erlc $(EFLAGS) -ounicode/u-1.0/ebin unicode/u-1.0/src/u_sup.erl
+
+unicode/u-1.1/ebin/u.@EMULATOR@: unicode/u-1.1/src/u.erl
+ erlc $(EFLAGS) -ounicode/u-1.1/ebin unicode/u-1.1/src/u.erl
+unicode/u-1.1/ebin/u_sup.@EMULATOR@: unicode/u-1.1/src/u_sup.erl
+ erlc $(EFLAGS) -ounicode/u-1.1/ebin unicode/u-1.1/src/u_sup.erl
diff --git a/lib/sasl/test/release_handler_SUITE_data/unicode/u-1.0/ebin/u.app b/lib/sasl/test/release_handler_SUITE_data/unicode/u-1.0/ebin/u.app
new file mode 100644
index 0000000000..fea4f9992e
--- /dev/null
+++ b/lib/sasl/test/release_handler_SUITE_data/unicode/u-1.0/ebin/u.app
@@ -0,0 +1,8 @@
+{application, u,
+ [{description, "This app shall test unicode handling αβ"},
+ {vsn, "1.0"},
+ {modules, [u, u_sup]},
+ {registered, [u_sup]},
+ {applications, [kernel, stdlib]},
+ {env, [{'key_αβ', 'val_αβ'}]},
+ {mod, {u_sup, []}}]}.
diff --git a/lib/sasl/test/release_handler_SUITE_data/unicode/u-1.0/src/u.erl b/lib/sasl/test/release_handler_SUITE_data/unicode/u-1.0/src/u.erl
new file mode 100644
index 0000000000..45fe098c0e
--- /dev/null
+++ b/lib/sasl/test/release_handler_SUITE_data/unicode/u-1.0/src/u.erl
@@ -0,0 +1,50 @@
+%% ``Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id$
+%%
+-module(u).
+
+
+-behaviour(gen_server).
+
+-vsn(1).
+
+%% External exports
+-export([start_link/0, u/0]).
+%% Internal exports
+-export([init/1, handle_call/3, handle_info/2, terminate/2]).
+
+start_link() -> gen_server:start_link({local, uu}, u, [], []).
+
+u() -> gen_server:call(uu, u).
+
+%%-----------------------------------------------------------------
+%% Callback functions from gen_server
+%%-----------------------------------------------------------------
+init([]) ->
+ process_flag(trap_exit, true),
+ {ok, state}.
+
+handle_call(u, _From, State) ->
+ X = application:get_all_env(u),
+ {reply, {X,State}, State}.
+
+handle_info(_, State) ->
+ {noreply, State}.
+
+terminate(_Reason, _State) ->
+ ok.
diff --git a/lib/sasl/test/release_handler_SUITE_data/unicode/u-1.0/src/u_sup.erl b/lib/sasl/test/release_handler_SUITE_data/unicode/u-1.0/src/u_sup.erl
new file mode 100644
index 0000000000..b0d4a7b58f
--- /dev/null
+++ b/lib/sasl/test/release_handler_SUITE_data/unicode/u-1.0/src/u_sup.erl
@@ -0,0 +1,38 @@
+%% ``Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id$
+%%
+-module(u_sup).
+
+
+-behaviour(supervisor).
+
+%% External exports
+-export([start/2]).
+
+%% Internal exports
+-export([init/1]).
+
+start(_, _) ->
+ supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+ SupFlags = {one_for_one, 4, 3600},
+ Config = {u,
+ {u, start_link, []},
+ permanent, 2000, worker, [u]},
+ {ok, {SupFlags, [Config]}}.
diff --git a/lib/sasl/test/release_handler_SUITE_data/unicode/u-1.1/ebin/u.app b/lib/sasl/test/release_handler_SUITE_data/unicode/u-1.1/ebin/u.app
new file mode 100644
index 0000000000..8fcc3bba42
--- /dev/null
+++ b/lib/sasl/test/release_handler_SUITE_data/unicode/u-1.1/ebin/u.app
@@ -0,0 +1,8 @@
+{application, u,
+ [{description, "This app shall test unicode handling αβ"},
+ {vsn, "1.1"},
+ {modules, [u, u_sup]},
+ {registered, [u_sup]},
+ {applications, [kernel, stdlib]},
+ {env, [{'key_αβ', 'val_αβ'}]},
+ {mod, {u_sup, []}}]}.
diff --git a/lib/sasl/test/release_handler_SUITE_data/unicode/u-1.1/ebin/u.appup b/lib/sasl/test/release_handler_SUITE_data/unicode/u-1.1/ebin/u.appup
new file mode 100644
index 0000000000..0344ce92ab
--- /dev/null
+++ b/lib/sasl/test/release_handler_SUITE_data/unicode/u-1.1/ebin/u.appup
@@ -0,0 +1,3 @@
+{"1.1",
+ [{"1.0",[{update,u,{advanced,'αβ'}}]}],
+ [{"1.0",[{update,u,{advanced,'αβ'}}]}]}.
diff --git a/lib/sasl/test/release_handler_SUITE_data/unicode/u-1.1/src/u.erl b/lib/sasl/test/release_handler_SUITE_data/unicode/u-1.1/src/u.erl
new file mode 100644
index 0000000000..d2544d6fc1
--- /dev/null
+++ b/lib/sasl/test/release_handler_SUITE_data/unicode/u-1.1/src/u.erl
@@ -0,0 +1,55 @@
+%% ``Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id$
+%%
+-module(u).
+
+
+-behaviour(gen_server).
+
+-vsn(1).
+
+%% External exports
+-export([start_link/0, u/0]).
+%% Internal exports
+-export([init/1, handle_call/3, handle_info/2, terminate/2, code_change/3]).
+
+start_link() -> gen_server:start_link({local, uu}, u, [], []).
+
+u() -> gen_server:call(uu, u).
+
+%%-----------------------------------------------------------------
+%% Callback functions from gen_server
+%%-----------------------------------------------------------------
+init([]) ->
+ process_flag(trap_exit, true),
+ {ok, {state,'αβ'}}.
+
+handle_call(u, _From, State) ->
+ X = application:get_all_env(u),
+ {reply, {X,State}, State}.
+
+handle_info(_, State) ->
+ {noreply, State}.
+
+terminate(_Reason, _State) ->
+ ok.
+
+code_change({down,_}, {State,_}, _Extra) ->
+ {ok, State};
+code_change(_, State, Extra) ->
+ {ok, {State, Extra}}.
diff --git a/lib/sasl/test/release_handler_SUITE_data/unicode/u-1.1/src/u_sup.erl b/lib/sasl/test/release_handler_SUITE_data/unicode/u-1.1/src/u_sup.erl
new file mode 100644
index 0000000000..b0d4a7b58f
--- /dev/null
+++ b/lib/sasl/test/release_handler_SUITE_data/unicode/u-1.1/src/u_sup.erl
@@ -0,0 +1,38 @@
+%% ``Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% The Initial Developer of the Original Code is Ericsson Utvecklings AB.
+%% Portions created by Ericsson are Copyright 1999, Ericsson Utvecklings
+%% AB. All Rights Reserved.''
+%%
+%% $Id$
+%%
+-module(u_sup).
+
+
+-behaviour(supervisor).
+
+%% External exports
+-export([start/2]).
+
+%% Internal exports
+-export([init/1]).
+
+start(_, _) ->
+ supervisor:start_link({local, ?MODULE}, ?MODULE, []).
+
+init([]) ->
+ SupFlags = {one_for_one, 4, 3600},
+ Config = {u,
+ {u, start_link, []},
+ permanent, 2000, worker, [u]},
+ {ok, {SupFlags, [Config]}}.
diff --git a/lib/sasl/test/sasl_report_SUITE.erl b/lib/sasl/test/sasl_report_SUITE.erl
index 53fb614921..92df5e6e40 100644
--- a/lib/sasl/test/sasl_report_SUITE.erl
+++ b/lib/sasl/test/sasl_report_SUITE.erl
@@ -20,7 +20,7 @@
-module(sasl_report_SUITE).
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_group/2,end_per_group/2]).
--export([gen_server_crash/1]).
+-export([gen_server_crash/1, gen_server_crash_unicode/1]).
-export([crash_me/0,start_link/0,init/1,handle_cast/2,terminate/2]).
@@ -29,7 +29,7 @@
suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
- [gen_server_crash].
+ [gen_server_crash, gen_server_crash_unicode].
groups() ->
[].
@@ -47,8 +47,14 @@ end_per_group(_GroupName, Config) ->
Config.
gen_server_crash(Config) ->
+ gen_server_crash(Config, latin1).
+
+gen_server_crash_unicode(Config) ->
+ gen_server_crash(Config, unicode).
+
+gen_server_crash(Config, Encoding) ->
try
- do_gen_server_crash(Config)
+ do_gen_server_crash(Config, Encoding)
after
error_logger:tty(true),
ok = application:unset_env(sasl, sasl_error_logger),
@@ -57,7 +63,7 @@ gen_server_crash(Config) ->
end,
ok.
-do_gen_server_crash(Config) ->
+do_gen_server_crash(Config, Encoding) ->
PrivDir = ?config(priv_dir, Config),
LogDir = filename:join(PrivDir, ?MODULE),
KernelLog = filename:join(LogDir, "kernel.log"),
@@ -67,7 +73,8 @@ do_gen_server_crash(Config) ->
error_logger:delete_report_handler(cth_log_redirect),
error_logger:tty(false),
application:stop(sasl),
- ok = application:set_env(sasl, sasl_error_logger, {file,SaslLog},
+ Modes = [write, {encoding, Encoding}],
+ ok = application:set_env(sasl, sasl_error_logger, {file,SaslLog,Modes},
[{persistent,true}]),
application:set_env(kernel, error_logger_format_depth, 30),
error_logger:logfile({open,KernelLog}),
@@ -78,16 +85,21 @@ do_gen_server_crash(Config) ->
error_logger:logfile(close),
- check_file(KernelLog, 70000, 150000),
- check_file(SaslLog, 100000, 150000),
+ check_file(KernelLog, utf8, 70000, 150000),
+ check_file(SaslLog, Encoding, 70000, 150000),
+ %% ok = file:delete(KernelLog),
+ %% ok = file:delete(SaslLog),
ok.
-check_file(File, Min, Max) ->
+check_file(File, Encoding, Min, Max) ->
{ok,Bin} = file:read_file(File),
Base = filename:basename(File),
io:format("*** Contents of ~s ***\n", [Base]),
- io:put_chars([Bin,"\n"]),
+ case Encoding of
+ latin1 -> io:format("~s\n", [Bin]);
+ _ -> io:format("~ts\n", [Bin])
+ end,
Sz = byte_size(Bin),
io:format("Size: ~p (allowed range is ~p..~p)\n",
[Sz,Min,Max]),
@@ -110,7 +122,9 @@ crash_me() ->
{ok,SuperPid} = supervisor:start_link(sasl_report_suite_supervisor, []),
[{Id,Pid,_,_}] = supervisor:which_children(SuperPid),
HugeData = gb_sets:from_list(lists:seq(1, 100000)),
- gen_server:cast(Pid, HugeData),
+ SomeData1 = list_to_atom([246]),
+ SomeData2 = list_to_atom([1024]),
+ gen_server:cast(Pid, {HugeData,SomeData1,SomeData2}),
Ref = monitor(process, Pid),
receive
{'DOWN',Ref,process,Pid,_} ->
@@ -129,6 +143,12 @@ init(_) ->
handle_cast(Big, St) ->
Seq = lists:seq(1, 10000),
+ Latin1Atom = list_to_atom([246]),
+ UnicodeAtom = list_to_atom([1024]),
+ put(Latin1Atom, Latin1Atom),
+ put(UnicodeAtom, UnicodeAtom),
+ self() ! Latin1Atom,
+ self() ! UnicodeAtom,
self() ! Seq,
self() ! Seq,
self() ! Seq,
diff --git a/lib/ssh/doc/src/Makefile b/lib/ssh/doc/src/Makefile
index a759854da4..adbda5a030 100644
--- a/lib/ssh/doc/src/Makefile
+++ b/lib/ssh/doc/src/Makefile
@@ -53,7 +53,8 @@ XML_PART_FILES = part_notes.xml \
XML_CHAPTER_FILES = notes.xml \
introduction.xml \
ssh_protocol.xml \
- using_ssh.xml
+ using_ssh.xml \
+ configure_algos.xml
BOOK_FILES = book.xml
diff --git a/lib/ssh/doc/src/configure_algos.xml b/lib/ssh/doc/src/configure_algos.xml
new file mode 100644
index 0000000000..dd60324851
--- /dev/null
+++ b/lib/ssh/doc/src/configure_algos.xml
@@ -0,0 +1,428 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<!DOCTYPE chapter SYSTEM "chapter.dtd">
+
+<chapter>
+ <header>
+ <copyright>
+ <year>2017</year>
+ <year>2017</year>
+ <holder>Ericsson AB. All Rights Reserved.</holder>
+ </copyright>
+ <legalnotice>
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+ </legalnotice>
+
+ <title>Configuring algorithms in SSH</title>
+ <prepared></prepared>
+ <docno></docno>
+ <approved></approved>
+ <date></date>
+ <rev></rev>
+ <file>configure_algos.xml</file>
+ </header>
+
+ <section>
+ <marker id="introduction"/>
+ <title>Introduction</title>
+ <p>To fully understand how to configure the algorithms, it is essential to have a basic understanding of the SSH protocol
+ and of how the OTP SSH app handles the corresponding items.</p>
+
+ <p>The first subsection gives a short background on the SSH protocol, while the later sections describe
+ the implementation and provide some examples.</p>
+
+ <section>
+ <title>Basics of the SSH protocol's algorithm handling</title>
+
+ <p>SSH uses different sets of algorithms in different phases of a session. Which
+ algorithms to use is negotiated by the client and the server at the beginning of a session.
+ See <url href="https://tools.ietf.org/html/rfc4253">RFC 4253</url>,
+ "The Secure Shell (SSH) Transport Layer Protocol" for details.
+ </p>
+
+ <p>The negotiation is simple: both peers send their lists of supported algorithms to the other side.
+ The first algorithm on the client's list that is also on the server's list is selected. So it is the
+ client's ordering of the list that gives the priority for the algorithms.</p>
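+
+ <p>As an illustration only (this is not the actual OTP implementation, and <c>negotiate/2</c> is
+ a hypothetical helper), the selection rule can be sketched as:</p>
+ <code type="erl">
+%% Pick the first algorithm on the client's list that is also on the server's list.
+negotiate([Alg|More], ServerAlgs) ->
+    case lists:member(Alg, ServerAlgs) of
+        true  -> {ok, Alg};
+        false -> negotiate(More, ServerAlgs)
+    end;
+negotiate([], _ServerAlgs) ->
+    {error, no_common_algorithm}.
+ </code>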
+
+ <p>There are five lists exchanged in the connection setup. Three of them are also divided in two
+ directions, to and from the server.</p>
+
+ <p>The lists are (named as in the SSH application's options):</p>
+ <taglist>
+ <tag><c>kex</c></tag>
+ <item>
+ <p>Key exchange.</p>
+ <p>An algorithm is selected for computing a secret encryption key. Examples are:
+ the old and nowadays weak <c>'diffie-hellman-group-exchange-sha1'</c> and the very strong and modern
+ <c>'ecdh-sha2-nistp512'</c>.</p>
+ </item>
+
+ <tag><c>public_key</c></tag>
+ <item>
+ <p>Server host key</p>
+ <p>The asymmetric encryption algorithm used in the server's private-public host key pair.
+ Examples include the well-known RSA <c>'ssh-rsa'</c> and elliptic curve <c>'ecdsa-sha2-nistp521'</c>.
+ </p>
+ </item>
+
+ <tag><c>cipher</c></tag>
+ <item>
+ <p>Symmetric cipher algorithm used for the payload encryption. This algorithm uses the key calculated
+ in the kex phase (together with other info) to generate the actual key used. Examples are
+ triple-DES <c>'3des-cbc'</c> and one of the many AES variants, <c>'aes192-ctr'</c>.
+ </p>
+ <p>This list is actually two lists - one for each direction, server-to-client and client-to-server. It is therefore
+ possible, although rare, to use different algorithms in the two directions of one connection.</p>
+ </item>
+
+ <tag><c>mac</c></tag>
+ <item>
+ <p>Message authentication code</p>
+ <p>A "checksum" of each message sent between the peers. Examples are SHA <c>'hmac-sha1'</c> and
+ SHA2 <c>'hmac-sha2-512'</c>.</p>
+ <p>This list is also divided in two, one for each direction.</p>
+ </item>
+
+ <tag><c>compression</c></tag>
+ <item>
+ <p>If and how to compress the message. Examples are <c>none</c>, that is, no compression, and
+ <c>zlib</c>.</p>
+ <p>This list is also divided in two, one for each direction.</p>
+ </item>
+
+ </taglist>
+ </section>
+
+ <section>
+ <title>The SSH app's mechanism</title>
+ <p>The set of algorithms that the SSH app uses by default depends on the algorithms supported by:</p>
+ <list>
+ <item><p>the <seealso marker="crypto:crypto">crypto</seealso> app,</p>
+ </item>
+ <item><p>the cryptolib OTP is linked with, usually the one the OS uses, probably OpenSSL,</p>
+ </item>
+ <item><p>and finally what the SSH app itself implements.</p>
+ </item>
+ </list>
+ <p>Due to this, it is impossible to list in the documentation which algorithms are available in a certain installation.</p>
+ <p>There is an important command to list the actual algorithms and their ordering:
+ <seealso marker="ssh#default_algorithms-0">ssh:default_algorithms/0</seealso>.</p>
+ <code type="erl">
+0> ssh:default_algorithms().
+[{kex,['ecdh-sha2-nistp384','ecdh-sha2-nistp521',
+ 'ecdh-sha2-nistp256','diffie-hellman-group-exchange-sha256',
+ 'diffie-hellman-group16-sha512',
+ 'diffie-hellman-group18-sha512',
+ 'diffie-hellman-group14-sha256',
+ 'diffie-hellman-group14-sha1',
+ 'diffie-hellman-group-exchange-sha1']},
+ {public_key,['ecdsa-sha2-nistp384','ecdsa-sha2-nistp521',
+ 'ecdsa-sha2-nistp256','ssh-rsa','rsa-sha2-256',
+ 'rsa-sha2-512','ssh-dss']},
+ {cipher,[{client2server,['aes256-gcm@openssh.com',
+ 'aes256-ctr','aes192-ctr','aes128-gcm@openssh.com',
+ 'aes128-ctr','aes128-cbc','3des-cbc']},
+ {server2client,['aes256-gcm@openssh.com','aes256-ctr',
+ 'aes192-ctr','aes128-gcm@openssh.com','aes128-ctr',
+ 'aes128-cbc','3des-cbc']}]},
+ {mac,[{client2server,['hmac-sha2-256','hmac-sha2-512',
+ 'hmac-sha1']},
+ {server2client,['hmac-sha2-256','hmac-sha2-512',
+ 'hmac-sha1']}]},
+ {compression,[{client2server,[none,'zlib@openssh.com',zlib]},
+ {server2client,[none,'zlib@openssh.com',zlib]}]}]
+
+ </code>
+ <p>To change the algorithm list, there are two options that can be used in
+ <seealso marker="ssh#connect-3">ssh:connect/2,3,4</seealso>
+ and
+ <seealso marker="ssh#daemon-2">ssh:daemon/2,3</seealso>. The options can of course
+ be used in all other functions that initiate connections.</p>
+
+ <p>The options are <c>preferred_algorithms</c> and <c>modify_algorithms</c>. The first one
+ replaces the default set, while the latter modifies the default set.</p>
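+
+ <p>Both options are given in the ordinary option list of such a call, for example to
+ <c>ssh:connect/3</c> (a sketch only; the host, port and user below are placeholders):</p>
+ <code type="erl">
+{ok, Conn} =
+    ssh:connect("ssh.example.com", 22,
+                [{user, "alice"},
+                 {silently_accept_hosts, true},
+                 {preferred_algorithms, [{cipher, ['aes256-ctr','aes192-ctr']}]},
+                 {modify_algorithms, [{rm, [{compression, [zlib]}]}]}
+                ]).
+ </code>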
+ </section>
+ </section>
+
+ <section>
+ <title>Replacing the default set: preferred_algorithms</title>
+ <p>See the <seealso marker="ssh#option_preferred_algorithms">Reference Manual</seealso> for details.</p>
+
+ <p>Here follows a series of examples ranging from simple to more complex.</p>
+
+ <p>To foresee the effect of an option there is an experimental function <c>ssh:chk_algos_opts(Opts)</c>.
+ It mangles the options <c>preferred_algorithms</c>
+ and <c>modify_algorithms</c> in the same way as <c>ssh:daemon</c>, <c>ssh:connect</c> and their friends do.</p>
+
+ <section>
+ <title>Example 1</title>
+ <p>Replace the kex algorithms list with the single algorithm <c>'diffie-hellman-group14-sha256'</c>:</p>
+ <code>
+1> ssh:chk_algos_opts(
+ [{preferred_algorithms,
+ [{kex, ['diffie-hellman-group14-sha256']}
+ ]
+ }
+ ]).
+[{kex,['diffie-hellman-group14-sha256']},
+ {public_key,['ecdsa-sha2-nistp384','ecdsa-sha2-nistp521',
+ 'ecdsa-sha2-nistp256','ssh-rsa','rsa-sha2-256',
+ 'rsa-sha2-512','ssh-dss']},
+ {cipher,[{client2server,['aes256-gcm@openssh.com',
+ 'aes256-ctr','aes192-ctr','aes128-gcm@openssh.com',
+ 'aes128-ctr','aes128-cbc','3des-cbc']},
+ {server2client,['aes256-gcm@openssh.com','aes256-ctr',
+ 'aes192-ctr','aes128-gcm@openssh.com','aes128-ctr',
+ 'aes128-cbc','3des-cbc']}]},
+ {mac,[{client2server,['hmac-sha2-256','hmac-sha2-512',
+ 'hmac-sha1']},
+ {server2client,['hmac-sha2-256','hmac-sha2-512',
+ 'hmac-sha1']}]},
+ {compression,[{client2server,[none,'zlib@openssh.com',zlib]},
+ {server2client,[none,'zlib@openssh.com',zlib]}]}]
+ </code>
+ <p>Note that the unmentioned lists (<c>public_key</c>, <c>cipher</c>, <c>mac</c> and <c>compression</c>)
+ are unchanged.</p>
+ </section>
+
+ <section>
+ <title>Example 2</title>
+ <p>In the lists that are divided in two for the two directions (cf. <c>cipher</c>) it is possible
+ to change both directions at once:</p>
+ <code>
+2> ssh:chk_algos_opts(
+ [{preferred_algorithms,
+ [{cipher,['aes128-ctr']}
+ ]
+ }
+ ]).
+[{kex,['ecdh-sha2-nistp384','ecdh-sha2-nistp521',
+ 'ecdh-sha2-nistp256','diffie-hellman-group-exchange-sha256',
+ 'diffie-hellman-group16-sha512',
+ 'diffie-hellman-group18-sha512',
+ 'diffie-hellman-group14-sha256',
+ 'diffie-hellman-group14-sha1',
+ 'diffie-hellman-group-exchange-sha1']},
+ {public_key,['ecdsa-sha2-nistp384','ecdsa-sha2-nistp521',
+ 'ecdsa-sha2-nistp256','ssh-rsa','rsa-sha2-256',
+ 'rsa-sha2-512','ssh-dss']},
+ {cipher,[{client2server,['aes128-ctr']},
+ {server2client,['aes128-ctr']}]},
+ {mac,[{client2server,['hmac-sha2-256','hmac-sha2-512',
+ 'hmac-sha1']},
+ {server2client,['hmac-sha2-256','hmac-sha2-512',
+ 'hmac-sha1']}]},
+ {compression,[{client2server,[none,'zlib@openssh.com',zlib]},
+ {server2client,[none,'zlib@openssh.com',zlib]}]}]
+ </code>
+ <p>Note that both lists in <c>cipher</c> have been changed to the provided value (<c>'aes128-ctr'</c>).</p>
+ </section>
+
+ <section>
+ <title>Example 3</title>
+ <p>In the lists that are divided in two for the two directions (cf. <c>cipher</c>) it is possible
+ to change only one of the directions:</p>
+ <code>
+3> ssh:chk_algos_opts(
+ [{preferred_algorithms,
+ [{cipher,[{client2server,['aes128-ctr']}]}
+ ]
+ }
+ ]).
+[{kex,['ecdh-sha2-nistp384','ecdh-sha2-nistp521',
+ 'ecdh-sha2-nistp256','diffie-hellman-group-exchange-sha256',
+ 'diffie-hellman-group16-sha512',
+ 'diffie-hellman-group18-sha512',
+ 'diffie-hellman-group14-sha256',
+ 'diffie-hellman-group14-sha1',
+ 'diffie-hellman-group-exchange-sha1']},
+ {public_key,['ecdsa-sha2-nistp384','ecdsa-sha2-nistp521',
+ 'ecdsa-sha2-nistp256','ssh-rsa','rsa-sha2-256',
+ 'rsa-sha2-512','ssh-dss']},
+ {cipher,[{client2server,['aes128-ctr']},
+ {server2client,['aes256-gcm@openssh.com','aes256-ctr',
+ 'aes192-ctr','aes128-gcm@openssh.com','aes128-ctr',
+ 'aes128-cbc','3des-cbc']}]},
+ {mac,[{client2server,['hmac-sha2-256','hmac-sha2-512',
+ 'hmac-sha1']},
+ {server2client,['hmac-sha2-256','hmac-sha2-512',
+ 'hmac-sha1']}]},
+ {compression,[{client2server,[none,'zlib@openssh.com',zlib]},
+ {server2client,[none,'zlib@openssh.com',zlib]}]}]
+ </code>
+ </section>
+
+ <section>
+ <title>Example 4</title>
+ <p>It is of course possible to change more than one list:</p>
+ <code>
+4> ssh:chk_algos_opts(
+ [{preferred_algorithms,
+ [{cipher,['aes128-ctr']},
+ {mac,['hmac-sha2-256']},
+ {kex,['ecdh-sha2-nistp384']},
+ {public_key,['ssh-rsa']},
+ {compression,[{server2client,[none]},
+ {client2server,[zlib]}]}
+ ]
+ }
+ ]).
+[{kex,['ecdh-sha2-nistp384']},
+ {public_key,['ssh-rsa']},
+ {cipher,[{client2server,['aes128-ctr']},
+ {server2client,['aes128-ctr']}]},
+ {mac,[{client2server,['hmac-sha2-256']},
+ {server2client,['hmac-sha2-256']}]},
+ {compression,[{client2server,[zlib]},
+ {server2client,[none]}]}]
+
+ </code>
+ <p>Note that the ordering of the tuples in the lists does not matter.</p>
+ </section>
+ </section>
+
+ <section>
+ <title>Modifying the default set: modify_algorithms</title>
+ <p>A situation where it might be useful to add an algorithm is when one needs to use a supported but disabled one.
+ An example is <c>'diffie-hellman-group1-sha1'</c>, which nowadays is very insecure and therefore disabled. It is
+ however still supported and can be enabled.</p>
+
+ <p>The option <c>preferred_algorithms</c> may be cumbersome to use for adding or removing single algorithms.
+ First one has to list the defaults with <c>ssh:default_algorithms()</c> and then edit the lists by hand.</p>
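+
+ <p>A sketch of that manual procedure, here prepending one kex algorithm by hand (the variables
+ and the <c>Host</c> and <c>Port</c> arguments are placeholders only):</p>
+ <code type="erl">
+Algs0 = ssh:default_algorithms(),
+{kex,Kex0} = lists:keyfind(kex, 1, Algs0),
+Algs = lists:keyreplace(kex, 1, Algs0,
+                        {kex, ['diffie-hellman-group1-sha1' | Kex0]}),
+ssh:connect(Host, Port, [{preferred_algorithms, Algs}]).
+ </code>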
+
+ <p>To facilitate addition or removal of algorithms the option <c>modify_algorithms</c> is available.
+ See the <seealso marker="ssh#option_modify_algorithms">Reference Manual</seealso> for details.</p>
+
+ <p>The option takes a list with instructions to append, prepend or remove algorithms:</p>
+ <code type="erl">
+{modify_algorithms, [{append, ...},
+ {prepend, ...},
+ {rm, ...}
+ ]}
+ </code>
+ <p>Each of the <c>...</c> can be an <c>algs_list()</c> as the argument to the <c>preferred_algorithms</c> option.</p>
+ <section>
+ <title>Example 5</title>
+ <p>As an example, let's add Diffie-Hellman Group1 first in the kex list. It is supported according to
+ <seealso marker="SSH_app#supported_algos">Supported algorithms</seealso>.</p>
+ <code type="erl">
+5> ssh:chk_algos_opts(
+ [{modify_algorithms,
+ [{prepend,
+ [{kex,['diffie-hellman-group1-sha1']}]
+ }
+ ]
+ }
+ ]).
+[{kex,['diffie-hellman-group1-sha1','ecdh-sha2-nistp384',
+ 'ecdh-sha2-nistp521','ecdh-sha2-nistp256',
+ 'diffie-hellman-group-exchange-sha256',
+ 'diffie-hellman-group16-sha512',
+ 'diffie-hellman-group18-sha512',
+ 'diffie-hellman-group14-sha256',
+ 'diffie-hellman-group14-sha1',
+ 'diffie-hellman-group-exchange-sha1']},
+ {public_key,['ecdsa-sha2-nistp384','ecdsa-sha2-nistp521',
+ 'ecdsa-sha2-nistp256','ssh-rsa','rsa-sha2-256',
+ 'rsa-sha2-512','ssh-dss']},
+ {cipher,[{client2server,['aes256-gcm@openssh.com',
+ 'aes256-ctr','aes192-ctr','aes128-gcm@openssh.com',
+ 'aes128-ctr','aes128-cbc','3des-cbc']},
+ {server2client,['aes256-gcm@openssh.com','aes256-ctr',
+ 'aes192-ctr','aes128-gcm@openssh.com','aes128-ctr',
+ 'aes128-cbc','3des-cbc']}]},
+ {mac,[{client2server,['hmac-sha2-256','hmac-sha2-512',
+ 'hmac-sha1']},
+ {server2client,['hmac-sha2-256','hmac-sha2-512',
+ 'hmac-sha1']}]},
+ {compression,[{client2server,[none,'zlib@openssh.com',zlib]},
+ {server2client,[none,'zlib@openssh.com',zlib]}]}]
+
+ </code>
+ <p>The result shows that Diffie-Hellman Group1 has been added at the head of the kex list.</p>
+ </section>
+
+ <section>
+ <title>Example 6</title>
+ <p>In this example, we put 'diffie-hellman-group1-sha1' first and also move
+ <c>'ecdh-sha2-nistp521'</c> to the end of the kex list, that is, <c>append</c> it.</p>
+ <code type="erl">
+6> ssh:chk_algos_opts(
+ [{modify_algorithms,
+ [{prepend,
+ [{kex, ['diffie-hellman-group1-sha1']}
+ ]},
+ {append,
+ [{kex, ['ecdh-sha2-nistp521']}
+ ]}
+ ]
+ }
+ ]).
+[{kex,['diffie-hellman-group1-sha1','ecdh-sha2-nistp384',
+ 'ecdh-sha2-nistp256','diffie-hellman-group-exchange-sha256',
+ 'diffie-hellman-group16-sha512',
+ 'diffie-hellman-group18-sha512',
+ 'diffie-hellman-group14-sha256',
+ 'diffie-hellman-group14-sha1',
+ 'diffie-hellman-group-exchange-sha1','ecdh-sha2-nistp521']},
+ {public_key,['ecdsa-sha2-nistp384','ecdsa-sha2-nistp521',
+ .....
+]
+ </code>
+ <p>Note that the appended algorithm is removed from its original place and then appended to the same list.</p>
+ </section>
+
+ <section>
+ <title>Example 7</title>
+ <p>In this example, we use both options (<c>preferred_algorithms</c> and <c>modify_algorithms</c>) and
+ also try to prepend an unsupported algorithm. Any unsupported algorithm is quietly removed.</p>
+ <code type="erl">
+7> ssh:chk_algos_opts(
+ [{preferred_algorithms,
+ [{cipher,['aes128-ctr']},
+ {mac,['hmac-sha2-256']},
+ {kex,['ecdh-sha2-nistp384']},
+ {public_key,['ssh-rsa']},
+ {compression,[{server2client,[none]},
+ {client2server,[zlib]}]}
+ ]
+ },
+ {modify_algorithms,
+ [{prepend,
+ [{kex, ['some unsupported algorithm']}
+ ]},
+ {append,
+ [{kex, ['diffie-hellman-group1-sha1']}
+ ]}
+ ]
+ }
+ ]).
+[{kex,['ecdh-sha2-nistp384','diffie-hellman-group1-sha1']},
+ {public_key,['ssh-rsa']},
+ {cipher,[{client2server,['aes128-ctr']},
+ {server2client,['aes128-ctr']}]},
+ {mac,[{client2server,['hmac-sha2-256']},
+ {server2client,['hmac-sha2-256']}]},
+ {compression,[{client2server,[zlib]},
+ {server2client,[none]}]}]
+
+ </code>
+ <p>It is of course questionable why anyone would want to use both of these options together,
+ but it is possible if an unforeseen need should arise.</p>
+ </section>
+
+
+
+ </section>
+
+</chapter>
diff --git a/lib/ssh/doc/src/notes.xml b/lib/ssh/doc/src/notes.xml
index f93753f1d2..5826d14a4a 100644
--- a/lib/ssh/doc/src/notes.xml
+++ b/lib/ssh/doc/src/notes.xml
@@ -30,6 +30,22 @@
<file>notes.xml</file>
</header>
+<section><title>Ssh 4.5.1</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ All unknown options are sent to the transport handler
+ regardless of type.</p>
+ <p>
+ Own Id: OTP-14541 Aux Id: EIRERL-63 </p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>Ssh 4.5</title>
<section><title>Improvements and New Features</title>
diff --git a/lib/ssh/doc/src/ssh.xml b/lib/ssh/doc/src/ssh.xml
index ea7e975ef5..d9516fff12 100644
--- a/lib/ssh/doc/src/ssh.xml
+++ b/lib/ssh/doc/src/ssh.xml
@@ -108,6 +108,9 @@
<tag><c>double_algs() =</c></tag>
<item><p><c>[{client2serverlist,simple_algs()},{server2client,simple_algs()}] | simple_algs()</c></p></item>
+
+ <tag><c>modify_algs_list() =</c></tag>
+ <item><p><c>list( {append,algs_list()} | {prepend,algs_list()} | {rm,algs_list()} )</c></p></item>
</taglist>
</section>
@@ -254,7 +257,8 @@
</p>
</item>
- <tag><c><![CDATA[{preferred_algorithms, algs_list()}]]></c></tag>
+ <tag><marker id="option_preferred_algorithms"></marker>
+ <c><![CDATA[{preferred_algorithms, algs_list()}]]></c></tag>
<item>
<p>List of algorithms to use in the algorithm negotiation. The default <c>algs_list()</c> can
be obtained from <seealso marker="#default_algorithms/0">default_algorithms/0</seealso>.
@@ -275,6 +279,8 @@
for cipher but specifies the same algorithms for mac and compression in both directions.
The kex (key exchange) is implicit but public_key is set explicitly.</p>
+ <p>For background and more examples see the <seealso marker="configure_algos#introduction">User's Guide</seealso>.</p>
+
<warning>
<p>Changing the values can make a connection less secure. Do not change unless you
know exactly what you are doing. If you do not understand the values then you
@@ -282,6 +288,62 @@
</warning>
</item>
+ <tag><marker id="option_modify_algorithms"></marker>
+ <c><![CDATA[{modify_algorithms, modify_algs_list()}]]></c></tag>
+ <item>
+ <p>Modifies the list of algorithms to use in the algorithm negotiation. The modifications are
+ applied after the option <c>preferred_algorithms</c> (if present) is applied.</p>
+ <p>The algorithm for the modifications works like this:</p>
+ <list>
+ <item>
+ <p>Input is the <c>modify_algs_list()</c> and a set of algorithms <c>A</c>
+ obtained from the <c>preferred_algorithms</c> option if present, or otherwise from the
+ <seealso marker="ssh#default_algorithms-0">ssh:default_algorithms/0</seealso>.
+ </p>
+ </item>
+ <item>
+ <p>The head of the <c>modify_algs_list()</c> modifies <c>A</c> giving the result <c>A'</c>.</p>
+ <p>The possible modifications are:</p>
+ <list>
+ <item>
+ <p>Append or prepend supported but not enabled algorithm(s) to the list of
+ algorithms. If the wanted algorithms are already in <c>A</c>, they will first
+ be removed and then appended or prepended.
+ </p>
+ </item>
+ <item>
+ <p>Remove (rm) one or more algorithms from <c>A</c>.
+ </p>
+ </item>
+ </list>
+ </item>
+ <item>
+ <p>Repeat the modification step with the tail of <c>modify_algs_list()</c> and the resulting
+ <c>A'</c>.
+ </p>
+ </item>
+ </list>
+ <p>If an unsupported algorithm is in the <c>modify_algs_list()</c>, it will be silently ignored.</p>
+ <p>If more than one modify_algorithms option is given, the result is undefined.</p>
+ <p>Here is an example of this option:</p>
+ <code>
+{modify_algorithms,
+ [{prepend, [{kex, ['diffie-hellman-group1-sha1']}]},
+ {rm, [{compression, [none]}]}
+ ]
+}
+</code>
+ <p>The example specifies that:</p>
+ <list>
+ <item><p>the old key exchange algorithm 'diffie-hellman-group1-sha1' should be
+ the main alternative. It will be the main alternative since it is prepended to the list.</p>
+ </item>
+ <item><p>The compression algorithm none (= no compression) is removed, so compression is enforced.</p>
+ </item>
+ </list>
+ <p>For background and more examples see the <seealso marker="configure_algos#introduction">User's Guide</seealso>.</p>
+ </item>
+
<tag><c><![CDATA[{dh_gex_limits,{Min=integer(),I=integer(),Max=integer()}}]]></c></tag>
<item>
<p>Sets the three diffie-hellman-group-exchange parameters that guides the connected server in choosing a group.
@@ -555,6 +617,8 @@
for cipher but specifies the same algorithms for mac and compression in both directions.
The kex (key exchange) is implicit but public_key is set explicitly.</p>
+ <p>For background and more examples see the <seealso marker="configure_algos#introduction">User's Guide</seealso>.</p>
+
<warning>
<p>Changing the values can make a connection less secure. Do not change unless you
know exactly what you are doing. If you do not understand the values then you
@@ -562,6 +626,41 @@
</warning>
</item>
+ <tag><marker id="option_modify_algorithms"></marker>
+ <c><![CDATA[{modify_algorithms, modify_algs_list()}]]></c></tag>
+ <item>
+ <p>Modifies the list of algorithms to use in the algorithm negotiation. The modifications are
+ applied after the option <c>preferred_algorithms</c> is applied (if present).</p>
+ <p>The possible modifications are to:</p>
+ <list>
+ <item><p>Append or prepend supported but not enabled algorithm(s) to the list of
+ algorithms.</p><p>If the wanted algorithms are already in the list of algorithms, they will first
+ be removed and then appended or prepended.
+ </p>
+ </item>
+ <item><p>Remove (rm) one or more algorithms from the list of algorithms.</p></item>
+ </list>
+ <p>If an unsupported algorithm is in the list, it will be silently ignored.</p>
+
+ <p>Here is an example of this option:</p>
+ <code>
+{modify_algorithms,
+ [{prepend, [{kex, ['diffie-hellman-group1-sha1']}]},
+ {rm, [{compression, [none]}]}
+ ]
+}
+</code>
+ <p>The example specifies that:</p>
+ <list>
+ <item><p>the old key exchange algorithm 'diffie-hellman-group1-sha1' should be
+ the main alternative. It will be the main alternative since it is prepended to the list.</p>
+ </item>
+ <item><p>The compression algorithm none (= no compression) is removed, so compression is enforced.</p>
+ </item>
+ </list>
+ <p>For background and more examples see the <seealso marker="configure_algos#introduction">User's Guide</seealso>.</p>
+ </item>
+
<tag><c><![CDATA[{dh_gex_groups, [{Size=integer(),G=integer(),P=integer()}] | {file,filename()} {ssh_moduli_file,filename()} }]]></c></tag>
<item>
<p>Defines the groups the server may choose among when diffie-hellman-group-exchange is negotiated.
diff --git a/lib/ssh/doc/src/ssh_app.xml b/lib/ssh/doc/src/ssh_app.xml
index 33ec7aaee0..1cbbdfcf38 100644
--- a/lib/ssh/doc/src/ssh_app.xml
+++ b/lib/ssh/doc/src/ssh_app.xml
@@ -97,7 +97,7 @@
<p>The <c>known_hosts</c> file contains a list of approved servers and
their public keys. Once a server is listed, it can be verified
without user interaction.
- </p>
+ </p>
</section>
<section>
<title>Authorized Keys</title>
@@ -135,7 +135,7 @@
</p>
<p>Supported algorithms are:</p>
-
+ <marker id="supported_algos"></marker>
<taglist>
<tag>Key exchange algorithms</tag>
<item>
diff --git a/lib/ssh/doc/src/usersguide.xml b/lib/ssh/doc/src/usersguide.xml
index 70051ba771..d902df6848 100644
--- a/lib/ssh/doc/src/usersguide.xml
+++ b/lib/ssh/doc/src/usersguide.xml
@@ -36,4 +36,5 @@
</description>
<xi:include href="introduction.xml"/>
<xi:include href="using_ssh.xml"/>
+ <xi:include href="configure_algos.xml"/>
</part>
diff --git a/lib/ssh/src/ssh.erl b/lib/ssh/src/ssh.erl
index 5ebab43c30..1a5d48baca 100644
--- a/lib/ssh/src/ssh.erl
+++ b/lib/ssh/src/ssh.erl
@@ -35,6 +35,7 @@
daemon/1, daemon/2, daemon/3,
daemon_info/1,
default_algorithms/0,
+ chk_algos_opts/1,
stop_listener/1, stop_listener/2, stop_listener/3,
stop_daemon/1, stop_daemon/2, stop_daemon/3,
shell/1, shell/2, shell/3
@@ -381,6 +382,27 @@ default_algorithms() ->
ssh_transport:default_algorithms().
%%--------------------------------------------------------------------
+-spec chk_algos_opts(list(any())) -> algs_list() .
+%%--------------------------------------------------------------------
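+%% Pre-compute the algorithm set that the options preferred_algorithms and
+%% modify_algorithms would give, without setting up any connection. Any other
+%% option in Opts is reported back as {error,{non_algo_opts_found,...}}.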
+chk_algos_opts(Opts) ->
+ case lists:foldl(
+ fun({preferred_algorithms,_}, Acc) -> Acc;
+ ({modify_algorithms,_}, Acc) -> Acc;
+ (KV, Acc) -> [KV|Acc]
+ end, [], Opts)
+ of
+ [] ->
+ case ssh_options:handle_options(client, Opts) of
+ M when is_map(M) ->
+ maps:get(preferred_algorithms, M);
+ Others ->
+ Others
+ end;
+ OtherOps ->
+ {error, {non_algo_opts_found,OtherOps}}
+ end.
+
+%%--------------------------------------------------------------------
%%% Internal functions
%%--------------------------------------------------------------------
%% The handle_daemon_args/2 function basically only sets the ip-option in Opts
diff --git a/lib/ssh/src/ssh_io.erl b/lib/ssh/src/ssh_io.erl
index 8ba759ad60..a7cd1daeec 100644
--- a/lib/ssh/src/ssh_io.erl
+++ b/lib/ssh/src/ssh_io.erl
@@ -31,8 +31,8 @@ read_line(Prompt, Opts) ->
format("~s", [listify(Prompt)]),
?GET_INTERNAL_OPT(user_pid, Opts) ! {self(), question},
receive
- Answer when is_list(Answer) ->
- Answer
+ Answer when is_list(Answer) or is_binary(Answer) ->
+ unicode:characters_to_list(Answer)
end.
yes_no(Prompt, Opts) ->
@@ -44,7 +44,7 @@ yes_no(Prompt, Opts) ->
y -> yes;
n -> no;
- Answer when is_list(Answer) ->
+ Answer when is_list(Answer) or is_binary(Answer) ->
case trim(Answer) of
"y" -> yes;
"n" -> no;
@@ -60,7 +60,7 @@ read_password(Prompt, Opts) ->
format("~s", [listify(Prompt)]),
?GET_INTERNAL_OPT(user_pid, Opts) ! {self(), user_password},
receive
- Answer when is_list(Answer) ->
+ Answer when is_list(Answer) or is_binary(Answer) ->
case trim(Answer) of
"" ->
read_password(Prompt, Opts);
diff --git a/lib/ssh/src/ssh_options.erl b/lib/ssh/src/ssh_options.erl
index 7eeed70739..6939094401 100644
--- a/lib/ssh/src/ssh_options.erl
+++ b/lib/ssh/src/ssh_options.erl
@@ -170,9 +170,10 @@ handle_options(Role, PropList0, Opts0) when is_map(Opts0),
OptionDefinitions),
%% Enter the user's values into the map; unknown keys are
%% treated as socket options
- lists:foldl(fun(KV, Vals) ->
- save(KV, OptionDefinitions, Vals)
- end, InitialMap, PropList1)
+ final_preferred_algorithms(
+ lists:foldl(fun(KV, Vals) ->
+ save(KV, OptionDefinitions, Vals)
+ end, InitialMap, PropList1))
catch
error:{eoptions, KV, undefined} ->
{error, {eoptions,KV}};
@@ -236,7 +237,10 @@ save({Key,Value}, Defs, OptMap) when is_map(OptMap) ->
%% by the check fun will give an error exception:
error:{check,{BadValue,Extra}} ->
error({eoptions, {Key,BadValue}, Extra})
- end.
+ end;
+save(Opt, _Defs, OptMap) when is_map(OptMap) ->
+ OptMap#{socket_options := [Opt | maps:get(socket_options,OptMap)]}.
+
%%%================================================================
%%%
@@ -506,6 +510,15 @@ default(common) ->
class => user_options
},
+ %% NOTE: This option is supposed to be used only in this very module (?MODULE). There is
+ %% a final stage in handle_options that "merges" the preferred_algorithms option and this one.
+ %% The preferred_algorithms is the one to use in the rest of the ssh application!
+ {modify_algorithms, def} =>
+ #{default => undefined, % signals error if unsupported algo in preferred_algorithms :(
+ chk => fun check_modify_algorithms/1,
+ class => user_options
+ },
+
{id_string, def} =>
#{default => undefined, % FIXME: see ssh_transport:ssh_vsn/0
chk => fun(random) ->
@@ -817,83 +830,190 @@ valid_hash(L, Ss) when is_list(L) -> lists:all(fun(S) -> valid_hash(S,Ss) end, L
valid_hash(X, _) -> error_in_check(X, "Expect atom or list in fingerprint spec").
%%%----------------------------------------------------------------
-check_preferred_algorithms(Algs) ->
- [error_in_check(K,"Bad preferred_algorithms key")
- || {K,_} <- Algs,
- not lists:keymember(K,1,ssh:default_algorithms())],
+check_modify_algorithms(M) when is_list(M) ->
+ [error_in_check(Op_KVs, "Bad modify_algorithms")
+ || Op_KVs <- M,
+ not is_tuple(Op_KVs)
+ orelse (size(Op_KVs) =/= 2)
+ orelse (not lists:member(element(1,Op_KVs), [append,prepend,rm]))],
+ {true, [{Op,normalize_mod_algs(KVs,false)} || {Op,KVs} <- M]};
+check_modify_algorithms(_) ->
+ error_in_check(modify_algorithms, "Bad option value. List expected.").
+
+
+
+
+normalize_mod_algs(KVs, UseDefaultAlgs) ->
+ normalize_mod_algs(ssh_transport:algo_classes(), KVs, [], UseDefaultAlgs).
+
+normalize_mod_algs([K|Ks], KVs0, Acc, UseDefaultAlgs) ->
+ %% Pick the expected keys in order and check if they are in the user's list
+ {Vs1, KVs} =
+ case lists:keytake(K, 1, KVs0) of
+ {value, {K,Vs0}, KVs1} ->
+ {Vs0, KVs1};
+ false ->
+ {[], KVs0}
+ end,
+ Vs = normalize_mod_alg_list(K, Vs1, UseDefaultAlgs),
+ normalize_mod_algs(Ks, KVs, [{K,Vs} | Acc], UseDefaultAlgs);
+normalize_mod_algs([], [], Acc, _) ->
+ %% No values left in the key-value list after removing the expected entries
+ %% (that's good)
+ lists:reverse(Acc);
+normalize_mod_algs([], [{K,_}|_], _, _) ->
+ %% Some values left in the key-value list after removing the expected entries
+ %% (that's bad)
+ case ssh_transport:algo_class(K) of
+ true -> error_in_check(K, "Duplicate key");
+ false -> error_in_check(K, "Unknown key")
+ end;
+normalize_mod_algs([], [X|_], _, _) ->
+ error_in_check(X, "Bad list element").
- try alg_duplicates(Algs, [], [])
- of
- [] ->
- {true,
- [case proplists:get_value(Key, Algs) of
- undefined ->
- {Key,DefAlgs};
- Vals ->
- handle_pref_alg(Key,Vals,SupAlgs)
- end
- || {{Key,DefAlgs}, {Key,SupAlgs}} <- lists:zip(ssh:default_algorithms(),
- ssh_transport:supported_algorithms())
- ]
- };
-
- Dups ->
- error_in_check(Dups, "Duplicates")
- catch
- _:_ ->
- false
- end.
-alg_duplicates([{K,V}|KVs], Ks, Dups0) ->
- Dups =
- case lists:member(K,Ks) of
- true -> [K|Dups0];
- false -> Dups0
- end,
- case V--lists:usort(V) of
- [] -> alg_duplicates(KVs, [K|Ks], Dups);
- Ds -> alg_duplicates(KVs, [K|Ks], Dups++Ds)
+
+%%% Handle the algorithms list
+normalize_mod_alg_list(K, Vs, UseDefaultAlgs) ->
+ normalize_mod_alg_list(K,
+ ssh_transport:algo_two_spec_class(K),
+ Vs,
+ def_alg(K,UseDefaultAlgs)).
+
+
+normalize_mod_alg_list(_K, _, [], Default) ->
+ Default;
+
+normalize_mod_alg_list(K, true, [{client2server,L1}], [_,{server2client,L2}]) ->
+ [nml1(K,{client2server,L1}),
+ {server2client,L2}];
+
+normalize_mod_alg_list(K, true, [{server2client,L2}], [{client2server,L1},_]) ->
+ [{client2server,L1},
+ nml1(K,{server2client,L2})];
+
+normalize_mod_alg_list(K, true, [{server2client,L2},{client2server,L1}], _) ->
+ [nml1(K,{client2server,L1}),
+ nml1(K,{server2client,L2})];
+
+normalize_mod_alg_list(K, true, [{client2server,L1},{server2client,L2}], _) ->
+ [nml1(K,{client2server,L1}),
+ nml1(K,{server2client,L2})];
+
+normalize_mod_alg_list(K, true, L0, _) ->
+ L = nml(K,L0), % Throws errors
+ [{client2server,L},
+ {server2client,L}];
+
+normalize_mod_alg_list(K, false, L, _) ->
+ nml(K,L).
+
+
+nml1(K, {T,V}) when T==client2server ; T==server2client ->
+ {T, nml({K,T}, V)}.
+
+nml(K, L) ->
+ [error_in_check(K, "Bad value for this key") % This is a throw
+ || V <- L,
+ not is_atom(V)
+ ],
+ case L -- lists:usort(L) of
+ [] -> ok;
+ Dups -> error_in_check({K,Dups}, "Duplicates") % This is a throw
+ end,
+ L.
+
+
+def_alg(K, false) ->
+ case ssh_transport:algo_two_spec_class(K) of
+ false -> [];
+ true -> [{client2server,[]}, {server2client,[]}]
end;
-alg_duplicates([], _Ks, Dups) ->
- Dups.
-
-handle_pref_alg(Key,
- Vs=[{client2server,C2Ss=[_|_]},{server2client,S2Cs=[_|_]}],
- [{client2server,Sup_C2Ss},{server2client,Sup_S2Cs}]
- ) ->
- chk_alg_vs(Key, C2Ss, Sup_C2Ss),
- chk_alg_vs(Key, S2Cs, Sup_S2Cs),
- {Key, Vs};
-
-handle_pref_alg(Key,
- Vs=[{server2client,[_|_]},{client2server,[_|_]}],
- Sup=[{client2server,_},{server2client,_}]
- ) ->
- handle_pref_alg(Key, lists:reverse(Vs), Sup);
-
-handle_pref_alg(Key,
- Vs=[V|_],
- Sup=[{client2server,_},{server2client,_}]
- ) when is_atom(V) ->
- handle_pref_alg(Key, [{client2server,Vs},{server2client,Vs}], Sup);
-
-handle_pref_alg(Key,
- Vs=[V|_],
- Sup=[S|_]
- ) when is_atom(V), is_atom(S) ->
- chk_alg_vs(Key, Vs, Sup),
- {Key, Vs};
-
-handle_pref_alg(Key, Vs, _) ->
- error_in_check({Key,Vs}, "Badly formed list").
-
-chk_alg_vs(OptKey, Values, SupportedValues) ->
- case (Values -- SupportedValues) of
- [] -> Values;
- [none] -> [none]; % for testing only
- Bad -> error_in_check({OptKey,Bad}, "Unsupported value(s) found")
+def_alg(K, true) ->
+ ssh_transport:default_algorithms(K).
+
+
+
+check_preferred_algorithms(Algs) when is_list(Algs) ->
+ check_input_ok(Algs),
+ {true, normalize_mod_algs(Algs, true)};
+
+check_preferred_algorithms(_) ->
+ error_in_check(modify_algorithms, "Bad option value. List expected.").
+
+
+check_input_ok(Algs) ->
+ [error_in_check(KVs, "Bad preferred_algorithms")
+ || KVs <- Algs,
+ not is_tuple(KVs)
+ orelse (size(KVs) =/= 2)].
+
+%%%----------------------------------------------------------------
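+%% Merge the preferred_algorithms and modify_algorithms options into the final
+%% preferred_algorithms value. Unsupported algorithms are an error only when no
+%% modify_algorithms option is given; otherwise they are silently dropped.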
+final_preferred_algorithms(Options) ->
+ Result =
+ case ?GET_OPT(modify_algorithms, Options) of
+ undefined ->
+ rm_non_supported(true,
+ ?GET_OPT(preferred_algorithms, Options));
+ ModAlgs ->
+ rm_non_supported(false,
+ eval_ops(?GET_OPT(preferred_algorithms, Options),
+ ModAlgs))
+ end,
+ error_if_empty(Result), % Throws errors if any value list is empty
+ ?PUT_OPT({preferred_algorithms,Result}, Options).
+
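+%% Apply the modify_algorithms instructions ({append,..}, {prepend,..} and {rm,..})
+%% in order, each one transforming the accumulated preferred algorithms list.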
+eval_ops(PrefAlgs, ModAlgs) ->
+ lists:foldl(fun eval_op/2, PrefAlgs, ModAlgs).
+
+eval_op({Op,AlgKVs}, PrefAlgs) ->
+ eval_op(Op, AlgKVs, PrefAlgs, []).
+
+eval_op(Op, [{C,L1}|T1], [{C,L2}|T2], Acc) ->
+ eval_op(Op, T1, T2, [{C,eval_op(Op,L1,L2,[])} | Acc]);
+
+eval_op(_, [], [], Acc) -> lists:reverse(Acc);
+eval_op(rm, Opt, Pref, []) when is_list(Opt), is_list(Pref) -> Pref -- Opt;
+eval_op(append, Opt, Pref, []) when is_list(Opt), is_list(Pref) -> (Pref--Opt) ++ Opt;
+eval_op(prepend, Opt, Pref, []) when is_list(Opt), is_list(Pref) -> Opt ++ (Pref--Opt).
+
+
+rm_non_supported(UnsupIsErrorFlg, KVs) ->
+ [{K,rmns(K,Vs, UnsupIsErrorFlg)} || {K,Vs} <- KVs].
+
+rmns(K, Vs, UnsupIsErrorFlg) ->
+ case ssh_transport:algo_two_spec_class(K) of
+ false ->
+ rm_unsup(Vs, ssh_transport:supported_algorithms(K), UnsupIsErrorFlg, K);
+ true ->
+ [{C, rm_unsup(Vsx, Sup, UnsupIsErrorFlg, {K,C})}
+ || {{C,Vsx},{C,Sup}} <- lists:zip(Vs,ssh_transport:supported_algorithms(K))
+ ]
end.
+rm_unsup(A, B, Flg, ErrInf) ->
+ case A--B of
+ Unsup=[_|_] when Flg==true -> error({eoptions,
+ {preferred_algorithms,{ErrInf,Unsup}},
+ "Unsupported value(s) found"
+ });
+ Unsup -> A -- Unsup
+ end.
+
+
+error_if_empty([{K,[]}|_]) ->
+ error({eoptions, K, "Empty resulting algorithm list"});
+error_if_empty([{K,[{client2server,[]}, {server2client,[]}]}]) ->
+ error({eoptions, K, "Empty resulting algorithm list"});
+error_if_empty([{K,[{client2server,[]}|_]} | _]) ->
+ error({eoptions, {K,client2server}, "Empty resulting algorithm list"});
+error_if_empty([{K,[_,{server2client,[]}|_]} | _]) ->
+ error({eoptions, {K,server2client}, "Empty resulting algorithm list"});
+error_if_empty([_|T]) ->
+ error_if_empty(T);
+error_if_empty([]) ->
+ ok.
+
%%%----------------------------------------------------------------
forbidden_option(K,V) ->
Txt = io_lib:format("The option '~s' is used internally. The "
diff --git a/lib/ssh/src/ssh_sftp.erl b/lib/ssh/src/ssh_sftp.erl
index c1558a19b1..9e1229dc85 100644
--- a/lib/ssh/src/ssh_sftp.erl
+++ b/lib/ssh/src/ssh_sftp.erl
@@ -1050,7 +1050,7 @@ attr_to_info(A) when is_record(A, ssh_xfer_attr) ->
#file_info{
size = A#ssh_xfer_attr.size,
type = A#ssh_xfer_attr.type,
- access = read_write, %% FIXME: read/write/read_write/none
+ access = file_mode_to_owner_access(A#ssh_xfer_attr.permissions),
atime = unix_to_datetime(A#ssh_xfer_attr.atime),
mtime = unix_to_datetime(A#ssh_xfer_attr.mtime),
ctime = unix_to_datetime(A#ssh_xfer_attr.createtime),
@@ -1062,6 +1062,28 @@ attr_to_info(A) when is_record(A, ssh_xfer_attr) ->
uid = A#ssh_xfer_attr.owner,
gid = A#ssh_xfer_attr.group}.
+file_mode_to_owner_access(FileMode)
+ when is_integer(FileMode) ->
+ %% The file mode contains the access permissions.
+ %% The read and write access permission of file owner
+ %% are located in 8th and 7th bit of file mode respectively.
+
+ ReadPermission = ((FileMode bsr 8) band 1),
+ WritePermission = ((FileMode bsr 7) band 1),
+ case {ReadPermission, WritePermission} of
+ {1, 1} ->
+ read_write;
+ {1, 0} ->
+ read;
+ {0, 1} ->
+ write;
+ {0, 0} ->
+ none;
+ _ ->
+ undefined
+ end;
+file_mode_to_owner_access(_) ->
+ undefined.
unix_to_datetime(undefined) ->
undefined;
diff --git a/lib/ssh/src/ssh_transport.erl b/lib/ssh/src/ssh_transport.erl
index 412f5de9de..c48c0800e4 100644
--- a/lib/ssh/src/ssh_transport.erl
+++ b/lib/ssh/src/ssh_transport.erl
@@ -34,6 +34,8 @@
-export([next_seqnum/1,
supported_algorithms/0, supported_algorithms/1,
default_algorithms/0, default_algorithms/1,
+ algo_classes/0, algo_class/1,
+ algo_two_spec_classes/0, algo_two_spec_class/1,
handle_packet_part/4,
handle_hello_version/1,
key_exchange_init_msg/1,
@@ -81,6 +83,23 @@ default_algorithms() -> [{K,default_algorithms(K)} || K <- algo_classes()].
algo_classes() -> [kex, public_key, cipher, mac, compression].
+algo_class(kex) -> true;
+algo_class(public_key) -> true;
+algo_class(cipher) -> true;
+algo_class(mac) -> true;
+algo_class(compression) -> true;
+algo_class(_) -> false.
+
+
+algo_two_spec_classes() -> [cipher, mac, compression].
+
+algo_two_spec_class(cipher) -> true;
+algo_two_spec_class(mac) -> true;
+algo_two_spec_class(compression) -> true;
+algo_two_spec_class(_) -> false.
+
+
+
default_algorithms(kex) ->
supported_algorithms(kex, [
'diffie-hellman-group1-sha1' % Gone in OpenSSH 7.3.p1
diff --git a/lib/ssh/test/ssh_protocol_SUITE.erl b/lib/ssh/test/ssh_protocol_SUITE.erl
index 0837fe7eaf..7da921adb2 100644
--- a/lib/ssh/test/ssh_protocol_SUITE.erl
+++ b/lib/ssh/test/ssh_protocol_SUITE.erl
@@ -34,8 +34,8 @@
-define(NEWLINE, <<"\r\n">>).
-define(REKEY_DATA_TMO, 65000).
-%%-define(DEFAULT_KEX, 'diffie-hellman-group1-sha1').
-define(DEFAULT_KEX, 'diffie-hellman-group14-sha256').
+-define(EXTRA_KEX, 'diffie-hellman-group1-sha1').
-define(CIPHERS, ['aes256-ctr','aes192-ctr','aes128-ctr','aes128-cbc','3des-cbc']).
-define(DEFAULT_CIPHERS, [{client2server,?CIPHERS}, {server2client,?CIPHERS}]).
@@ -60,7 +60,8 @@ all() ->
{group,authentication},
{group,packet_size_error},
{group,field_size_error},
- {group,ext_info}
+ {group,ext_info},
+ {group,preferred_algorithms}
].
groups() ->
@@ -96,7 +97,13 @@ groups() ->
no_ext_info_s2,
ext_info_s,
ext_info_c
- ]}
+ ]},
+ {preferred_algorithms, [], [preferred_algorithms,
+ modify_append,
+ modify_prepend,
+ modify_rm,
+ modify_combo
+ ]}
].
@@ -701,8 +708,6 @@ ext_info_s(Config) ->
%%%--------------------------------------------------------------------
%%% The client sends the extension
ext_info_c(Config) ->
- {User,_Pwd} = server_user_password(Config),
-
%% Create a listening socket as server socket:
{ok,InitialState} = ssh_trpt_test_lib:exec(listen),
HostPort = ssh_trpt_test_lib:server_host_port(InitialState),
@@ -757,10 +762,135 @@ ext_info_c(Config) ->
{result, Pid, Error} -> ct:fail("Error: ~p",[Error])
end.
+
+%%%----------------------------------------------------------------
+%%%
+preferred_algorithms(Config) ->
+ Ciphers = filter_supported(cipher, ?CIPHERS),
+ {error,{eoptions,{{preferred_algorithms,{kex,[some_unknown_algo]}},
+ "Unsupported value(s) found"}}} =
+ chk_pref_algs(Config,
+ [?DEFAULT_KEX],
+ Ciphers,
+ [{preferred_algorithms, [{kex,[some_unknown_algo,?DEFAULT_KEX]},
+ {cipher,Ciphers}
+ ]}
+ ]).
+
+%%%----------------------------------------------------------------
+%%%
+modify_append(Config) ->
+ Ciphers = filter_supported(cipher, ?CIPHERS),
+ {ok,_} =
+ chk_pref_algs(Config,
+ [?DEFAULT_KEX, ?EXTRA_KEX],
+ Ciphers,
+ [{preferred_algorithms, [{kex,[?DEFAULT_KEX]},
+ {cipher,Ciphers}
+ ]},
+ {modify_algorithms, [{append,[{kex,[some_unknown_algo,?EXTRA_KEX]}]}]}
+ ]).
+
+%%%----------------------------------------------------------------
+%%%
+modify_prepend(Config) ->
+ Ciphers = filter_supported(cipher, ?CIPHERS),
+ {ok,_} =
+ chk_pref_algs(Config,
+ [?EXTRA_KEX, ?DEFAULT_KEX],
+ Ciphers,
+ [{preferred_algorithms, [{kex,[?DEFAULT_KEX]},
+ {cipher,Ciphers}
+ ]},
+ {modify_algorithms, [{prepend,[{kex,[some_unknown_algo,?EXTRA_KEX]}]}]}
+ ]).
+
+%%%----------------------------------------------------------------
+%%%
+modify_rm(Config) ->
+ Ciphers = filter_supported(cipher, ?CIPHERS),
+ {ok,_} =
+ chk_pref_algs(Config,
+ [?DEFAULT_KEX],
+ tl(Ciphers),
+ [{preferred_algorithms, [{kex,[?DEFAULT_KEX,?EXTRA_KEX]},
+ {cipher,Ciphers}
+ ]},
+ {modify_algorithms, [{rm,[{kex,[some_unknown_algo,?EXTRA_KEX]},
+ {cipher,[hd(Ciphers)]}
+ ]}
+ ]}
+ ]).
+
+
+%%%----------------------------------------------------------------
+%%%
+modify_combo(Config) ->
+ Ciphers = filter_supported(cipher, ?CIPHERS),
+ LastC = lists:last(Ciphers),
+ {ok,_} =
+ chk_pref_algs(Config,
+ [?DEFAULT_KEX],
+ [LastC] ++ (tl(Ciphers)--[LastC]) ++ [hd(Ciphers)],
+ [{preferred_algorithms, [{kex,[?DEFAULT_KEX,?EXTRA_KEX]},
+ {cipher,Ciphers}
+ ]},
+ {modify_algorithms, [{rm,[{kex,[some_unknown_algo,?EXTRA_KEX]}
+ ]},
+ {prepend,[{cipher,[{server2client,[LastC]}]}
+ ]},
+ {append,[{cipher,[a,hd(Ciphers),b]}
+ ]}
+ ]}
+ ]).
+
%%%================================================================
%%%==== Internal functions ========================================
%%%================================================================
+chk_pref_algs(Config,
+ ExpectedKex,
+ ExpectedCiphers,
+ ServerPrefOpts) ->
+ %% Start the daemon
+ case ssh_test_lib:daemon(
+ [{send_ext_info,false},
+ {recv_ext_info,false},
+ {system_dir, system_dir(Config)}
+ | ServerPrefOpts])
+ of
+ {_,Host,Port} ->
+ %% Check the Kex part
+ ssh_trpt_test_lib:exec(
+ [{set_options, [print_ops, {print_messages,detail}]},
+ {connect, Host, Port,
+ [{silently_accept_hosts, true},
+ {user_dir, user_dir(Config)},
+ {user_interaction, false}
+ ]},
+ {send, hello},
+ receive_hello,
+ {match,
+ #ssh_msg_kexinit{
+ kex_algorithms = to_lists(ExpectedKex),
+ encryption_algorithms_server_to_client = to_lists(ExpectedCiphers),
+ _ = '_'},
+ receive_msg}
+ ]);
+ Error ->
+ Error
+ end.
+
+
+filter_supported(K, Algs) -> Algs -- (Algs--supported(K)).
+
+supported(K) -> proplists:get_value(
+ server2client,
+ ssh_transport:supported_algorithms(cipher)).
+
+to_lists(L) -> lists:map(fun erlang:atom_to_list/1, L).
+
+
%%%---- init_suite and end_suite ---------------------------------------
start_apps(Config) ->
catch ssh:stop(),
diff --git a/lib/ssh/test/ssh_sftp_SUITE.erl b/lib/ssh/test/ssh_sftp_SUITE.erl
index 680a8ef52e..7aa3d8a00a 100644
--- a/lib/ssh/test/ssh_sftp_SUITE.erl
+++ b/lib/ssh/test/ssh_sftp_SUITE.erl
@@ -92,7 +92,7 @@ groups() ->
{write_read_tests, [], [open_close_file, open_close_dir, read_file, read_dir,
write_file, write_file_iolist, write_big_file, sftp_read_big_file,
rename_file, mk_rm_dir, remove_file, links,
- retrieve_attributes, set_attributes, async_read,
+ retrieve_attributes, set_attributes, file_owner_access, async_read,
async_write, position, pos_read, pos_write,
start_channel_sock
]}
@@ -521,7 +521,36 @@ set_attributes(Config) when is_list(Config) ->
ok = file:write_file(FileName, "hello again").
%%--------------------------------------------------------------------
+file_owner_access() ->
+ [{doc,"Test file user access validity"}].
+file_owner_access(Config) when is_list(Config) ->
+ case os:type() of
+ {win32, _} ->
+ {skip, "Not a relevant test on Windows"};
+ _ ->
+ FileName = proplists:get_value(filename, Config),
+ {Sftp, _} = proplists:get_value(sftp, Config),
+
+ {ok, #file_info{mode = InitialMode}} = ssh_sftp:read_file_info(Sftp, FileName),
+
+ ok = ssh_sftp:write_file_info(Sftp, FileName, #file_info{mode=8#000}),
+ {ok, #file_info{access = none}} = ssh_sftp:read_file_info(Sftp, FileName),
+
+ ok = ssh_sftp:write_file_info(Sftp, FileName, #file_info{mode=8#400}),
+ {ok, #file_info{access = read}} = ssh_sftp:read_file_info(Sftp, FileName),
+
+ ok = ssh_sftp:write_file_info(Sftp, FileName, #file_info{mode=8#200}),
+ {ok, #file_info{access = write}} = ssh_sftp:read_file_info(Sftp, FileName),
+ ok = ssh_sftp:write_file_info(Sftp, FileName, #file_info{mode=8#600}),
+ {ok, #file_info{access = read_write}} = ssh_sftp:read_file_info(Sftp, FileName),
+
+ ok = ssh_sftp:write_file_info(Sftp, FileName, #file_info{mode=InitialMode}),
+
+ ok
+ end.
+
+%%--------------------------------------------------------------------
async_read() ->
[{doc,"Test API aread/3"}].
async_read(Config) when is_list(Config) ->
diff --git a/lib/ssh/vsn.mk b/lib/ssh/vsn.mk
index 7208baca6e..006228f8e7 100644
--- a/lib/ssh/vsn.mk
+++ b/lib/ssh/vsn.mk
@@ -1,5 +1,5 @@
#-*-makefile-*- ; force emacs to enter makefile-mode
-SSH_VSN = 4.5
+SSH_VSN = 4.5.1
APP_VSN = "ssh-$(SSH_VSN)"
diff --git a/lib/ssl/src/dtls_connection.erl b/lib/ssl/src/dtls_connection.erl
index e8cfbbe2e3..ff3e69bae5 100644
--- a/lib/ssl/src/dtls_connection.erl
+++ b/lib/ssl/src/dtls_connection.erl
@@ -48,7 +48,7 @@
select_sni_extension/1]).
%% Alert and close handling
--export([encode_alert/3,send_alert/2, close/5]).
+-export([encode_alert/3,send_alert/2, close/5, protocol_name/0]).
%% Data handling
@@ -208,6 +208,9 @@ setopts(Transport, Socket, Other) ->
getopts(Transport, Socket, Tag) ->
dtls_socket:getopts(Transport, Socket, Tag).
+protocol_name() ->
+ "DTLS".
+
%%====================================================================
%% tls_connection_sup API
%%====================================================================
@@ -273,7 +276,9 @@ init({call, _} = Type, Event, #state{role = server, transport_cb = gen_udp} = St
Result = ssl_connection:init(Type, Event,
State#state{flight_state = {retransmit, ?INITIAL_RETRANSMIT_TIMEOUT},
protocol_specific = #{current_cookie_secret => dtls_v1:cookie_secret(),
- previous_cookie_secret => <<>>}},
+ previous_cookie_secret => <<>>,
+ ignored_alerts => 0,
+ max_ignored_alerts => 10}},
?MODULE),
erlang:send_after(dtls_v1:cookie_timeout(), self(), new_cookie_secret),
Result;
@@ -371,7 +376,7 @@ hello(internal, #server_hello{} = Hello,
ssl_options = SslOptions} = State) ->
case dtls_handshake:hello(Hello, SslOptions, ConnectionStates0, Renegotiation) of
#alert{} = Alert ->
- ssl_connection:handle_own_alert(Alert, ReqVersion, hello, State);
+ handle_own_alert(Alert, ReqVersion, hello, State);
{Version, NewId, ConnectionStates, ProtoExt, Protocol} ->
ssl_connection:handle_session(Hello,
Version, NewId, ConnectionStates, ProtoExt, Protocol, State)
@@ -543,7 +548,7 @@ handle_call(Event, From, StateName, State) ->
handle_common_event(internal, #alert{} = Alert, StateName,
#state{negotiated_version = Version} = State) ->
- ssl_connection:handle_own_alert(Alert, Version, StateName, State);
+ handle_own_alert(Alert, Version, StateName, State);
%%% DTLS record protocol level handshake messages
handle_common_event(internal, #ssl_tls{type = ?HANDSHAKE,
fragment = Data},
@@ -562,7 +567,7 @@ handle_common_event(internal, #ssl_tls{type = ?HANDSHAKE,
State#state{unprocessed_handshake_events = unprocessed_events(Events)}, Events}
end
catch throw:#alert{} = Alert ->
- ssl_connection:handle_own_alert(Alert, Version, StateName, State0)
+ handle_own_alert(Alert, Version, StateName, State0)
end;
%%% DTLS record protocol level application data messages
handle_common_event(internal, #ssl_tls{type = ?APPLICATION_DATA, fragment = Data}, StateName, State) ->
@@ -577,7 +582,7 @@ handle_common_event(internal, #ssl_tls{type = ?ALERT, fragment = EncAlerts}, Sta
Alerts = [_|_] ->
handle_alerts(Alerts, {next_state, StateName, State});
#alert{} = Alert ->
- ssl_connection:handle_own_alert(Alert, Version, StateName, State)
+ handle_own_alert(Alert, Version, StateName, State)
end;
%% Ignore unknown TLS record level protocol messages
handle_common_event(internal, #ssl_tls{type = _Unknown}, StateName, State) ->
@@ -629,7 +634,7 @@ handle_client_hello(#client_hello{client_version = ClientVersion} = Hello,
case dtls_handshake:hello(Hello, SslOpts, {Port, Session0, Cache, CacheCb,
ConnectionStates0, Cert, KeyExAlg}, Renegotiation) of
#alert{} = Alert ->
- ssl_connection:handle_own_alert(Alert, ClientVersion, hello, State0);
+ handle_own_alert(Alert, ClientVersion, hello, State0);
{Version, {Type, Session},
ConnectionStates, Protocol0, ServerHelloExt, HashSign} ->
Protocol = case Protocol0 of
@@ -964,3 +969,54 @@ unprocessed_events(Events) ->
%% process more TLS-records received on the socket.
erlang:length(Events)-1.
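+
+%% For DTLS over UDP, certain fatal alerts are not sent but are counted and
+%% ignored (up to max_ignored_alerts) as a DoS-attack mitigation, see
+%% ignore_alert/2 and is_ignore_alert/1 below.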
+handle_own_alert(Alert, Version, StateName, #state{transport_cb = gen_udp,
+ role = Role,
+ ssl_options = Options} = State0) ->
+ case ignore_alert(Alert, State0) of
+ {true, State} ->
+ log_ignore_alert(Options#ssl_options.log_alert, StateName, Alert, Role),
+ {next_state, StateName, State};
+ {false, State} ->
+ ssl_connection:handle_own_alert(Alert, Version, StateName, State)
+ end;
+handle_own_alert(Alert, Version, StateName, State) ->
+ ssl_connection:handle_own_alert(Alert, Version, StateName, State).
+
+
+ignore_alert(#alert{level = ?FATAL}, #state{protocol_specific = #{ignored_alerts := N,
+ max_ignored_alerts := N}} = State) ->
+ {false, State};
+ignore_alert(#alert{level = ?FATAL} = Alert,
+ #state{protocol_specific = #{ignored_alerts := N} = PS} = State) ->
+ case is_ignore_alert(Alert) of
+ true ->
+ {true, State#state{protocol_specific = PS#{ignored_alerts => N+1}}};
+ false ->
+ {false, State}
+ end;
+ignore_alert(_, State) ->
+ {false, State}.
+
+%% RFC 6347 4.1.2.7. Handling Invalid Records
+%% recommends silently ignoring invalid DTLS records when
+%% UDP is the transport. Note that we do not support compression, so there is
+%% no need to include ?DECOMPRESSION_FAILURE
+is_ignore_alert(#alert{description = ?BAD_RECORD_MAC}) ->
+ true;
+is_ignore_alert(#alert{description = ?RECORD_OVERFLOW}) ->
+ true;
+is_ignore_alert(#alert{description = ?DECODE_ERROR}) ->
+ true;
+is_ignore_alert(#alert{description = ?DECRYPT_ERROR}) ->
+ true;
+is_ignore_alert(#alert{description = ?ILLEGAL_PARAMETER}) ->
+ true;
+is_ignore_alert(_) ->
+ false.
+
+log_ignore_alert(true, StateName, Alert, Role) ->
+ Txt = ssl_alert:alert_txt(Alert),
+ error_logger:format("DTLS over UDP ~p: In state ~p, suppressed sending of ALERT ~s as DoS-attack mitigation \n",
+ [Role, StateName, Txt]);
+log_ignore_alert(false, _, _,_) ->
+ ok.
diff --git a/lib/ssl/src/dtls_socket.erl b/lib/ssl/src/dtls_socket.erl
index fbbd479428..5f854fbb4b 100644
--- a/lib/ssl/src/dtls_socket.erl
+++ b/lib/ssl/src/dtls_socket.erl
@@ -137,7 +137,7 @@ internal_inet_values() ->
[{active, false}, {mode,binary}].
default_inet_values() ->
- [{active, true}, {mode, list}].
+ [{active, true}, {mode, list}, {packet, 0}, {packet_size, 0}].
default_cb_info() ->
{gen_udp, udp, udp_closed, udp_error}.
@@ -149,8 +149,12 @@ get_emulated_opts(EmOpts, EmOptNames) ->
emulated_socket_options(InetValues, #socket_options{
mode = Mode,
+ packet = Packet,
+ packet_size = PacketSize,
active = Active}) ->
#socket_options{
mode = proplists:get_value(mode, InetValues, Mode),
+ packet = proplists:get_value(packet, InetValues, Packet),
+ packet_size = proplists:get_value(packet_size, InetValues, PacketSize),
active = proplists:get_value(active, InetValues, Active)
}.
diff --git a/lib/ssl/src/ssl.erl b/lib/ssl/src/ssl.erl
index 75eb308ba5..4e592c02ec 100644
--- a/lib/ssl/src/ssl.erl
+++ b/lib/ssl/src/ssl.erl
@@ -569,7 +569,7 @@ renegotiate(#sslsocket{pid = {Listen,_}}) when is_port(Listen) ->
%%--------------------------------------------------------------------
-spec prf(#sslsocket{}, binary() | 'master_secret', binary(),
- binary() | prf_random(), non_neg_integer()) ->
+ [binary() | prf_random()], non_neg_integer()) ->
{ok, binary()} | {error, reason()}.
%%
%% Description: use a ssl sessions TLS PRF to generate key material
@@ -713,6 +713,13 @@ handle_options(Opts0, Role, Host) ->
Protocol = handle_option(protocol, Opts, tls),
+ case Versions of
+ [{3, 0}] ->
+ reject_alpn_next_prot_options(Opts);
+ _ ->
+ ok
+ end,
+
SSLOptions = #ssl_options{
versions = Versions,
verify = validate_option(verify, Verify),
@@ -809,7 +816,7 @@ handle_options(Opts0, Role, Host) ->
ConnetionCb = connection_cb(Opts),
{ok, #config{ssl = SSLOptions, emulated = Emulated, inet_ssl = Sock,
- inet_user = SockOpts, transport_info = CbInfo, connection_cb = ConnetionCb
+ inet_user = Sock, transport_info = CbInfo, connection_cb = ConnetionCb
}}.
@@ -956,55 +963,32 @@ validate_option(hibernate_after, Value) when is_integer(Value), Value >= 0 ->
validate_option(erl_dist,Value) when is_boolean(Value) ->
Value;
-validate_option(Opt, Value)
- when Opt =:= alpn_advertised_protocols orelse Opt =:= alpn_preferred_protocols,
- is_list(Value) ->
- case tls_record:highest_protocol_version([]) of
- {3,0} ->
- throw({error, {options, {not_supported_in_sslv3, {Opt, Value}}}});
- _ ->
- validate_binary_list(Opt, Value),
- Value
- end;
+validate_option(Opt, Value) when Opt =:= alpn_advertised_protocols orelse Opt =:= alpn_preferred_protocols,
+ is_list(Value) ->
+ validate_binary_list(Opt, Value),
+ Value;
validate_option(Opt, Value)
when Opt =:= alpn_advertised_protocols orelse Opt =:= alpn_preferred_protocols,
Value =:= undefined ->
undefined;
-validate_option(client_preferred_next_protocols = Opt, {Precedence, PreferredProtocols} = Value)
+validate_option(client_preferred_next_protocols, {Precedence, PreferredProtocols})
when is_list(PreferredProtocols) ->
- case tls_record:highest_protocol_version([]) of
- {3,0} ->
- throw({error, {options, {not_supported_in_sslv3, {Opt, Value}}}});
- _ ->
- validate_binary_list(client_preferred_next_protocols, PreferredProtocols),
- validate_npn_ordering(Precedence),
- {Precedence, PreferredProtocols, ?NO_PROTOCOL}
- end;
-validate_option(client_preferred_next_protocols = Opt, {Precedence, PreferredProtocols, Default} = Value)
- when is_list(PreferredProtocols), is_binary(Default),
- byte_size(Default) > 0, byte_size(Default) < 256 ->
- case tls_record:highest_protocol_version([]) of
- {3,0} ->
- throw({error, {options, {not_supported_in_sslv3, {Opt, Value}}}});
- _ ->
- validate_binary_list(client_preferred_next_protocols, PreferredProtocols),
- validate_npn_ordering(Precedence),
- Value
- end;
-
+ validate_binary_list(client_preferred_next_protocols, PreferredProtocols),
+ validate_npn_ordering(Precedence),
+ {Precedence, PreferredProtocols, ?NO_PROTOCOL};
+validate_option(client_preferred_next_protocols, {Precedence, PreferredProtocols, Default} = Value)
+ when is_list(PreferredProtocols), is_binary(Default),
+ byte_size(Default) > 0, byte_size(Default) < 256 ->
+ validate_binary_list(client_preferred_next_protocols, PreferredProtocols),
+ validate_npn_ordering(Precedence),
+ Value;
validate_option(client_preferred_next_protocols, undefined) ->
undefined;
validate_option(log_alert, Value) when is_boolean(Value) ->
Value;
-validate_option(next_protocols_advertised = Opt, Value) when is_list(Value) ->
- case tls_record:highest_protocol_version([]) of
- {3,0} ->
- throw({error, {options, {not_supported_in_sslv3, {Opt, Value}}}});
- _ ->
- validate_binary_list(next_protocols_advertised, Value),
- Value
- end;
-
+validate_option(next_protocols_advertised, Value) when is_list(Value) ->
+ validate_binary_list(next_protocols_advertised, Value),
+ Value;
validate_option(next_protocols_advertised, undefined) ->
undefined;
validate_option(server_name_indication = Opt, Value) when is_list(Value) ->
@@ -1483,3 +1467,22 @@ server_name_indication_default(Host) when is_list(Host) ->
Host;
server_name_indication_default(_) ->
undefined.
+
+
+reject_alpn_next_prot_options(Opts) ->
+ AlpnNextOpts = [alpn_advertised_protocols,
+ alpn_preferred_protocols,
+ next_protocols_advertised,
+ next_protocol_selector,
+ client_preferred_next_protocols],
+ reject_alpn_next_prot_options(AlpnNextOpts, Opts).
+
+reject_alpn_next_prot_options([], _) ->
+ ok;
+reject_alpn_next_prot_options([Opt| AlpnNextOpts], Opts) ->
+ case lists:keyfind(Opt, 1, Opts) of
+ {Opt, Value} ->
+ throw({error, {options, {not_supported_in_sslv3, {Opt, Value}}}});
+ false ->
+ reject_alpn_next_prot_options(AlpnNextOpts, Opts)
+ end.
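
With this check in place, combining an SSLv3-only version list with any ALPN or NPN option fails at option handling, before any socket is opened. A rough usage sketch, assuming the ssl application is started (host, port and protocol name are placeholders; the error tuple shape follows reject_alpn_next_prot_options/2 above):

{error, {options, {not_supported_in_sslv3,
                   {alpn_advertised_protocols, [<<"h2">>]}}}} =
    (catch ssl:connect("localhost", 9443,
                       [{versions, [sslv3]},
                        {alpn_advertised_protocols, [<<"h2">>]}])).
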
diff --git a/lib/ssl/src/ssl_alert.erl b/lib/ssl/src/ssl_alert.erl
index 696a55e4b9..db415a3666 100644
--- a/lib/ssl/src/ssl_alert.erl
+++ b/lib/ssl/src/ssl_alert.erl
@@ -32,7 +32,7 @@
-include("ssl_record.hrl").
-include("ssl_internal.hrl").
--export([decode/1, alert_txt/1, reason_code/2]).
+-export([decode/1, own_alert_txt/1, alert_txt/1, reason_code/2]).
%%====================================================================
%% Internal application API
@@ -57,16 +57,32 @@ decode(Bin) ->
reason_code(#alert{description = ?CLOSE_NOTIFY}, _) ->
closed;
reason_code(#alert{description = Description}, _) ->
- {tls_alert, description_txt(Description)}.
+ {tls_alert, string:to_lower(description_txt(Description))}.
+
+%%--------------------------------------------------------------------
+-spec own_alert_txt(#alert{}) -> string().
+%%
+%% Description: Returns the error string for a given alert generated
+%% by the Erlang implementation.
+%%--------------------------------------------------------------------
+own_alert_txt(#alert{level = Level, description = Description, where = {Mod,Line}, reason = undefined, role = Role}) ->
+ "at " ++ Mod ++ ":" ++ integer_to_list(Line) ++ " generated " ++ string:to_upper(atom_to_list(Role)) ++ " ALERT: " ++
+ level_txt(Level) ++ description_txt(Description);
+own_alert_txt(#alert{reason = Reason} = Alert) ->
+ BaseTxt = own_alert_txt(Alert#alert{reason = undefined}),
+ FormatDepth = 9, % Some limit on printed representation of an error
+ ReasonTxt = lists:flatten(io_lib:format("~P", [Reason, FormatDepth])),
+ BaseTxt ++ " - " ++ ReasonTxt.
%%--------------------------------------------------------------------
-spec alert_txt(#alert{}) -> string().
%%
-%% Description: Returns the error string for given alert.
+%% Description: Returns the error string for a given alert received from
+%% the peer.
%%--------------------------------------------------------------------
-alert_txt(#alert{level = Level, description = Description, where = {Mod,Line}, reason = undefined}) ->
- Mod ++ ":" ++ integer_to_list(Line) ++ ":" ++
- level_txt(Level) ++" "++ description_txt(Description);
+alert_txt(#alert{level = Level, description = Description, reason = undefined, role = Role}) ->
+ "received " ++ string:to_upper(atom_to_list(Role)) ++ " ALERT: " ++
+ level_txt(Level) ++ description_txt(Description);
alert_txt(#alert{reason = Reason} = Alert) ->
BaseTxt = alert_txt(Alert#alert{reason = undefined}),
FormatDepth = 9, % Some limit on printed representation of an error
@@ -93,73 +109,73 @@ decode(<<>>, Acc, _) ->
lists:reverse(Acc, []).
level_txt(?WARNING) ->
- "Warning:";
+ "Warning - ";
level_txt(?FATAL) ->
- "Fatal error:".
+ "Fatal - ".
description_txt(?CLOSE_NOTIFY) ->
- "close notify";
+ "Close Notify";
description_txt(?UNEXPECTED_MESSAGE) ->
- "unexpected message";
+ "Unexpected Message";
description_txt(?BAD_RECORD_MAC) ->
- "bad record mac";
-description_txt(?DECRYPTION_FAILED) ->
- "decryption failed";
+ "Bad Record MAC";
+description_txt(?DECRYPTION_FAILED_RESERVED) ->
+ "Decryption Failed Reserved";
description_txt(?RECORD_OVERFLOW) ->
- "record overflow";
+ "Record Overflow";
description_txt(?DECOMPRESSION_FAILURE) ->
- "decompression failure";
+ "Decompression Failure";
description_txt(?HANDSHAKE_FAILURE) ->
- "handshake failure";
+ "Handshake Failure";
description_txt(?NO_CERTIFICATE_RESERVED) ->
- "No certificate reserved";
+ "No Certificate Reserved";
description_txt(?BAD_CERTIFICATE) ->
- "bad certificate";
+ "Bad Certificate";
description_txt(?UNSUPPORTED_CERTIFICATE) ->
- "unsupported certificate";
+ "Unsupported Certificate";
description_txt(?CERTIFICATE_REVOKED) ->
- "certificate revoked";
+ "Certificate Revoked";
description_txt(?CERTIFICATE_EXPIRED) ->
- "certificate expired";
+ "Certificate Expired";
description_txt(?CERTIFICATE_UNKNOWN) ->
- "certificate unknown";
+ "Certificate Unknown";
description_txt(?ILLEGAL_PARAMETER) ->
- "illegal parameter";
+ "Illegal Parameter";
description_txt(?UNKNOWN_CA) ->
- "unknown ca";
+ "Unknown CA";
description_txt(?ACCESS_DENIED) ->
- "access denied";
+ "Access Denied";
description_txt(?DECODE_ERROR) ->
- "decode error";
+ "Decode Error";
description_txt(?DECRYPT_ERROR) ->
- "decrypt error";
+ "Decrypt Error";
description_txt(?EXPORT_RESTRICTION) ->
- "export restriction";
+ "Export Restriction";
description_txt(?PROTOCOL_VERSION) ->
- "protocol version";
+ "Protocol Version";
description_txt(?INSUFFICIENT_SECURITY) ->
- "insufficient security";
+ "Insufficient Security";
description_txt(?INTERNAL_ERROR) ->
- "internal error";
+ "Internal Error";
description_txt(?USER_CANCELED) ->
- "user canceled";
+ "User Canceled";
description_txt(?NO_RENEGOTIATION) ->
- "no renegotiation";
+ "No Renegotiation";
description_txt(?UNSUPPORTED_EXTENSION) ->
- "unsupported extension";
+ "Unsupported Extension";
description_txt(?CERTIFICATE_UNOBTAINABLE) ->
- "certificate unobtainable";
+ "Certificate Unobtainable";
description_txt(?UNRECOGNISED_NAME) ->
- "unrecognised name";
+ "Unrecognised Name";
description_txt(?BAD_CERTIFICATE_STATUS_RESPONSE) ->
- "bad certificate status response";
+ "Bad Certificate Status Response";
description_txt(?BAD_CERTIFICATE_HASH_VALUE) ->
- "bad certificate hash value";
+ "Bad Certificate Hash Value";
description_txt(?UNKNOWN_PSK_IDENTITY) ->
- "unknown psk identity";
+ "Unknown Psk Identity";
description_txt(?INAPPROPRIATE_FALLBACK) ->
- "inappropriate fallback";
+ "Inappropriate Fallback";
description_txt(?NO_APPLICATION_PROTOCOL) ->
- "no application protocol";
+ "No application protocol";
description_txt(Enum) ->
lists:flatten(io_lib:format("unsupported/unknown alert: ~p", [Enum])).
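
Taken together, the new role field and the reworked level/description strings give alert log text of the following shape. A small shell sketch that rebuilds the layout from plain strings (module name, line number and the chosen alert are examples only):

Role = server,
Own = "at tls_connection.erl:123 generated "
      ++ string:to_upper(atom_to_list(Role))
      ++ " ALERT: " ++ "Fatal - " ++ "Handshake Failure",
%% Own      -> "at tls_connection.erl:123 generated SERVER ALERT: Fatal - Handshake Failure"
Received = "received " ++ string:to_upper(atom_to_list(Role))
           ++ " ALERT: " ++ "Fatal - " ++ "Handshake Failure".
%% Received -> "received SERVER ALERT: Fatal - Handshake Failure"
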
diff --git a/lib/ssl/src/ssl_alert.hrl b/lib/ssl/src/ssl_alert.hrl
index f3743ba0f0..35670edea5 100644
--- a/lib/ssl/src/ssl_alert.hrl
+++ b/lib/ssl/src/ssl_alert.hrl
@@ -40,7 +40,7 @@
%% close_notify(0),
%% unexpected_message(10),
%% bad_record_mac(20),
-%% decryption_failed(21),
+%% decryption_failed_reserved(21),
%% record_overflow(22),
%% decompression_failure(30),
%% handshake_failure(40),
@@ -78,7 +78,7 @@
-define(CLOSE_NOTIFY, 0).
-define(UNEXPECTED_MESSAGE, 10).
-define(BAD_RECORD_MAC, 20).
--define(DECRYPTION_FAILED, 21).
+-define(DECRYPTION_FAILED_RESERVED, 21).
-define(RECORD_OVERFLOW, 22).
-define(DECOMPRESSION_FAILURE, 30).
-define(HANDSHAKE_FAILURE, 40).
@@ -118,6 +118,7 @@
level,
description,
where = {?FILE, ?LINE},
+ role,
reason
}).
-endif. % -ifdef(ssl_alert).
diff --git a/lib/ssl/src/ssl_cipher.erl b/lib/ssl/src/ssl_cipher.erl
index bd60197c88..50c5f0d755 100644
--- a/lib/ssl/src/ssl_cipher.erl
+++ b/lib/ssl/src/ssl_cipher.erl
@@ -335,7 +335,9 @@ all_suites(Version) ->
anonymous_suites({3, N}) ->
anonymous_suites(N);
-
+anonymous_suites({254, _} = Version) ->
+ anonymous_suites(dtls_v1:corresponding_tls_version(Version))
+ -- [?TLS_DH_anon_WITH_RC4_128_MD5];
anonymous_suites(N)
when N >= 3 ->
[?TLS_DH_anon_WITH_AES_128_GCM_SHA256,
diff --git a/lib/ssl/src/ssl_connection.erl b/lib/ssl/src/ssl_connection.erl
index fb87662c7b..b031d3d47b 100644
--- a/lib/ssl/src/ssl_connection.erl
+++ b/lib/ssl/src/ssl_connection.erl
@@ -264,7 +264,7 @@ renegotiation(ConnectionPid) ->
%%--------------------------------------------------------------------
-spec prf(pid(), binary() | 'master_secret', binary(),
- binary() | ssl:prf_random(), non_neg_integer()) ->
+ [binary() | ssl:prf_random()], non_neg_integer()) ->
{ok, binary()} | {error, reason()} | {'EXIT', term()}.
%%
%% Description: Use an SSL session's TLS PRF to generate key material
@@ -673,10 +673,11 @@ cipher(internal, #certificate_verify{signature = Signature,
tls_handshake_history = Handshake
} = State0, Connection) ->
+ TLSVersion = ssl:tls_version(Version),
%% Use negotiated value if TLS-1.2, otherwise return default
- HashSign = negotiated_hashsign(CertHashSign, KexAlg, PublicKeyInfo, Version),
+ HashSign = negotiated_hashsign(CertHashSign, KexAlg, PublicKeyInfo, TLSVersion),
case ssl_handshake:certificate_verify(Signature, PublicKeyInfo,
- ssl:tls_version(Version), HashSign, MasterSecret, Handshake) of
+ TLSVersion, HashSign, MasterSecret, Handshake) of
valid ->
{Record, State} = Connection:next_record(State0),
Connection:next_event(cipher, Record,
@@ -1143,7 +1144,8 @@ handle_alert(#alert{level = ?FATAL} = Alert, StateName,
port = Port, session = Session, user_application = {_Mon, Pid},
role = Role, socket_options = Opts, tracker = Tracker}) ->
invalidate_session(Role, Host, Port, Session),
- log_alert(SslOpts#ssl_options.log_alert, StateName, Alert),
+ log_alert(SslOpts#ssl_options.log_alert, Role, Connection:protocol_name(),
+ StateName, Alert#alert{role = opposite_role(Role)}),
alert_user(Transport, Tracker, Socket, StateName, Opts, Pid, From, Alert, Role, Connection),
{stop, normal};
@@ -1153,15 +1155,18 @@ handle_alert(#alert{level = ?WARNING, description = ?CLOSE_NOTIFY} = Alert,
{stop, {shutdown, peer_close}};
handle_alert(#alert{level = ?WARNING, description = ?NO_RENEGOTIATION} = Alert, StateName,
- #state{ssl_options = SslOpts, renegotiation = {true, internal}} = State) ->
- log_alert(SslOpts#ssl_options.log_alert, StateName, Alert),
+ #state{role = Role, ssl_options = SslOpts, protocol_cb = Connection, renegotiation = {true, internal}} = State) ->
+ log_alert(SslOpts#ssl_options.log_alert, Role,
+ Connection:protocol_name(), StateName, Alert#alert{role = opposite_role(Role)}),
handle_normal_shutdown(Alert, StateName, State),
{stop, {shutdown, peer_close}};
handle_alert(#alert{level = ?WARNING, description = ?NO_RENEGOTIATION} = Alert, StateName,
- #state{ssl_options = SslOpts, renegotiation = {true, From},
+ #state{role = Role,
+ ssl_options = SslOpts, renegotiation = {true, From},
protocol_cb = Connection} = State0) ->
- log_alert(SslOpts#ssl_options.log_alert, StateName, Alert),
+ log_alert(SslOpts#ssl_options.log_alert, Role,
+ Connection:protocol_name(), StateName, Alert#alert{role = opposite_role(Role)}),
gen_statem:reply(From, {error, renegotiation_rejected}),
{Record, State} = Connection:next_record(State0),
%% Go back to connection!
@@ -1169,8 +1174,9 @@ handle_alert(#alert{level = ?WARNING, description = ?NO_RENEGOTIATION} = Alert,
%% Gracefully log and ignore all other warning alerts
handle_alert(#alert{level = ?WARNING} = Alert, StateName,
- #state{ssl_options = SslOpts, protocol_cb = Connection} = State0) ->
- log_alert(SslOpts#ssl_options.log_alert, StateName, Alert),
+ #state{ssl_options = SslOpts, protocol_cb = Connection, role = Role} = State0) ->
+ log_alert(SslOpts#ssl_options.log_alert, Role,
+ Connection:protocol_name(), StateName, Alert#alert{role = opposite_role(Role)}),
{Record, State} = Connection:next_record(State0),
Connection:next_event(StateName, Record, State).
@@ -2370,18 +2376,22 @@ alert_user(Transport, Tracker, Socket, Active, Pid, From, Alert, Role, Connectio
Transport, Socket, Connection, Tracker), ReasonCode})
end.
-log_alert(true, Info, Alert) ->
+log_alert(true, Role, ProtocolName, StateName, #alert{role = Role} = Alert) ->
+ Txt = ssl_alert:own_alert_txt(Alert),
+ error_logger:info_report(io_lib:format("~s ~p: In state ~p ~s\n", [ProtocolName, Role, StateName, Txt]));
+log_alert(true, Role, ProtocolName, StateName, Alert) ->
Txt = ssl_alert:alert_txt(Alert),
- error_logger:format("SSL: ~p: ~s\n", [Info, Txt]);
-log_alert(false, _, _) ->
+ error_logger:info_report(io_lib:format("~s ~p: In state ~p ~s\n", [ProtocolName, Role, StateName, Txt]));
+log_alert(false, _, _, _, _) ->
ok.
handle_own_alert(Alert, Version, StateName,
- #state{transport_cb = Transport,
- socket = Socket,
- protocol_cb = Connection,
- connection_states = ConnectionStates,
- ssl_options = SslOpts} = State) ->
+ #state{role = Role,
+ transport_cb = Transport,
+ socket = Socket,
+ protocol_cb = Connection,
+ connection_states = ConnectionStates,
+ ssl_options = SslOpts} = State) ->
try %% Try to tell the other side
{BinMsg, _} =
Connection:encode_alert(Alert, Version, ConnectionStates),
@@ -2390,7 +2400,7 @@ handle_own_alert(Alert, Version, StateName,
ignore
end,
try %% Try to tell the local user
- log_alert(SslOpts#ssl_options.log_alert, StateName, Alert),
+ log_alert(SslOpts#ssl_options.log_alert, Role, Connection:protocol_name(), StateName, Alert#alert{role = Role}),
handle_normal_shutdown(Alert,StateName, State)
catch _:_ ->
ok
diff --git a/lib/ssl/src/ssl_handshake.erl b/lib/ssl/src/ssl_handshake.erl
index 3cf466e78f..b1661624b5 100644
--- a/lib/ssl/src/ssl_handshake.erl
+++ b/lib/ssl/src/ssl_handshake.erl
@@ -415,9 +415,11 @@ certify(#certificate{asn1_certificates = ASN1Certs}, CertDbHandle, CertDbRef,
path_validation_alert(Reason)
end
catch
- error:_ ->
+ error:{badmatch,{asn1, Asn1Reason}} ->
%% ASN-1 decode of certificate somehow failed
- ?ALERT_REC(?FATAL, ?CERTIFICATE_UNKNOWN, failed_to_decode_certificate)
+ ?ALERT_REC(?FATAL, ?CERTIFICATE_UNKNOWN, {failed_to_decode_certificate, Asn1Reason});
+ error:OtherReason ->
+ ?ALERT_REC(?FATAL, ?INTERNAL_ERROR, {unexpected_error, OtherReason})
end.
%%--------------------------------------------------------------------
@@ -1611,8 +1613,11 @@ path_validation_alert({bad_cert, unknown_critical_extension}) ->
?ALERT_REC(?FATAL, ?UNSUPPORTED_CERTIFICATE);
path_validation_alert({bad_cert, {revoked, _}}) ->
?ALERT_REC(?FATAL, ?CERTIFICATE_REVOKED);
-path_validation_alert({bad_cert, revocation_status_undetermined}) ->
- ?ALERT_REC(?FATAL, ?BAD_CERTIFICATE);
+%%path_validation_alert({bad_cert, revocation_status_undetermined}) ->
+%% ?ALERT_REC(?FATAL, ?BAD_CERTIFICATE);
+path_validation_alert({bad_cert, {revocation_status_undetermined, Details}}) ->
+ Alert = ?ALERT_REC(?FATAL, ?BAD_CERTIFICATE),
+ Alert#alert{reason = Details};
path_validation_alert({bad_cert, selfsigned_peer}) ->
?ALERT_REC(?FATAL, ?BAD_CERTIFICATE);
path_validation_alert({bad_cert, unknown_ca}) ->
@@ -2189,7 +2194,8 @@ crl_check(OtpCert, Check, CertDbHandle, CertDbRef, {Callback, CRLDbHandle}, _, C
ssl_crl:trusted_cert_and_path(CRL, Issuer, {CertPath,
DBInfo})
end, {CertDbHandle, CertDbRef}}},
- {update_crl, fun(DP, CRL) -> Callback:fresh_crl(DP, CRL) end}
+ {update_crl, fun(DP, CRL) -> Callback:fresh_crl(DP, CRL) end},
+ {undetermined_details, true}
],
case dps_and_crls(OtpCert, Callback, CRLDbHandle, ext) of
no_dps ->
@@ -2199,7 +2205,7 @@ crl_check(OtpCert, Check, CertDbHandle, CertDbRef, {Callback, CRLDbHandle}, _, C
DpsAndCRLs -> %% This DP list may be empty if relevant CRLs existed
%% but could not be retrieved, will result in {bad_cert, revocation_status_undetermined}
case public_key:pkix_crls_validate(OtpCert, DpsAndCRLs, Options) of
- {bad_cert, revocation_status_undetermined} ->
+ {bad_cert, {revocation_status_undetermined, _}} ->
crl_check_same_issuer(OtpCert, Check, dps_and_crls(OtpCert, Callback,
CRLDbHandle, same_issuer), Options);
Other ->
@@ -2209,7 +2215,7 @@ crl_check(OtpCert, Check, CertDbHandle, CertDbRef, {Callback, CRLDbHandle}, _, C
crl_check_same_issuer(OtpCert, best_effort, Dps, Options) ->
case public_key:pkix_crls_validate(OtpCert, Dps, Options) of
- {bad_cert, revocation_status_undetermined} ->
+ {bad_cert, {revocation_status_undetermined, _}} ->
valid;
Other ->
Other
diff --git a/lib/ssl/src/ssl_manager.erl b/lib/ssl/src/ssl_manager.erl
index ca9aaf4660..f44fe6a2bf 100644
--- a/lib/ssl/src/ssl_manager.erl
+++ b/lib/ssl/src/ssl_manager.erl
@@ -78,7 +78,7 @@
name(normal) ->
?MODULE;
name(dist) ->
- list_to_atom(atom_to_list(?MODULE) ++ "dist").
+ list_to_atom(atom_to_list(?MODULE) ++ "_dist").
%%--------------------------------------------------------------------
-spec start_link(list()) -> {ok, pid()} | ignore | {error, term()}.
@@ -563,7 +563,7 @@ server_register_session(Port, Session, #state{session_cache_server_max = Max,
do_register_session(Key, Session, Max, Pid, Cache, CacheCb) ->
try CacheCb:size(Cache) of
- Max ->
+ Size when Size >= Max ->
invalidate_session_cache(Pid, CacheCb, Cache);
_ ->
CacheCb:update(Cache, Key, Session),
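
The new guard presumably matters when the cache has already grown past the configured maximum: the old clause only fired on an exact match, while Size >= Max also triggers invalidation for any overshoot. A tiny illustrative sketch (the limit is made up):

Max = 100,
Check = fun(Size) when Size >= Max -> invalidate;
           (_)                     -> update
        end,
update     = Check(99),
invalidate = Check(100),
invalidate = Check(101).  %% an exact-match clause would have missed this
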
diff --git a/lib/ssl/src/ssl_pem_cache.erl b/lib/ssl/src/ssl_pem_cache.erl
index 6cc0729208..115ab4451d 100644
--- a/lib/ssl/src/ssl_pem_cache.erl
+++ b/lib/ssl/src/ssl_pem_cache.erl
@@ -65,7 +65,7 @@
name(normal) ->
?MODULE;
name(dist) ->
- list_to_atom(atom_to_list(?MODULE) ++ "dist").
+ list_to_atom(atom_to_list(?MODULE) ++ "_dist").
%%--------------------------------------------------------------------
-spec start_link(list()) -> {ok, pid()} | ignore | {error, term()}.
diff --git a/lib/ssl/src/ssl_pkix_db.erl b/lib/ssl/src/ssl_pkix_db.erl
index b28636569d..8828c3a0d8 100644
--- a/lib/ssl/src/ssl_pkix_db.erl
+++ b/lib/ssl/src/ssl_pkix_db.erl
@@ -76,10 +76,17 @@ remove(Dbs) ->
true = ets:delete(Db1);
(undefined) ->
ok;
- (ssl_pem_cache) ->
- ok;
- (ssl_pem_cache_dist) ->
- ok;
+ (Name) when is_atom(Name) ->
+ NormalName = ssl_pem_cache:name(normal),
+ DistName = ssl_pem_cache:name(dist),
+ case Name of
+ NormalName ->
+ ok;
+ DistName ->
+ ok;
+ _ ->
+ true = ets:delete(Name)
+ end;
(Db) ->
true = ets:delete(Db)
end, Dbs).
diff --git a/lib/ssl/src/tls_connection.erl b/lib/ssl/src/tls_connection.erl
index 352874c77d..e3ffbea3d3 100644
--- a/lib/ssl/src/tls_connection.erl
+++ b/lib/ssl/src/tls_connection.erl
@@ -56,7 +56,7 @@
reinit_handshake_data/1, select_sni_extension/1]).
%% Alert and close handling
--export([send_alert/2, close/5]).
+-export([send_alert/2, close/5, protocol_name/0]).
%% Data handling
-export([passive_receive/2, next_record_if_active/1, handle_common_event/4, send/3,
@@ -164,6 +164,8 @@ encode_data(Data, Version, ConnectionStates0)->
encode_alert(#alert{} = Alert, Version, ConnectionStates) ->
tls_record:encode_alert_record(Alert, Version, ConnectionStates).
+protocol_name() ->
+ "TLS".
%%====================================================================
%% tls_connection_sup API
%%====================================================================
@@ -719,7 +721,7 @@ close(downgrade, _,_,_,_) ->
%% Other
close(_, Socket, Transport, _,_) ->
Transport:close(Socket).
-
+
convert_state(#state{ssl_options = Options} = State, up, "5.3.5", "5.3.6") ->
State#state{ssl_options = convert_options_partial_chain(Options, up)};
convert_state(#state{ssl_options = Options} = State, down, "5.3.6", "5.3.5") ->
diff --git a/lib/ssl/test/Makefile b/lib/ssl/test/Makefile
index 558be6d642..c7e2f402af 100644
--- a/lib/ssl/test/Makefile
+++ b/lib/ssl/test/Makefile
@@ -56,7 +56,6 @@ MODULES = \
ssl_upgrade_SUITE\
ssl_sni_SUITE \
make_certs\
- erl_make_certs\
x509_test
diff --git a/lib/ssl/test/erl_make_certs.erl b/lib/ssl/test/erl_make_certs.erl
deleted file mode 100644
index 3ab6222780..0000000000
--- a/lib/ssl/test/erl_make_certs.erl
+++ /dev/null
@@ -1,477 +0,0 @@
-%%
-%% %CopyrightBegin%
-%%
-%% Copyright Ericsson AB 2011-2017. All Rights Reserved.
-%%
-%% Licensed under the Apache License, Version 2.0 (the "License");
-%% you may not use this file except in compliance with the License.
-%% You may obtain a copy of the License at
-%%
-%% http://www.apache.org/licenses/LICENSE-2.0
-%%
-%% Unless required by applicable law or agreed to in writing, software
-%% distributed under the License is distributed on an "AS IS" BASIS,
-%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-%% See the License for the specific language governing permissions and
-%% limitations under the License.
-%%
-%% %CopyrightEnd%
-%%
-
-%% Create test certificates
-
--module(erl_make_certs).
--include_lib("public_key/include/public_key.hrl").
-
--export([make_cert/1, gen_rsa/1, verify_signature/3, write_pem/3]).
--compile(export_all).
-
-%%--------------------------------------------------------------------
-%% @doc Create and return a der encoded certificate
-%% Option Default
-%% -------------------------------------------------------
-%% digest sha1
-%% validity {date(), date() + week()}
-%% version 3
-%% subject [] list of the following content
-%% {name, Name}
-%% {email, Email}
-%% {city, City}
-%% {state, State}
-%% {org, Org}
-%% {org_unit, OrgUnit}
-%% {country, Country}
-%% {serial, Serial}
-%% {title, Title}
-%% {dnQualifer, DnQ}
-%% issuer = {Issuer, IssuerKey} true (i.e. a ca cert is created)
-%% (obs: IssuerKey might be {Key, Password})
-%% key = KeyFile|KeyBin|rsa|dsa|ec Subject PublicKey rsa, dsa or ec generates key
-%%
-%%
-%% (OBS: The generated keys are for testing only)
-%% @spec ([{::atom(), ::term()}]) -> {Cert::binary(), Key::binary()}
-%% @end
-%%--------------------------------------------------------------------
-
-make_cert(Opts) ->
- SubjectPrivateKey = get_key(Opts),
- {TBSCert, IssuerKey} = make_tbs(SubjectPrivateKey, Opts),
- Cert = public_key:pkix_sign(TBSCert, IssuerKey),
- true = verify_signature(Cert, IssuerKey, undef), %% verify that the keys were ok
- {Cert, encode_key(SubjectPrivateKey)}.
-
-%%--------------------------------------------------------------------
-%% @doc Writes pem files in Dir with FileName ++ ".pem" and FileName ++ "_key.pem"
-%% @spec (::string(), ::string(), {Cert,Key}) -> ok
-%% @end
-%%--------------------------------------------------------------------
-write_pem(Dir, FileName, {Cert, Key = {_,_,not_encrypted}}) when is_binary(Cert) ->
- ok = der_to_pem(filename:join(Dir, FileName ++ ".pem"),
- [{'Certificate', Cert, not_encrypted}]),
- ok = der_to_pem(filename:join(Dir, FileName ++ "_key.pem"), [Key]).
-
-%%--------------------------------------------------------------------
-%% @doc Creates an RSA key (OBS: for testing only)
-%% the size is in bytes
-%% @spec (::integer()) -> {::atom(), ::binary(), ::opaque()}
-%% @end
-%%--------------------------------------------------------------------
-gen_rsa(Size) when is_integer(Size) ->
- Key = gen_rsa2(Size),
- {Key, encode_key(Key)}.
-
-%%--------------------------------------------------------------------
-%% @doc Creates a DSA key (OBS: for testing only)
-%% the sizes are in bytes
-%% @spec (::integer()) -> {::atom(), ::binary(), ::opaque()}
-%% @end
-%%--------------------------------------------------------------------
-gen_dsa(LSize,NSize) when is_integer(LSize), is_integer(NSize) ->
- Key = gen_dsa2(LSize, NSize),
- {Key, encode_key(Key)}.
-
-%%--------------------------------------------------------------------
-%% @doc Creates an EC key (OBS: for testing only)
-%% the sizes are in bytes
-%% @spec (::integer()) -> {::atom(), ::binary(), ::opaque()}
-%% @end
-%%--------------------------------------------------------------------
-gen_ec(Curve) when is_atom(Curve) ->
- Key = gen_ec2(Curve),
- {Key, encode_key(Key)}.
-
-%%--------------------------------------------------------------------
-%% @doc Verifies cert signatures
-%% @spec (::binary(), ::tuple()) -> ::boolean()
-%% @end
-%%--------------------------------------------------------------------
-verify_signature(DerEncodedCert, DerKey, _KeyParams) ->
- Key = decode_key(DerKey),
- case Key of
- #'RSAPrivateKey'{modulus=Mod, publicExponent=Exp} ->
- public_key:pkix_verify(DerEncodedCert,
- #'RSAPublicKey'{modulus=Mod, publicExponent=Exp});
- #'DSAPrivateKey'{p=P, q=Q, g=G, y=Y} ->
- public_key:pkix_verify(DerEncodedCert, {Y, #'Dss-Parms'{p=P, q=Q, g=G}});
- #'ECPrivateKey'{version = _Version, privateKey = _PrivKey,
- parameters = Params, publicKey = PubKey} ->
- public_key:pkix_verify(DerEncodedCert, {#'ECPoint'{point = PubKey}, Params})
- end.
-
-%%%%%%%%%%%%%%%%%%%%%%%%% Implementation %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-get_key(Opts) ->
- case proplists:get_value(key, Opts) of
- undefined -> make_key(rsa, Opts);
- rsa -> make_key(rsa, Opts);
- dsa -> make_key(dsa, Opts);
- ec -> make_key(ec, Opts);
- Key ->
- Password = proplists:get_value(password, Opts, no_passwd),
- decode_key(Key, Password)
- end.
-
-decode_key({Key, Pw}) ->
- decode_key(Key, Pw);
-decode_key(Key) ->
- decode_key(Key, no_passwd).
-
-
-decode_key(#'RSAPublicKey'{} = Key,_) ->
- Key;
-decode_key(#'RSAPrivateKey'{} = Key,_) ->
- Key;
-decode_key(#'DSAPrivateKey'{} = Key,_) ->
- Key;
-decode_key(#'ECPrivateKey'{} = Key,_) ->
- Key;
-decode_key(PemEntry = {_,_,_}, Pw) ->
- public_key:pem_entry_decode(PemEntry, Pw);
-decode_key(PemBin, Pw) ->
- [KeyInfo] = public_key:pem_decode(PemBin),
- decode_key(KeyInfo, Pw).
-
-encode_key(Key = #'RSAPrivateKey'{}) ->
- {ok, Der} = 'OTP-PUB-KEY':encode('RSAPrivateKey', Key),
- {'RSAPrivateKey', Der, not_encrypted};
-encode_key(Key = #'DSAPrivateKey'{}) ->
- {ok, Der} = 'OTP-PUB-KEY':encode('DSAPrivateKey', Key),
- {'DSAPrivateKey', Der, not_encrypted};
-encode_key(Key = #'ECPrivateKey'{}) ->
- {ok, Der} = 'OTP-PUB-KEY':encode('ECPrivateKey', Key),
- {'ECPrivateKey', Der, not_encrypted}.
-
-make_tbs(SubjectKey, Opts) ->
- Version = list_to_atom("v"++integer_to_list(proplists:get_value(version, Opts, 3))),
-
- IssuerProp = proplists:get_value(issuer, Opts, true),
- {Issuer, IssuerKey} = issuer(IssuerProp, Opts, SubjectKey),
-
- {Algo, Parameters} = sign_algorithm(IssuerKey, Opts),
-
- SignAlgo = #'SignatureAlgorithm'{algorithm = Algo,
- parameters = Parameters},
- Subject = case IssuerProp of
- true -> %% Is a Root Ca
- Issuer;
- _ ->
- subject(proplists:get_value(subject, Opts),false)
- end,
-
- {#'OTPTBSCertificate'{serialNumber = trunc(rand:uniform()*100000000)*10000 + 1,
- signature = SignAlgo,
- issuer = Issuer,
- validity = validity(Opts),
- subject = Subject,
- subjectPublicKeyInfo = publickey(SubjectKey),
- version = Version,
- extensions = extensions(Opts)
- }, IssuerKey}.
-
-issuer(true, Opts, SubjectKey) ->
- %% Self signed
- {subject(proplists:get_value(subject, Opts), true), SubjectKey};
-issuer({Issuer, IssuerKey}, _Opts, _SubjectKey) when is_binary(Issuer) ->
- {issuer_der(Issuer), decode_key(IssuerKey)};
-issuer({File, IssuerKey}, _Opts, _SubjectKey) when is_list(File) ->
- {ok, [{cert, Cert, _}|_]} = pem_to_der(File),
- {issuer_der(Cert), decode_key(IssuerKey)}.
-
-issuer_der(Issuer) ->
- Decoded = public_key:pkix_decode_cert(Issuer, otp),
- #'OTPCertificate'{tbsCertificate=Tbs} = Decoded,
- #'OTPTBSCertificate'{subject=Subject} = Tbs,
- Subject.
-
-subject(undefined, IsRootCA) ->
- User = if IsRootCA -> "RootCA"; true -> os:getenv("USER", "test_user") end,
- Opts = [{email, User ++ "@erlang.org"},
- {name, User},
- {city, "Stockholm"},
- {country, "SE"},
- {org, "erlang"},
- {org_unit, "testing dep"}],
- subject(Opts);
-subject(Opts, _) ->
- subject(Opts).
-
-subject(SubjectOpts) when is_list(SubjectOpts) ->
- Encode = fun(Opt) ->
- {Type,Value} = subject_enc(Opt),
- [#'AttributeTypeAndValue'{type=Type, value=Value}]
- end,
- {rdnSequence, [Encode(Opt) || Opt <- SubjectOpts]}.
-
-%% Fill in the blanks
-subject_enc({name, Name}) -> {?'id-at-commonName', {printableString, Name}};
-subject_enc({email, Email}) -> {?'id-emailAddress', Email};
-subject_enc({city, City}) -> {?'id-at-localityName', {printableString, City}};
-subject_enc({state, State}) -> {?'id-at-stateOrProvinceName', {printableString, State}};
-subject_enc({org, Org}) -> {?'id-at-organizationName', {printableString, Org}};
-subject_enc({org_unit, OrgUnit}) -> {?'id-at-organizationalUnitName', {printableString, OrgUnit}};
-subject_enc({country, Country}) -> {?'id-at-countryName', Country};
-subject_enc({serial, Serial}) -> {?'id-at-serialNumber', Serial};
-subject_enc({title, Title}) -> {?'id-at-title', {printableString, Title}};
-subject_enc({dnQualifer, DnQ}) -> {?'id-at-dnQualifier', DnQ};
-subject_enc(Other) -> Other.
-
-
-extensions(Opts) ->
- case proplists:get_value(extensions, Opts, []) of
- false ->
- asn1_NOVALUE;
- Exts ->
- lists:flatten([extension(Ext) || Ext <- default_extensions(Exts)])
- end.
-
-default_extensions(Exts) ->
- Def = [{key_usage,undefined},
- {subject_altname, undefined},
- {issuer_altname, undefined},
- {basic_constraints, default},
- {name_constraints, undefined},
- {policy_constraints, undefined},
- {ext_key_usage, undefined},
- {inhibit_any, undefined},
- {auth_key_id, undefined},
- {subject_key_id, undefined},
- {policy_mapping, undefined}],
- Filter = fun({Key, _}, D) -> lists:keydelete(Key, 1, D) end,
- Exts ++ lists:foldl(Filter, Def, Exts).
-
-extension({_, undefined}) -> [];
-extension({basic_constraints, Data}) ->
- case Data of
- default ->
- #'Extension'{extnID = ?'id-ce-basicConstraints',
- extnValue = #'BasicConstraints'{cA=true},
- critical=true};
- false ->
- [];
- Len when is_integer(Len) ->
- #'Extension'{extnID = ?'id-ce-basicConstraints',
- extnValue = #'BasicConstraints'{cA=true, pathLenConstraint=Len},
- critical=true};
- _ ->
- #'Extension'{extnID = ?'id-ce-basicConstraints',
- extnValue = Data}
- end;
-extension({Id, Data, Critical}) ->
- #'Extension'{extnID = Id, extnValue = Data, critical = Critical}.
-
-
-publickey(#'RSAPrivateKey'{modulus=N, publicExponent=E}) ->
- Public = #'RSAPublicKey'{modulus=N, publicExponent=E},
- Algo = #'PublicKeyAlgorithm'{algorithm= ?rsaEncryption, parameters='NULL'},
- #'OTPSubjectPublicKeyInfo'{algorithm = Algo,
- subjectPublicKey = Public};
-publickey(#'DSAPrivateKey'{p=P, q=Q, g=G, y=Y}) ->
- Algo = #'PublicKeyAlgorithm'{algorithm= ?'id-dsa',
- parameters={params, #'Dss-Parms'{p=P, q=Q, g=G}}},
- #'OTPSubjectPublicKeyInfo'{algorithm = Algo, subjectPublicKey = Y};
-publickey(#'ECPrivateKey'{version = _Version,
- privateKey = _PrivKey,
- parameters = Params,
- publicKey = PubKey}) ->
- Algo = #'PublicKeyAlgorithm'{algorithm= ?'id-ecPublicKey', parameters=Params},
- #'OTPSubjectPublicKeyInfo'{algorithm = Algo,
- subjectPublicKey = #'ECPoint'{point = PubKey}}.
-
-validity(Opts) ->
- DefFrom0 = calendar:gregorian_days_to_date(calendar:date_to_gregorian_days(date())-1),
- DefTo0 = calendar:gregorian_days_to_date(calendar:date_to_gregorian_days(date())+7),
- {DefFrom, DefTo} = proplists:get_value(validity, Opts, {DefFrom0, DefTo0}),
- Format = fun({Y,M,D}) -> lists:flatten(io_lib:format("~w~2..0w~2..0w000000Z",[Y,M,D])) end,
- #'Validity'{notBefore={generalTime, Format(DefFrom)},
- notAfter ={generalTime, Format(DefTo)}}.
-
-sign_algorithm(#'RSAPrivateKey'{}, Opts) ->
- Type = case proplists:get_value(digest, Opts, sha1) of
- sha1 -> ?'sha1WithRSAEncryption';
- sha512 -> ?'sha512WithRSAEncryption';
- sha384 -> ?'sha384WithRSAEncryption';
- sha256 -> ?'sha256WithRSAEncryption';
- md5 -> ?'md5WithRSAEncryption';
- md2 -> ?'md2WithRSAEncryption'
- end,
- {Type, 'NULL'};
-sign_algorithm(#'DSAPrivateKey'{p=P, q=Q, g=G}, _Opts) ->
- {?'id-dsa-with-sha1', {params,#'Dss-Parms'{p=P, q=Q, g=G}}};
-sign_algorithm(#'ECPrivateKey'{parameters = Parms}, Opts) ->
- Type = case proplists:get_value(digest, Opts, sha1) of
- sha1 -> ?'ecdsa-with-SHA1';
- sha512 -> ?'ecdsa-with-SHA512';
- sha384 -> ?'ecdsa-with-SHA384';
- sha256 -> ?'ecdsa-with-SHA256'
- end,
- {Type, Parms}.
-
-make_key(rsa, _Opts) ->
- %% (OBS: for testing only)
- gen_rsa2(64);
-make_key(dsa, _Opts) ->
- gen_dsa2(128, 20); %% Bytes i.e. {1024, 160}
-make_key(ec, _Opts) ->
- %% (OBS: for testing only)
- CurveOid = hd(tls_v1:ecc_curves(0)),
- NamedCurve = pubkey_cert_records:namedCurves(CurveOid),
- gen_ec2(NamedCurve).
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% RSA key generation (OBS: for testing only)
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
--define(SMALL_PRIMES, [65537,97,89,83,79,73,71,67,61,59,53,
- 47,43,41,37,31,29,23,19,17,13,11,7,5,3]).
-
-gen_rsa2(Size) ->
- P = prime(Size),
- Q = prime(Size),
- N = P*Q,
- Tot = (P - 1) * (Q - 1),
- [E|_] = lists:dropwhile(fun(Candidate) -> (Tot rem Candidate) == 0 end, ?SMALL_PRIMES),
- {D1,D2} = extended_gcd(E, Tot),
- D = erlang:max(D1,D2),
- case D < E of
- true ->
- gen_rsa2(Size);
- false ->
- {Co1,Co2} = extended_gcd(Q, P),
- Co = erlang:max(Co1,Co2),
- #'RSAPrivateKey'{version = 'two-prime',
- modulus = N,
- publicExponent = E,
- privateExponent = D,
- prime1 = P,
- prime2 = Q,
- exponent1 = D rem (P-1),
- exponent2 = D rem (Q-1),
- coefficient = Co
- }
- end.
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% DSA key generation (OBS: for testing only)
-%% See http://en.wikipedia.org/wiki/Digital_Signature_Algorithm
-%% and the fips_186-3.pdf
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-gen_dsa2(LSize, NSize) ->
- Q = prime(NSize), %% Choose N-bit prime Q
- X0 = prime(LSize),
- P0 = prime((LSize div 2) +1),
-
- %% Choose L-bit prime modulus P such that p-1 is a multiple of q.
- case dsa_search(X0 div (2*Q*P0), P0, Q, 1000) of
- error ->
- gen_dsa2(LSize, NSize);
- P ->
- G = crypto:mod_pow(2, (P-1) div Q, P), % Choose G a number whose multiplicative order modulo p is q.
- %% such that This may be done by setting g = h^(p-1)/q mod p, commonly h=2 is used.
-
- X = prime(20), %% Choose x by some random method, where 0 < x < q.
- Y = crypto:mod_pow(G, X, P), %% Calculate y = g^x mod p.
-
- #'DSAPrivateKey'{version=0, p = P, q = Q,
- g = crypto:bytes_to_integer(G), y = crypto:bytes_to_integer(Y), x = X}
- end.
-
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-%% EC key generation (OBS: for testing only)
-%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-gen_ec2(CurveId) ->
- {PubKey, PrivKey} = crypto:generate_key(ecdh, CurveId),
-
- #'ECPrivateKey'{version = 1,
- privateKey = PrivKey,
- parameters = {namedCurve, pubkey_cert_records:namedCurves(CurveId)},
- publicKey = PubKey}.
-
-%% See fips_186-3.pdf
-dsa_search(T, P0, Q, Iter) when Iter > 0 ->
- P = 2*T*Q*P0 + 1,
- case is_prime(P, 50) of
- true -> P;
- false -> dsa_search(T+1, P0, Q, Iter-1)
- end;
-dsa_search(_,_,_,_) ->
- error.
-
-
-%%%%%%% Crypto Math %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-prime(ByteSize) ->
- Rand = odd_rand(ByteSize),
- prime_odd(Rand, 0).
-
-prime_odd(Rand, N) ->
- case is_prime(Rand, 50) of
- true ->
- Rand;
- false ->
- prime_odd(Rand+2, N+1)
- end.
-
-%% see http://en.wikipedia.org/wiki/Fermat_primality_test
-is_prime(_, 0) -> true;
-is_prime(Candidate, Test) ->
- CoPrime = odd_rand(10000, Candidate),
- Result = crypto:mod_pow(CoPrime, Candidate, Candidate) ,
- is_prime(CoPrime, crypto:bytes_to_integer(Result), Candidate, Test).
-
-is_prime(CoPrime, CoPrime, Candidate, Test) ->
- is_prime(Candidate, Test-1);
-is_prime(_,_,_,_) ->
- false.
-
-odd_rand(Size) ->
- Min = 1 bsl (Size*8-1),
- Max = (1 bsl (Size*8))-1,
- odd_rand(Min, Max).
-
-odd_rand(Min,Max) ->
- Rand = crypto:rand_uniform(Min,Max),
- case Rand rem 2 of
- 0 ->
- Rand + 1;
- _ ->
- Rand
- end.
-
-extended_gcd(A, B) ->
- case A rem B of
- 0 ->
- {0, 1};
- N ->
- {X, Y} = extended_gcd(B, N),
- {Y, X-Y*(A div B)}
- end.
-
-pem_to_der(File) ->
- {ok, PemBin} = file:read_file(File),
- public_key:pem_decode(PemBin).
-
-der_to_pem(File, Entries) ->
- PemBin = public_key:pem_encode(Entries),
- file:write_file(File, PemBin).
-
diff --git a/lib/ssl/test/ssl_ECC_SUITE.erl b/lib/ssl/test/ssl_ECC_SUITE.erl
index 0fbb0bb79a..64e8042b25 100644
--- a/lib/ssl/test/ssl_ECC_SUITE.erl
+++ b/lib/ssl/test/ssl_ECC_SUITE.erl
@@ -36,7 +36,9 @@ all() ->
[
{group, 'tlsv1.2'},
{group, 'tlsv1.1'},
- {group, 'tlsv1'}
+ {group, 'tlsv1'},
+ {group, 'dtlsv1.2'},
+ {group, 'dtlsv1'}
].
groups() ->
@@ -44,6 +46,8 @@ groups() ->
{'tlsv1.2', [], all_versions_groups()},
{'tlsv1.1', [], all_versions_groups()},
{'tlsv1', [], all_versions_groups()},
+ {'dtlsv1.2', [], all_versions_groups()},
+ {'dtlsv1', [], all_versions_groups()},
{'erlang_server', [], openssl_key_cert_combinations()},
%%{'erlang_client', [], openssl_key_cert_combinations()},
{'erlang', [], key_cert_combinations() ++ misc()
@@ -196,8 +200,14 @@ common_init_per_group(GroupName, Config) ->
openssl_check(GroupName, Config)
end.
-end_per_group(_GroupName, Config) ->
- Config.
+end_per_group(GroupName, Config0) ->
+ case ssl_test_lib:is_tls_version(GroupName) of
+ true ->
+ Config = ssl_test_lib:clean_tls_version(Config0),
+ proplists:delete(tls_version, Config);
+ false ->
+ Config0
+ end.
%%--------------------------------------------------------------------
diff --git a/lib/ssl/test/ssl_alpn_handshake_SUITE.erl b/lib/ssl/test/ssl_alpn_handshake_SUITE.erl
index 158b3524ac..055f05a900 100644
--- a/lib/ssl/test/ssl_alpn_handshake_SUITE.erl
+++ b/lib/ssl/test/ssl_alpn_handshake_SUITE.erl
@@ -35,14 +35,19 @@ all() ->
[{group, 'tlsv1.2'},
{group, 'tlsv1.1'},
{group, 'tlsv1'},
- {group, 'sslv3'}].
+ {group, 'sslv3'},
+ {group, 'dtlsv1.2'},
+ {group, 'dtlsv1'}
+ ].
groups() ->
[
{'tlsv1.2', [], alpn_tests()},
{'tlsv1.1', [], alpn_tests()},
{'tlsv1', [], alpn_tests()},
- {'sslv3', [], alpn_not_supported()}
+ {'sslv3', [], alpn_not_supported()},
+ {'dtlsv1.2', [], alpn_tests() -- [client_renegotiate]},
+ {'dtlsv1', [], alpn_tests() -- [client_renegotiate]}
].
alpn_tests() ->
@@ -67,13 +72,12 @@ alpn_not_supported() ->
alpn_not_supported_server
].
-init_per_suite(Config) ->
+init_per_suite(Config0) ->
catch crypto:stop(),
try crypto:start() of
ok ->
ssl_test_lib:clean_start(),
- {ok, _} = make_certs:all(proplists:get_value(data_dir, Config),
- proplists:get_value(priv_dir, Config)),
+ Config = ssl_test_lib:make_rsa_cert(Config0),
ssl_test_lib:cert_options(Config)
catch _:_ ->
{skip, "Crypto did not start"}
@@ -90,8 +94,7 @@ init_per_group(GroupName, Config) ->
true ->
case ssl_test_lib:sufficient_crypto_support(GroupName) of
true ->
- ssl_test_lib:init_tls_version(GroupName, Config),
- Config;
+ ssl_test_lib:init_tls_version(GroupName, Config);
false ->
{skip, "Missing crypto support"}
end;
@@ -100,8 +103,14 @@ init_per_group(GroupName, Config) ->
Config
end.
-end_per_group(_GroupName, Config) ->
- Config.
+end_per_group(GroupName, Config) ->
+ case ssl_test_lib:is_tls_version(GroupName) of
+ true ->
+ ssl_test_lib:clean_tls_version(Config);
+ false ->
+ Config
+ end.
+
init_per_testcase(_TestCase, Config) ->
ssl_test_lib:ct_log_supported_protocol_versions(Config),
@@ -116,26 +125,29 @@ end_per_testcase(_TestCase, Config) ->
%%--------------------------------------------------------------------
empty_protocols_are_not_allowed(Config) when is_list(Config) ->
+ ServerOpts = ssl_test_lib:ssl_options(server_rsa_opts, Config),
{error, {options, {alpn_preferred_protocols, {invalid_protocol, <<>>}}}}
= (catch ssl:listen(9443,
- [{alpn_preferred_protocols, [<<"foo/1">>, <<"">>]}])),
+ [{alpn_preferred_protocols, [<<"foo/1">>, <<"">>]}| ServerOpts])),
{error, {options, {alpn_advertised_protocols, {invalid_protocol, <<>>}}}}
= (catch ssl:connect({127,0,0,1}, 9443,
- [{alpn_advertised_protocols, [<<"foo/1">>, <<"">>]}])).
+ [{alpn_advertised_protocols, [<<"foo/1">>, <<"">>]} | ServerOpts])).
%--------------------------------------------------------------------------------
protocols_must_be_a_binary_list(Config) when is_list(Config) ->
+ ServerOpts = ssl_test_lib:ssl_options(server_rsa_opts, Config),
Option1 = {alpn_preferred_protocols, hello},
- {error, {options, Option1}} = (catch ssl:listen(9443, [Option1])),
+ {error, {options, Option1}} = (catch ssl:listen(9443, [Option1 | ServerOpts])),
Option2 = {alpn_preferred_protocols, [<<"foo/1">>, hello]},
{error, {options, {alpn_preferred_protocols, {invalid_protocol, hello}}}}
- = (catch ssl:listen(9443, [Option2])),
+ = (catch ssl:listen(9443, [Option2 | ServerOpts])),
+ ClientOpts = ssl_test_lib:ssl_options(client_rsa_opts, Config),
Option3 = {alpn_advertised_protocols, hello},
- {error, {options, Option3}} = (catch ssl:connect({127,0,0,1}, 9443, [Option3])),
+ {error, {options, Option3}} = (catch ssl:connect({127,0,0,1}, 9443, [Option3 | ClientOpts])),
Option4 = {alpn_advertised_protocols, [<<"foo/1">>, hello]},
{error, {options, {alpn_advertised_protocols, {invalid_protocol, hello}}}}
- = (catch ssl:connect({127,0,0,1}, 9443, [Option4])).
+ = (catch ssl:connect({127,0,0,1}, 9443, [Option4 | ClientOpts])).
%--------------------------------------------------------------------------------
@@ -226,9 +238,9 @@ client_alpn_and_server_alpn_npn(Config) when is_list(Config) ->
client_renegotiate(Config) when is_list(Config) ->
Data = "hello world",
- ClientOpts0 = proplists:get_value(client_opts, Config),
+ ClientOpts0 = ssl_test_lib:ssl_options(client_rsa_opts, Config),
ClientOpts = [{alpn_advertised_protocols, [<<"http/1.0">>]}] ++ ClientOpts0,
- ServerOpts0 = proplists:get_value(server_opts, Config),
+ ServerOpts0 = ssl_test_lib:ssl_options(server_rsa_opts, Config),
ServerOpts = [{alpn_preferred_protocols, [<<"spdy/2">>, <<"http/1.1">>, <<"http/1.0">>]}] ++ ServerOpts0,
ExpectedProtocol = {ok, <<"http/1.0">>},
@@ -250,9 +262,9 @@ client_renegotiate(Config) when is_list(Config) ->
%--------------------------------------------------------------------------------
session_reused(Config) when is_list(Config)->
- ClientOpts0 = proplists:get_value(client_opts, Config),
+ ClientOpts0 = ssl_test_lib:ssl_options(client_rsa_opts, Config),
ClientOpts = [{alpn_advertised_protocols, [<<"http/1.0">>]}] ++ ClientOpts0,
- ServerOpts0 = proplists:get_value(server_opts, Config),
+ ServerOpts0 = ssl_test_lib:ssl_options(server_rsa_opts, Config),
ServerOpts = [{alpn_preferred_protocols, [<<"spdy/2">>, <<"http/1.1">>, <<"http/1.0">>]}] ++ ServerOpts0,
{ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
@@ -299,7 +311,7 @@ session_reused(Config) when is_list(Config)->
%--------------------------------------------------------------------------------
alpn_not_supported_client(Config) when is_list(Config) ->
- ClientOpts0 = proplists:get_value(client_opts, Config),
+ ClientOpts0 = ssl_test_lib:ssl_options(client_rsa_opts, Config),
PrefProtocols = {client_preferred_next_protocols,
{client, [<<"http/1.0">>], <<"http/1.1">>}},
ClientOpts = [PrefProtocols] ++ ClientOpts0,
@@ -315,7 +327,7 @@ alpn_not_supported_client(Config) when is_list(Config) ->
%--------------------------------------------------------------------------------
alpn_not_supported_server(Config) when is_list(Config)->
- ServerOpts0 = proplists:get_value(server_opts, Config),
+ ServerOpts0 = ssl_test_lib:ssl_options(server_rsa_opts, Config),
AdvProtocols = {next_protocols_advertised, [<<"spdy/2">>, <<"http/1.1">>, <<"http/1.0">>]},
ServerOpts = [AdvProtocols] ++ ServerOpts0,
@@ -326,8 +338,8 @@ alpn_not_supported_server(Config) when is_list(Config)->
%%--------------------------------------------------------------------
run_failing_handshake(Config, ClientExtraOpts, ServerExtraOpts, ExpectedResult) ->
- ClientOpts = ClientExtraOpts ++ proplists:get_value(client_opts, Config),
- ServerOpts = ServerExtraOpts ++ proplists:get_value(server_opts, Config),
+ ClientOpts = ClientExtraOpts ++ ssl_test_lib:ssl_options(client_rsa_opts, Config),
+ ServerOpts = ServerExtraOpts ++ ssl_test_lib:ssl_options(server_rsa_opts, Config),
{ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
@@ -346,9 +358,9 @@ run_failing_handshake(Config, ClientExtraOpts, ServerExtraOpts, ExpectedResult)
run_handshake(Config, ClientExtraOpts, ServerExtraOpts, ExpectedProtocol) ->
Data = "hello world",
- ClientOpts0 = proplists:get_value(client_opts, Config),
+ ClientOpts0 = ssl_test_lib:ssl_options(client_rsa_opts, Config),
ClientOpts = ClientExtraOpts ++ ClientOpts0,
- ServerOpts0 = proplists:get_value(server_opts, Config),
+ ServerOpts0 = ssl_test_lib:ssl_options(server_rsa_opts, Config),
ServerOpts = ServerExtraOpts ++ ServerOpts0,
{ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
diff --git a/lib/ssl/test/ssl_basic_SUITE.erl b/lib/ssl/test/ssl_basic_SUITE.erl
index 407152aa75..9efde4752f 100644
--- a/lib/ssl/test/ssl_basic_SUITE.erl
+++ b/lib/ssl/test/ssl_basic_SUITE.erl
@@ -53,8 +53,7 @@ all() ->
{group, options_tls},
{group, session},
{group, 'dtlsv1.2'},
- %% {group, 'dtlsv1'}, Breaks dtls in cert_verify_SUITE enable later when
- %% problem is identified and fixed
+ {group, 'dtlsv1'},
{group, 'tlsv1.2'},
{group, 'tlsv1.1'},
{group, 'tlsv1'},
@@ -277,6 +276,12 @@ end_per_suite(_Config) ->
application:stop(crypto).
%%--------------------------------------------------------------------
+
+init_per_group(GroupName, Config) when GroupName == basic_tls;
+ GroupName == options_tls;
+ GroupName == basic;
+ GroupName == options ->
+ ssl_test_lib:clean_tls_version(Config);
init_per_group(GroupName, Config) ->
case ssl_test_lib:is_tls_version(GroupName) andalso ssl_test_lib:sufficient_crypto_support(GroupName) of
true ->
@@ -291,8 +296,13 @@ init_per_group(GroupName, Config) ->
end
end.
-end_per_group(_GroupName, Config) ->
- Config.
+end_per_group(GroupName, Config) ->
+ case ssl_test_lib:is_tls_version(GroupName) of
+ true ->
+ ssl_test_lib:clean_tls_version(Config);
+ false ->
+ Config
+ end.
%%--------------------------------------------------------------------
init_per_testcase(Case, Config) when Case == unordered_protocol_versions_client;
@@ -360,6 +370,8 @@ init_per_testcase(TestCase, Config) when TestCase == psk_cipher_suites;
TestCase == psk_with_hint_cipher_suites;
TestCase == ciphers_rsa_signed_certs;
TestCase == ciphers_rsa_signed_certs_openssl_names;
+ TestCase == ciphers_ecdh_rsa_signed_certs_openssl_names;
+ TestCase == ciphers_ecdh_rsa_signed_certs;
TestCase == ciphers_dsa_signed_certs;
TestCase == ciphers_dsa_signed_certs_openssl_names;
TestCase == anonymous_cipher_suites;
@@ -368,6 +380,11 @@ init_per_testcase(TestCase, Config) when TestCase == psk_cipher_suites;
TestCase == anonymous_cipher_suites;
TestCase == psk_anon_cipher_suites;
TestCase == psk_anon_with_hint_cipher_suites;
+ TestCase == srp_cipher_suites;
+ TestCase == srp_anon_cipher_suites;
+ TestCase == srp_dsa_cipher_suites;
+ TestCase == des_rsa_cipher_suites;
+ TestCase == des_ecdh_rsa_cipher_suites;
TestCase == versions_option,
TestCase == tls_tcp_connect_big ->
ssl_test_lib:ct_log_supported_protocol_versions(Config),
@@ -386,22 +403,27 @@ init_per_testcase(reuse_session, Config) ->
init_per_testcase(rizzo, Config) ->
ssl_test_lib:ct_log_supported_protocol_versions(Config),
- ct:timetrap({seconds, 40}),
+ ct:timetrap({seconds, 60}),
+ Config;
+
+init_per_testcase(no_rizzo_rc4, Config) ->
+ ssl_test_lib:ct_log_supported_protocol_versions(Config),
+ ct:timetrap({seconds, 60}),
Config;
init_per_testcase(rizzo_one_n_minus_one, Config) ->
ct:log("TLS/SSL version ~p~n ", [tls_record:supported_protocol_versions()]),
- ct:timetrap({seconds, 40}),
+ ct:timetrap({seconds, 60}),
rizzo_add_mitigation_option(one_n_minus_one, Config);
init_per_testcase(rizzo_zero_n, Config) ->
ct:log("TLS/SSL version ~p~n ", [tls_record:supported_protocol_versions()]),
- ct:timetrap({seconds, 40}),
+ ct:timetrap({seconds, 60}),
rizzo_add_mitigation_option(zero_n, Config);
init_per_testcase(rizzo_disabled, Config) ->
ct:log("TLS/SSL version ~p~n ", [tls_record:supported_protocol_versions()]),
- ct:timetrap({seconds, 40}),
+ ct:timetrap({seconds, 60}),
rizzo_add_mitigation_option(disabled, Config);
init_per_testcase(prf, Config) ->
@@ -511,7 +533,7 @@ alerts() ->
[{doc, "Test ssl_alert:alert_txt/1"}].
alerts(Config) when is_list(Config) ->
Descriptions = [?CLOSE_NOTIFY, ?UNEXPECTED_MESSAGE, ?BAD_RECORD_MAC,
- ?DECRYPTION_FAILED, ?RECORD_OVERFLOW, ?DECOMPRESSION_FAILURE,
+ ?DECRYPTION_FAILED_RESERVED, ?RECORD_OVERFLOW, ?DECOMPRESSION_FAILURE,
?HANDSHAKE_FAILURE, ?BAD_CERTIFICATE, ?UNSUPPORTED_CERTIFICATE,
?CERTIFICATE_REVOKED,?CERTIFICATE_EXPIRED, ?CERTIFICATE_UNKNOWN,
?ILLEGAL_PARAMETER, ?UNKNOWN_CA, ?ACCESS_DENIED, ?DECODE_ERROR,
@@ -2308,20 +2330,16 @@ tls_shutdown_error(Config) when is_list(Config) ->
ciphers_rsa_signed_certs() ->
[{doc,"Test all rsa ssl cipher suites in highest support ssl/tls version"}].
-ciphers_rsa_signed_certs(Config) when is_list(Config) ->
- Version = ssl_test_lib:protocol_version(Config),
+ciphers_rsa_signed_certs(Config) when is_list(Config) ->
Ciphers = ssl_test_lib:rsa_suites(crypto),
- ct:log("~p erlang cipher suites ~p~n", [Version, Ciphers]),
- run_suites(Ciphers, Version, Config, rsa).
+ run_suites(Ciphers, Config, rsa).
%%-------------------------------------------------------------------
ciphers_rsa_signed_certs_openssl_names() ->
[{doc,"Test all rsa ssl cipher suites in highest support ssl/tls version"}].
ciphers_rsa_signed_certs_openssl_names(Config) when is_list(Config) ->
- Version = ssl_test_lib:protocol_version(Config),
- Ciphers = ssl_test_lib:openssl_rsa_suites(crypto),
- ct:log("tls1 openssl cipher suites ~p~n", [Ciphers]),
- run_suites(Ciphers, Version, Config, rsa).
+ Ciphers = ssl_test_lib:openssl_rsa_suites(),
+ run_suites(Ciphers, Config, rsa).
%%-------------------------------------------------------------------
ciphers_dsa_signed_certs() ->
@@ -2329,120 +2347,104 @@ ciphers_dsa_signed_certs() ->
ciphers_dsa_signed_certs(Config) when is_list(Config) ->
NVersion = ssl_test_lib:protocol_version(Config, tuple),
- Version = ssl_test_lib:protocol_version(Config),
Ciphers = ssl_test_lib:dsa_suites(NVersion),
- ct:log("~p erlang cipher suites ~p~n", [Version, Ciphers]),
- run_suites(Ciphers, Version, Config, dsa).
+ run_suites(Ciphers, Config, dsa).
%%-------------------------------------------------------------------
ciphers_dsa_signed_certs_openssl_names() ->
[{doc,"Test all dsa ssl cipher suites in highest support ssl/tls version"}].
ciphers_dsa_signed_certs_openssl_names(Config) when is_list(Config) ->
- Version = ssl_test_lib:protocol_version(Config),
Ciphers = ssl_test_lib:openssl_dsa_suites(),
- ct:log("tls1 openssl cipher suites ~p~n", [Ciphers]),
- run_suites(Ciphers, Version, Config, dsa).
+ run_suites(Ciphers, Config, dsa).
%%-------------------------------------------------------------------
anonymous_cipher_suites()->
[{doc,"Test the anonymous ciphersuites"}].
anonymous_cipher_suites(Config) when is_list(Config) ->
- Version = ssl_test_lib:protocol_version(Config),
- Ciphers = ssl_test_lib:anonymous_suites(Version),
- run_suites(Ciphers, Version, Config, anonymous).
+ NVersion = ssl_test_lib:protocol_version(Config, tuple),
+ Ciphers = ssl_test_lib:anonymous_suites(NVersion),
+ run_suites(Ciphers, Config, anonymous).
%%-------------------------------------------------------------------
psk_cipher_suites() ->
[{doc, "Test the PSK ciphersuites WITHOUT server supplied identity hint"}].
psk_cipher_suites(Config) when is_list(Config) ->
- NVersion = tls_record:highest_protocol_version([]),
- Version = ssl_test_lib:protocol_version(Config),
+ NVersion = ssl_test_lib:protocol_version(Config, tuple),
Ciphers = ssl_test_lib:psk_suites(NVersion),
- run_suites(Ciphers, Version, Config, psk).
+ run_suites(Ciphers, Config, psk).
%%-------------------------------------------------------------------
psk_with_hint_cipher_suites()->
[{doc, "Test the PSK ciphersuites WITH server supplied identity hint"}].
psk_with_hint_cipher_suites(Config) when is_list(Config) ->
- NVersion = tls_record:highest_protocol_version([]),
- Version = ssl_test_lib:protocol_version(Config),
+ NVersion = ssl_test_lib:protocol_version(Config, tuple),
Ciphers = ssl_test_lib:psk_suites(NVersion),
- run_suites(Ciphers, Version, Config, psk_with_hint).
+ run_suites(Ciphers, Config, psk_with_hint).
%%-------------------------------------------------------------------
psk_anon_cipher_suites() ->
[{doc, "Test the anonymous PSK ciphersuites WITHOUT server supplied identity hint"}].
psk_anon_cipher_suites(Config) when is_list(Config) ->
- NVersion = tls_record:highest_protocol_version([]),
- Version = ssl_test_lib:protocol_version(Config),
+ NVersion = ssl_test_lib:protocol_version(Config, tuple),
Ciphers = ssl_test_lib:psk_anon_suites(NVersion),
- run_suites(Ciphers, Version, Config, psk_anon).
+ run_suites(Ciphers, Config, psk_anon).
%%-------------------------------------------------------------------
psk_anon_with_hint_cipher_suites()->
[{doc, "Test the anonymous PSK ciphersuites WITH server supplied identity hint"}].
psk_anon_with_hint_cipher_suites(Config) when is_list(Config) ->
- NVersion = tls_record:highest_protocol_version([]),
- Version = ssl_test_lib:protocol_version(Config),
+ NVersion = ssl_test_lib:protocol_version(Config, tuple),
Ciphers = ssl_test_lib:psk_anon_suites(NVersion),
- run_suites(Ciphers, Version, Config, psk_anon_with_hint).
+ run_suites(Ciphers, Config, psk_anon_with_hint).
%%-------------------------------------------------------------------
srp_cipher_suites()->
[{doc, "Test the SRP ciphersuites"}].
srp_cipher_suites(Config) when is_list(Config) ->
- Version = ssl_test_lib:protocol_version(Config),
Ciphers = ssl_test_lib:srp_suites(),
- run_suites(Ciphers, Version, Config, srp).
+ run_suites(Ciphers, Config, srp).
%%-------------------------------------------------------------------
srp_anon_cipher_suites()->
[{doc, "Test the anonymous SRP ciphersuites"}].
srp_anon_cipher_suites(Config) when is_list(Config) ->
- Version = ssl_test_lib:protocol_version(Config),
Ciphers = ssl_test_lib:srp_anon_suites(),
- run_suites(Ciphers, Version, Config, srp_anon).
+ run_suites(Ciphers, Config, srp_anon).
%%-------------------------------------------------------------------
srp_dsa_cipher_suites()->
[{doc, "Test the SRP DSA ciphersuites"}].
srp_dsa_cipher_suites(Config) when is_list(Config) ->
- Version = ssl_test_lib:protocol_version(Config),
Ciphers = ssl_test_lib:srp_dss_suites(),
- run_suites(Ciphers, Version, Config, srp_dsa).
+ run_suites(Ciphers, Config, srp_dsa).
%%-------------------------------------------------------------------
rc4_rsa_cipher_suites()->
[{doc, "Test the RC4 ciphersuites"}].
rc4_rsa_cipher_suites(Config) when is_list(Config) ->
- NVersion = tls_record:highest_protocol_version([]),
- Version = tls_record:protocol_version(NVersion),
- Ciphers = ssl_test_lib:rc4_suites(NVersion),
- run_suites(Ciphers, Version, Config, rc4_rsa).
+ NVersion = ssl_test_lib:protocol_version(Config, tuple),
+ Ciphers = [S || {rsa,_,_} = S <- ssl_test_lib:rc4_suites(NVersion)],
+ run_suites(Ciphers, Config, rc4_rsa).
%%-------------------------------------------------------------------
rc4_ecdh_rsa_cipher_suites()->
[{doc, "Test the RC4 ciphersuites"}].
rc4_ecdh_rsa_cipher_suites(Config) when is_list(Config) ->
- NVersion = tls_record:highest_protocol_version([]),
- Version = tls_record:protocol_version(NVersion),
- Ciphers = ssl_test_lib:rc4_suites(NVersion),
- run_suites(Ciphers, Version, Config, rc4_ecdh_rsa).
+ NVersion = ssl_test_lib:protocol_version(Config, tuple),
+ Ciphers = [S || {ecdh_rsa,_,_} = S <- ssl_test_lib:rc4_suites(NVersion)],
+ run_suites(Ciphers, Config, rc4_ecdh_rsa).
%%-------------------------------------------------------------------
rc4_ecdsa_cipher_suites()->
[{doc, "Test the RC4 ciphersuites"}].
rc4_ecdsa_cipher_suites(Config) when is_list(Config) ->
NVersion = tls_record:highest_protocol_version([]),
- Version = tls_record:protocol_version(NVersion),
- Ciphers = ssl_test_lib:rc4_suites(NVersion),
- run_suites(Ciphers, Version, Config, rc4_ecdsa).
+ Ciphers = [S || {ecdhe_ecdsa,_,_} = S <- ssl_test_lib:rc4_suites(NVersion)],
+ run_suites(Ciphers, Config, rc4_ecdsa).
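%% Editor's sketch (not part of the patch): the list comprehensions above
%% rely on ssl_test_lib:rc4_suites/1 now returning suites in
%% erl_suite_definition/1 form, typically {KeyExchange, Cipher, Hash}
%% tuples, so selecting suites for one key exchange is a plain pattern
%% match. A hypothetical helper illustrating the same idea:
rc4_suites_for(KeyExchange, NVersion) ->
    [Suite || {Kex, _, _} = Suite <- ssl_test_lib:rc4_suites(NVersion),
              Kex =:= KeyExchange].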
%%-------------------------------------------------------------------
des_rsa_cipher_suites()->
[{doc, "Test the des_rsa ciphersuites"}].
des_rsa_cipher_suites(Config) when is_list(Config) ->
- Version = ssl_test_lib:protocol_version(Config),
Ciphers = ssl_test_lib:des_suites(Config),
- run_suites(Ciphers, Version, Config, des_rsa).
+ run_suites(Ciphers, Config, des_rsa).
%%-------------------------------------------------------------------
des_ecdh_rsa_cipher_suites()->
[{doc, "Test ECDH rsa signed ciphersuites"}].
des_ecdh_rsa_cipher_suites(Config) when is_list(Config) ->
NVersion = ssl_test_lib:protocol_version(Config, tuple),
- Version = ssl_test_lib:protocol_version(Config),
Ciphers = ssl_test_lib:des_suites(NVersion),
- run_suites(Ciphers, Version, Config, des_dhe_rsa).
+ run_suites(Ciphers, Config, des_dhe_rsa).
%%--------------------------------------------------------------------
default_reject_anonymous()->
@@ -2476,38 +2478,30 @@ ciphers_ecdsa_signed_certs() ->
ciphers_ecdsa_signed_certs(Config) when is_list(Config) ->
NVersion = ssl_test_lib:protocol_version(Config, tuple),
- Version = ssl_test_lib:protocol_version(Config),
Ciphers = ssl_test_lib:ecdsa_suites(NVersion),
- ct:log("~p erlang cipher suites ~p~n", [Version, Ciphers]),
- run_suites(Ciphers, Version, Config, ecdsa).
+ run_suites(Ciphers, Config, ecdsa).
%%--------------------------------------------------------------------
ciphers_ecdsa_signed_certs_openssl_names() ->
[{doc, "Test all ecdsa ssl cipher suites in highest support ssl/tls version"}].
ciphers_ecdsa_signed_certs_openssl_names(Config) when is_list(Config) ->
- Version = ssl_test_lib:protocol_version(Config),
Ciphers = ssl_test_lib:openssl_ecdsa_suites(),
- ct:log("tls1 openssl cipher suites ~p~n", [Ciphers]),
- run_suites(Ciphers, Version, Config, ecdsa).
+ run_suites(Ciphers, Config, ecdsa).
%%--------------------------------------------------------------------
ciphers_ecdh_rsa_signed_certs() ->
[{doc, "Test all ecdh_rsa ssl cipher suites in highest support ssl/tls version"}].
ciphers_ecdh_rsa_signed_certs(Config) when is_list(Config) ->
NVersion = ssl_test_lib:protocol_version(Config, tuple),
- Version = ssl_test_lib:protocol_version(Config),
Ciphers = ssl_test_lib:ecdh_rsa_suites(NVersion),
- ct:log("~p erlang cipher suites ~p~n", [Version, Ciphers]),
- run_suites(Ciphers, Version, Config, ecdh_rsa).
+ run_suites(Ciphers, Config, ecdh_rsa).
%%--------------------------------------------------------------------
ciphers_ecdh_rsa_signed_certs_openssl_names() ->
[{doc, "Test all ecdh_rsa ssl cipher suites in highest support ssl/tls version"}].
ciphers_ecdh_rsa_signed_certs_openssl_names(Config) when is_list(Config) ->
- Version = ssl_test_lib:protocol_version(Config),
Ciphers = ssl_test_lib:openssl_ecdh_rsa_suites(),
- ct:log("tls1 openssl cipher suites ~p~n", [Ciphers]),
- run_suites(Ciphers, Version, Config, ecdh_rsa).
+ run_suites(Ciphers, Config, ecdh_rsa).
%%--------------------------------------------------------------------
reuse_session() ->
[{doc,"Test reuse of sessions (short handshake)"}].
@@ -3024,37 +3018,6 @@ der_input_opts(Opts) ->
{Cert, {Asn1Type, Key}, CaCerts, DHParams}.
%%--------------------------------------------------------------------
-%% different_ca_peer_sign() ->
-%% ["Check that a CA can have a different signature algorithm than the peer cert."];
-
-%% different_ca_peer_sign(Config) when is_list(Config) ->
-%% ClientOpts = ssl_test_lib:ssl_options(client_mix_opts, Config),
-%% ServerOpts = ssl_test_lib:ssl_options(server_mix_verify_opts, Config),
-
-%% {ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
-%% Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
-%% {from, self()},
-%% {mfa, {ssl_test_lib, send_recv_result_active_once, []}},
-%% {options, [{active, once},
-%% {verify, verify_peer} | ServerOpts]}]),
-%% Port = ssl_test_lib:inet_port(Server),
-
-%% Client = ssl_test_lib:start_client([{node, ClientNode}, {port, Port},
-%% {host, Hostname},
-%% {from, self()},
-%% {mfa, {ssl_test_lib,
-%% send_recv_result_active_once,
-%% []}},
-%% {options, [{active, once},
-%% {verify, verify_peer}
-%% | ClientOpts]}]),
-
-%% ssl_test_lib:check_result(Server, ok, Client, ok),
-%% ssl_test_lib:close(Server),
-%% ssl_test_lib:close(Client).
-
-
-%%--------------------------------------------------------------------
no_reuses_session_server_restart_new_cert() ->
[{doc,"Check that a session is not reused if the server is restarted with a new cert."}].
no_reuses_session_server_restart_new_cert(Config) when is_list(Config) ->
@@ -3122,14 +3085,14 @@ no_reuses_session_server_restart_new_cert_file(Config) when is_list(Config) ->
DsaServerOpts = ssl_test_lib:ssl_options(server_dsa_opts, Config),
PrivDir = proplists:get_value(priv_dir, Config),
- NewServerOpts = new_config(PrivDir, ServerOpts),
+ NewServerOpts0 = new_config(PrivDir, ServerOpts),
{ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
Server =
ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
{from, self()},
{mfa, {ssl_test_lib, session_info_result, []}},
- {options, NewServerOpts}]),
+ {options, NewServerOpts0}]),
Port = ssl_test_lib:inet_port(Server),
Client0 =
ssl_test_lib:start_client([{node, ClientNode},
@@ -3150,13 +3113,13 @@ no_reuses_session_server_restart_new_cert_file(Config) when is_list(Config) ->
ssl:clear_pem_cache(),
- NewServerOpts = new_config(PrivDir, DsaServerOpts),
+ NewServerOpts1 = new_config(PrivDir, DsaServerOpts),
Server1 =
ssl_test_lib:start_server([{node, ServerNode}, {port, Port},
{from, self()},
{mfa, {ssl_test_lib, no_result, []}},
- {options, NewServerOpts}]),
+ {options, NewServerOpts1}]),
Client1 =
ssl_test_lib:start_client([{node, ClientNode},
{port, Port}, {host, Hostname},
@@ -3807,8 +3770,10 @@ no_rizzo_rc4() ->
no_rizzo_rc4(Config) when is_list(Config) ->
Prop = proplists:get_value(tc_group_properties, Config),
Version = proplists:get_value(name, Prop),
- Ciphers = [ssl_cipher:erl_suite_definition(Suite) ||
- Suite <- ssl_test_lib:rc4_suites(tls_record:protocol_version(Version))],
+ NVersion = ssl_test_lib:protocol_version(Config, tuple),
+ %% Test uses RSA certs
+ Ciphers = ssl_test_lib:rc4_suites(NVersion) -- [{ecdhe_ecdsa,rc4_128,sha},
+ {ecdh_ecdsa,rc4_128,sha}],
run_send_recv_rizzo(Ciphers, Config, Version,
{?MODULE, send_recv_result_active_no_rizzo, []}).
@@ -3818,7 +3783,8 @@ rizzo_one_n_minus_one() ->
rizzo_one_n_minus_one(Config) when is_list(Config) ->
Prop = proplists:get_value(tc_group_properties, Config),
Version = proplists:get_value(name, Prop),
- AllSuites = ssl_test_lib:available_suites(tls_record:protocol_version(Version)),
+ NVersion = ssl_test_lib:protocol_version(Config, tuple),
+ AllSuites = ssl_test_lib:available_suites(NVersion),
Ciphers = [X || X ={_,Y,_} <- AllSuites, Y =/= rc4_128],
run_send_recv_rizzo(Ciphers, Config, Version,
{?MODULE, send_recv_result_active_rizzo, []}).
@@ -3829,7 +3795,8 @@ rizzo_zero_n() ->
rizzo_zero_n(Config) when is_list(Config) ->
Prop = proplists:get_value(tc_group_properties, Config),
Version = proplists:get_value(name, Prop),
- AllSuites = ssl_test_lib:available_suites(tls_record:protocol_version(Version)),
+ NVersion = ssl_test_lib:protocol_version(Config, tuple),
+ AllSuites = ssl_test_lib:available_suites(NVersion),
Ciphers = [X || X ={_,Y,_} <- AllSuites, Y =/= rc4_128],
run_send_recv_rizzo(Ciphers, Config, Version,
{?MODULE, send_recv_result_active_no_rizzo, []}).
@@ -4631,7 +4598,10 @@ client_server_opts({KeyAlgo,_,_}, Config) when KeyAlgo == ecdh_rsa ->
{ssl_test_lib:ssl_options(client_opts, Config),
ssl_test_lib:ssl_options(server_ecdh_rsa_opts, Config)}.
-run_suites(Ciphers, Version, Config, Type) ->
+run_suites(Ciphers, Config, Type) ->
+ NVersion = ssl_test_lib:protocol_version(Config, tuple),
+ Version = ssl_test_lib:protocol_version(Config),
+ ct:log("Running cipher suites ~p~n", [Ciphers]),
{ClientOpts, ServerOpts} =
case Type of
rsa ->
@@ -4643,23 +4613,24 @@ run_suites(Ciphers, Version, Config, Type) ->
anonymous ->
%% No certs in opts!
{ssl_test_lib:ssl_options(client_verification_opts, Config),
- [{reuseaddr, true}, {ciphers, ssl_test_lib:anonymous_suites(Version)}]};
+ [{reuseaddr, true}, {ciphers, ssl_test_lib:anonymous_suites(NVersion)} |
+ ssl_test_lib:ssl_options([], Config)]};
psk ->
{ssl_test_lib:ssl_options(client_psk, Config),
- [{ciphers, ssl_test_lib:psk_suites(Version)} |
+ [{ciphers, ssl_test_lib:psk_suites(NVersion)} |
ssl_test_lib:ssl_options(server_psk, Config)]};
psk_with_hint ->
{ssl_test_lib:ssl_options(client_psk, Config),
- [{ciphers, ssl_test_lib:psk_suites(Version)} |
+ [{ciphers, ssl_test_lib:psk_suites(NVersion)} |
ssl_test_lib:ssl_options(server_psk_hint, Config)
]};
psk_anon ->
{ssl_test_lib:ssl_options(client_psk, Config),
- [{ciphers, ssl_test_lib:psk_anon_suites(Version)} |
+ [{ciphers, ssl_test_lib:psk_anon_suites(NVersion)} |
ssl_test_lib:ssl_options(server_psk_anon, Config)]};
psk_anon_with_hint ->
{ssl_test_lib:ssl_options(client_psk, Config),
- [{ciphers, ssl_test_lib:psk_anon_suites(Version)} |
+ [{ciphers, ssl_test_lib:psk_anon_suites(NVersion)} |
ssl_test_lib:ssl_options(server_psk_anon_hint, Config)]};
srp ->
{ssl_test_lib:ssl_options(client_srp, Config),
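%% Editor's sketch (not part of the patch, values are illustrative):
%% run_suites/3 now derives both version representations from Config
%% itself; protocol_version(Config) yields the version atom used for
%% option lookups, while protocol_version(Config, tuple) yields the
%% record-level tuple expected by the ssl_cipher suite functions.
version_forms(Config) ->
    {ssl_test_lib:protocol_version(Config),         %% e.g. 'tlsv1.2'
     ssl_test_lib:protocol_version(Config, tuple)}. %% e.g. {3,3}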
diff --git a/lib/ssl/test/ssl_certificate_verify_SUITE.erl b/lib/ssl/test/ssl_certificate_verify_SUITE.erl
index 6221cffdc1..c3fd73bf09 100644
--- a/lib/ssl/test/ssl_certificate_verify_SUITE.erl
+++ b/lib/ssl/test/ssl_certificate_verify_SUITE.erl
@@ -110,8 +110,8 @@ init_per_group(tls, Config0) ->
application:load(ssl),
application:set_env(ssl, protocol_version, Version),
ssl:start(),
- Config = proplists:delete(protocol, Config0),
- [{protocol, tls}, {version, tls_record:protocol_version(Version)} | Config];
+ Config = ssl_test_lib:init_tls_version(Version, Config0),
+ [{version, tls_record:protocol_version(Version)} | Config];
init_per_group(dtls, Config0) ->
Version = dtls_record:protocol_version(dtls_record:highest_protocol_version([])),
@@ -119,8 +119,8 @@ init_per_group(dtls, Config0) ->
application:load(ssl),
application:set_env(ssl, protocol_version, Version),
ssl:start(),
- Config = proplists:delete(protocol_opts, proplists:delete(protocol, Config0)),
- [{protocol, dtls}, {protocol_opts, [{protocol, dtls}]}, {version, dtls_record:protocol_version(Version)} | Config];
+ Config = ssl_test_lib:init_tls_version(Version, Config0),
+ [{version, dtls_record:protocol_version(Version)} | Config];
init_per_group(active, Config) ->
[{active, true}, {receive_function, send_recv_result_active} | Config];
@@ -134,6 +134,9 @@ init_per_group(error_handling, Config) ->
init_per_group(_, Config) ->
Config.
+end_per_group(GroupName, Config) when GroupName == tls;
+ GroupName == dtls ->
+ ssl_test_lib:clean_tls_version(Config);
end_per_group(_GroupName, Config) ->
Config.
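%% Editor's sketch (not part of the patch): the tls/dtls group setup and
%% the new end_per_group clause form a pair around the ssl_test_lib
%% helpers. init_tls_version/2 stores the protocol/protocol_opts entries
%% in Config and clean_tls_version/1 deletes them again, so the next
%% group starts from a clean proplist. A hypothetical illustration:
group_lifecycle(Version, Config0) ->
    Config = ssl_test_lib:init_tls_version(Version, Config0),
    %% ... the group's test cases would run here ...
    ssl_test_lib:clean_tls_version(Config).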
diff --git a/lib/ssl/test/ssl_crl_SUITE.erl b/lib/ssl/test/ssl_crl_SUITE.erl
index e293d183f7..668c76e38d 100644
--- a/lib/ssl/test/ssl_crl_SUITE.erl
+++ b/lib/ssl/test/ssl_crl_SUITE.erl
@@ -155,9 +155,15 @@ init_per_testcase(Case, Config0) ->
DataDir = proplists:get_value(data_dir, Config),
CertDir = filename:join(proplists:get_value(priv_dir, Config0), idp_crl),
{CertOpts, Config} = init_certs(CertDir, idp_crl, Config),
- {ok, _} = make_certs:all(DataDir, CertDir, CertOpts),
- ct:timetrap({seconds, 6}),
- [{cert_dir, CertDir} | Config];
+ case make_certs:all(DataDir, CertDir, CertOpts) of
+ {ok, _} ->
+ ct:timetrap({seconds, 6}),
+ [{cert_dir, CertDir} | Config];
+ _ ->
+ end_per_testcase(Case, Config0),
+ ssl_test_lib:clean_start(),
+ {skip, "Unable to create IDP crls"}
+ end;
false ->
end_per_testcase(Case, Config0),
ssl_test_lib:clean_start(),
diff --git a/lib/ssl/test/ssl_npn_handshake_SUITE.erl b/lib/ssl/test/ssl_npn_handshake_SUITE.erl
index a02881f1ae..6bf2aa2786 100644
--- a/lib/ssl/test/ssl_npn_handshake_SUITE.erl
+++ b/lib/ssl/test/ssl_npn_handshake_SUITE.erl
@@ -95,8 +95,13 @@ init_per_group(GroupName, Config) ->
Config
end.
-end_per_group(_GroupName, Config) ->
- Config.
+end_per_group(GroupName, Config) ->
+ case ssl_test_lib:is_tls_version(GroupName) of
+ true ->
+ ssl_test_lib:clean_tls_version(Config);
+ false ->
+ Config
+ end.
init_per_testcase(_TestCase, Config) ->
ssl_test_lib:ct_log_supported_protocol_versions(Config),
diff --git a/lib/ssl/test/ssl_packet_SUITE.erl b/lib/ssl/test/ssl_packet_SUITE.erl
index 7281425461..408d62ce9c 100644
--- a/lib/ssl/test/ssl_packet_SUITE.erl
+++ b/lib/ssl/test/ssl_packet_SUITE.erl
@@ -53,28 +53,34 @@ all() ->
{group, 'tlsv1.2'},
{group, 'tlsv1.1'},
{group, 'tlsv1'},
- {group, 'sslv3'}
+ {group, 'sslv3'},
+ {group, 'dtlsv1.2'},
+ {group, 'dtlsv1'}
].
groups() ->
- [{'tlsv1.2', [], packet_tests()},
- {'tlsv1.1', [], packet_tests()},
- {'tlsv1', [], packet_tests()},
- {'sslv3', [], packet_tests()}
+ [{'tlsv1.2', [], socket_packet_tests() ++ protocol_packet_tests()},
+ {'tlsv1.1', [], socket_packet_tests() ++ protocol_packet_tests()},
+ {'tlsv1', [], socket_packet_tests() ++ protocol_packet_tests()},
+ {'sslv3', [], socket_packet_tests() ++ protocol_packet_tests()},
+ {'dtlsv1.2', [], protocol_packet_tests()},
+ {'dtlsv1', [], protocol_packet_tests()}
].
-packet_tests() ->
- active_packet_tests() ++ active_once_packet_tests() ++ passive_packet_tests() ++
- [packet_send_to_large,
- packet_cdr_decode, packet_cdr_decode_list,
+socket_packet_tests() ->
+ socket_active_packet_tests() ++ socket_active_once_packet_tests() ++
+ socket_passive_packet_tests() ++ [packet_send_to_large, packet_tpkt_decode, packet_tpkt_decode_list].
+
+protocol_packet_tests() ->
+ protocol_active_packet_tests() ++ protocol_active_once_packet_tests() ++ protocol_passive_packet_tests() ++
+ [packet_cdr_decode, packet_cdr_decode_list,
packet_http_decode, packet_http_decode_list,
packet_http_bin_decode_multi,
packet_line_decode, packet_line_decode_list,
packet_asn1_decode, packet_asn1_decode_list,
- packet_tpkt_decode, packet_tpkt_decode_list,
packet_sunrm_decode, packet_sunrm_decode_list].
-passive_packet_tests() ->
+socket_passive_packet_tests() ->
[packet_raw_passive_many_small,
packet_0_passive_many_small,
packet_1_passive_many_small,
@@ -85,12 +91,8 @@ passive_packet_tests() ->
packet_1_passive_some_big,
packet_2_passive_some_big,
packet_4_passive_some_big,
- packet_httph_passive,
- packet_httph_bin_passive,
- packet_http_error_passive,
packet_wait_passive,
packet_size_passive,
- packet_baddata_passive,
%% inet header option should be deprecated!
header_decode_one_byte_passive,
header_decode_two_bytes_passive,
@@ -98,7 +100,14 @@ passive_packet_tests() ->
header_decode_two_bytes_one_sent_passive
].
-active_once_packet_tests() ->
+protocol_passive_packet_tests() ->
+ [packet_httph_passive,
+ packet_httph_bin_passive,
+ packet_http_error_passive,
+ packet_baddata_passive
+ ].
+
+socket_active_once_packet_tests() ->
[packet_raw_active_once_many_small,
packet_0_active_once_many_small,
packet_1_active_once_many_small,
@@ -108,12 +117,16 @@ active_once_packet_tests() ->
packet_0_active_once_some_big,
packet_1_active_once_some_big,
packet_2_active_once_some_big,
- packet_4_active_once_some_big,
+ packet_4_active_once_some_big
+ ].
+
+protocol_active_once_packet_tests() ->
+ [
packet_httph_active_once,
packet_httph_bin_active_once
].
-active_packet_tests() ->
+socket_active_packet_tests() ->
[packet_raw_active_many_small,
packet_0_active_many_small,
packet_1_active_many_small,
@@ -124,10 +137,7 @@ active_packet_tests() ->
packet_1_active_some_big,
packet_2_active_some_big,
packet_4_active_some_big,
- packet_httph_active,
- packet_httph_bin_active,
packet_wait_active,
- packet_baddata_active,
packet_size_active,
%% inet header option should be deprecated!
header_decode_one_byte_active,
@@ -136,6 +146,13 @@ active_packet_tests() ->
header_decode_two_bytes_one_sent_active
].
+
+protocol_active_packet_tests() ->
+ [packet_httph_active,
+ packet_httph_bin_active,
+ packet_baddata_active
+ ].
+
init_per_suite(Config) ->
catch crypto:stop(),
try crypto:start() of
@@ -168,8 +185,13 @@ init_per_group(GroupName, Config) ->
end.
-end_per_group(_GroupName, Config) ->
- Config.
+end_per_group(GroupName, Config) ->
+ case ssl_test_lib:is_tls_version(GroupName) of
+ true ->
+ ssl_test_lib:clean_tls_version(Config);
+ false ->
+ Config
+ end.
init_per_testcase(_TestCase, Config) ->
ct:timetrap({seconds, ?BASE_TIMEOUT_SECONDS}),
diff --git a/lib/ssl/test/ssl_payload_SUITE.erl b/lib/ssl/test/ssl_payload_SUITE.erl
index cb1957327a..ef05241759 100644
--- a/lib/ssl/test/ssl_payload_SUITE.erl
+++ b/lib/ssl/test/ssl_payload_SUITE.erl
@@ -95,8 +95,13 @@ init_per_group(GroupName, Config) ->
Config
end.
-end_per_group(_GroupName, Config) ->
- Config.
+end_per_group(GroupName, Config) ->
+ case ssl_test_lib:is_tls_version(GroupName) of
+ true ->
+ ssl_test_lib:clean_tls_version(Config);
+ false ->
+ Config
+ end.
init_per_testcase(TestCase, Config) when TestCase == server_echos_passive_huge;
TestCase == server_echos_active_once_huge;
diff --git a/lib/ssl/test/ssl_sni_SUITE.erl b/lib/ssl/test/ssl_sni_SUITE.erl
index 4e916a7f03..03676cb828 100644
--- a/lib/ssl/test/ssl_sni_SUITE.erl
+++ b/lib/ssl/test/ssl_sni_SUITE.erl
@@ -30,21 +30,50 @@
%% Common Test interface functions -----------------------------------
%%--------------------------------------------------------------------
-all() -> [no_sni_header,
- sni_match,
- sni_no_match,
- no_sni_header_fun,
- sni_match_fun,
- sni_no_match_fun].
+all() ->
+ [{group, 'tlsv1.2'},
+ {group, 'tlsv1.1'},
+ {group, 'tlsv1'},
+ {group, 'sslv3'},
+ {group, 'dtlsv1.2'},
+ {group, 'dtlsv1'}
+ ].
+
+groups() ->
+ [
+ {'tlsv1.2', [], sni_tests()},
+ {'tlsv1.1', [], sni_tests()},
+ {'tlsv1', [], sni_tests()},
+ {'sslv3', [], sni_tests()},
+ {'dtlsv1.2', [], sni_tests()},
+ {'dtlsv1', [], sni_tests()}
+ ].
+
+sni_tests() ->
+ [no_sni_header,
+ sni_match,
+ sni_no_match,
+ no_sni_header_fun,
+ sni_match_fun,
+ sni_no_match_fun].
init_per_suite(Config0) ->
catch crypto:stop(),
try crypto:start() of
ok ->
ssl_test_lib:clean_start(),
- {ok, _} = make_certs:all(proplists:get_value(data_dir, Config0),
- proplists:get_value(priv_dir, Config0)),
- ssl_test_lib:cert_options(Config0)
+ Config = ssl_test_lib:make_rsa_cert(Config0),
+ RsaOpts = ssl_test_lib:ssl_options(server_rsa_opts, Config),
+ [{sni_server_opts, [{sni_hosts, [
+ {"a.server", [
+ {certfile, proplists:get_value(certfile, RsaOpts)},
+ {keyfile, proplists:get_value(keyfile, RsaOpts)}
+ ]},
+ {"b.server", [
+ {certfile, proplists:get_value(certfile, RsaOpts)},
+ {keyfile, proplists:get_value(keyfile, RsaOpts)}
+ ]}
+ ]}]} | Config]
catch _:_ ->
{skip, "Crypto did not start"}
end.
@@ -66,22 +95,22 @@ end_per_testcase(_TestCase, Config) ->
%% Test Cases --------------------------------------------------------
%%--------------------------------------------------------------------
no_sni_header(Config) ->
- run_handshake(Config, undefined, undefined, "server").
+ run_handshake(Config, undefined, undefined, "server Peer cert").
no_sni_header_fun(Config) ->
- run_sni_fun_handshake(Config, undefined, undefined, "server").
+ run_sni_fun_handshake(Config, undefined, undefined, "server Peer cert").
sni_match(Config) ->
- run_handshake(Config, "a.server", "a.server", "a.server").
+ run_handshake(Config, "a.server", "a.server", "server Peer cert").
sni_match_fun(Config) ->
- run_sni_fun_handshake(Config, "a.server", "a.server", "a.server").
+ run_sni_fun_handshake(Config, "a.server", "a.server", "server Peer cert").
sni_no_match(Config) ->
- run_handshake(Config, "c.server", undefined, "server").
+ run_handshake(Config, "c.server", undefined, "server Peer cert").
sni_no_match_fun(Config) ->
- run_sni_fun_handshake(Config, "c.server", undefined, "server").
+ run_sni_fun_handshake(Config, "c.server", undefined, "server Peer cert").
%%--------------------------------------------------------------------
@@ -141,13 +170,13 @@ run_sni_fun_handshake(Config, SNIHostname, ExpectedSNIHostname, ExpectedCN) ->
[Config, SNIHostname, ExpectedSNIHostname, ExpectedCN]),
[{sni_hosts, ServerSNIConf}] = proplists:get_value(sni_server_opts, Config),
SNIFun = fun(Domain) -> proplists:get_value(Domain, ServerSNIConf, undefined) end,
- ServerOptions = proplists:get_value(server_opts, Config) ++ [{sni_fun, SNIFun}],
+ ServerOptions = ssl_test_lib:ssl_options(server_rsa_opts, Config) ++ [{sni_fun, SNIFun}],
ClientOptions =
case SNIHostname of
undefined ->
- proplists:get_value(client_opts, Config);
+ ssl_test_lib:ssl_options(client_rsa_opts, Config);
_ ->
- [{server_name_indication, SNIHostname}] ++ proplists:get_value(client_opts, Config)
+ [{server_name_indication, SNIHostname}] ++ ssl_test_lib:ssl_options(client_rsa_opts, Config)
end,
ct:log("Options: ~p", [[ServerOptions, ClientOptions]]),
{ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
@@ -167,14 +196,14 @@ run_handshake(Config, SNIHostname, ExpectedSNIHostname, ExpectedCN) ->
ct:log("Start running handshake, Config: ~p, SNIHostname: ~p, "
"ExpectedSNIHostname: ~p, ExpectedCN: ~p",
[Config, SNIHostname, ExpectedSNIHostname, ExpectedCN]),
- ServerOptions = proplists:get_value(sni_server_opts, Config) ++ proplists:get_value(server_opts, Config),
+ ServerOptions = proplists:get_value(sni_server_opts, Config) ++ ssl_test_lib:ssl_options(server_rsa_opts, Config),
ClientOptions =
- case SNIHostname of
- undefined ->
- proplists:get_value(client_opts, Config);
- _ ->
- [{server_name_indication, SNIHostname}] ++ proplists:get_value(client_opts, Config)
- end,
+ case SNIHostname of
+ undefined ->
+ ssl_test_lib:ssl_options(client_rsa_opts, Config);
+ _ ->
+ [{server_name_indication, SNIHostname}] ++ ssl_test_lib:ssl_options(client_rsa_opts, Config)
+ end,
ct:log("Options: ~p", [[ServerOptions, ClientOptions]]),
{ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
diff --git a/lib/ssl/test/ssl_test_lib.erl b/lib/ssl/test/ssl_test_lib.erl
index 77c21d9b57..aae2927575 100644
--- a/lib/ssl/test/ssl_test_lib.erl
+++ b/lib/ssl/test/ssl_test_lib.erl
@@ -384,10 +384,6 @@ cert_options(Config) ->
"badkey.pem"]),
PskSharedSecret = <<1,2,3,4,5,6,7,8,9,10,11,12,13,14,15>>,
- SNIServerACertFile = filename:join([proplists:get_value(priv_dir, Config), "a.server", "cert.pem"]),
- SNIServerAKeyFile = filename:join([proplists:get_value(priv_dir, Config), "a.server", "key.pem"]),
- SNIServerBCertFile = filename:join([proplists:get_value(priv_dir, Config), "b.server", "cert.pem"]),
- SNIServerBKeyFile = filename:join([proplists:get_value(priv_dir, Config), "b.server", "key.pem"]),
[{client_opts, [{cacertfile, ClientCaCertFile},
{certfile, ClientCertFile},
{keyfile, ClientKeyFile}]},
@@ -445,46 +441,34 @@ cert_options(Config) ->
{server_bad_cert, [{ssl_imp, new},{cacertfile, ServerCaCertFile},
{certfile, BadCertFile}, {keyfile, ServerKeyFile}]},
{server_bad_key, [{ssl_imp, new},{cacertfile, ServerCaCertFile},
- {certfile, ServerCertFile}, {keyfile, BadKeyFile}]},
- {sni_server_opts, [{sni_hosts, [
- {"a.server", [
- {certfile, SNIServerACertFile},
- {keyfile, SNIServerAKeyFile}
- ]},
- {"b.server", [
- {certfile, SNIServerBCertFile},
- {keyfile, SNIServerBKeyFile}
- ]}
- ]}]}
+ {certfile, ServerCertFile}, {keyfile, BadKeyFile}]}
| Config].
-make_dsa_cert(Config) ->
- {ServerCaCertFile, ServerCertFile, ServerKeyFile} =
- make_cert_files("server", Config, dsa, dsa, "", []),
- {ClientCaCertFile, ClientCertFile, ClientKeyFile} =
- make_cert_files("client", Config, dsa, dsa, "", []),
- [{server_dsa_opts, [{ssl_imp, new},{reuseaddr, true},
- {cacertfile, ServerCaCertFile},
- {certfile, ServerCertFile}, {keyfile, ServerKeyFile}]},
- {server_dsa_verify_opts, [{ssl_imp, new},{reuseaddr, true},
- {cacertfile, ClientCaCertFile},
- {certfile, ServerCertFile}, {keyfile, ServerKeyFile},
- {verify, verify_peer}]},
- {client_dsa_opts, [{ssl_imp, new},
- {cacertfile, ClientCaCertFile},
- {certfile, ClientCertFile}, {keyfile, ClientKeyFile}]},
- {server_srp_dsa, [{ssl_imp, new},{reuseaddr, true},
- {cacertfile, ServerCaCertFile},
- {certfile, ServerCertFile}, {keyfile, ServerKeyFile},
- {user_lookup_fun, {fun user_lookup/3, undefined}},
- {ciphers, srp_dss_suites()}]},
- {client_srp_dsa, [{ssl_imp, new},
- {srp_identity, {"Test-User", "secret"}},
- {cacertfile, ClientCaCertFile},
- {certfile, ClientCertFile}, {keyfile, ClientKeyFile}]}
- | Config].
-
+make_dsa_cert(Config) ->
+ CryptoSupport = crypto:supports(),
+ case proplists:get_bool(dss, proplists:get_value(public_keys, CryptoSupport)) of
+ true ->
+ ClientFileBase = filename:join([proplists:get_value(priv_dir, Config), "dsa"]),
+ ServerFileBase = filename:join([proplists:get_value(priv_dir, Config), "dsa"]),
+ KeyGenSpec = key_gen_info(dsa, dsa),
+
+ GenCertData = x509_test:gen_test_certs([{digest, sha} | KeyGenSpec]),
+ [{server_config, ServerConf},
+ {client_config, ClientConf}] =
+ x509_test:gen_pem_config_files(GenCertData, ClientFileBase, ServerFileBase),
+
+ [{server_dsa_opts, ServerConf},
+ {server_dsa_verify_opts, [{verify, verify_peer} | ServerConf]},
+ {client_dsa_opts, ClientConf},
+ {server_srp_dsa, [{user_lookup_fun, {fun user_lookup/3, undefined}},
+ {ciphers, srp_dss_suites()} | ServerConf]},
+ {client_srp_dsa, [{srp_identity, {"Test-User", "secret"}}
+ | ClientConf]}
+ | Config];
+ false ->
+ Config
+ end.
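%% Editor's sketch (not part of the patch): the capability check used by
%% the rewritten make_dsa_cert/1 can be reused to skip cleanly when the
%% crypto library lacks DSS support. Hypothetical helper:
maybe_skip_dsa(Config) ->
    PubKeys = proplists:get_value(public_keys, crypto:supports()),
    case proplists:get_bool(dss, PubKeys) of
        true -> Config;
        false -> {skip, "Crypto does not support DSS"}
    end.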
make_rsa_cert_chains(ChainConf, Config, Suffix) ->
CryptoSupport = crypto:supports(),
KeyGenSpec = key_gen_info(rsa, rsa),
@@ -541,6 +525,11 @@ key_gen_spec(Role, rsa) ->
[{list_to_atom(Role ++ "_key_gen"), hardcode_rsa_key(1)},
{list_to_atom(Role ++ "_key_gen_chain"), [hardcode_rsa_key(2),
hardcode_rsa_key(3)]}
+ ];
+key_gen_spec(Role, dsa) ->
+ [{list_to_atom(Role ++ "_key_gen"), hardcode_dsa_key(1)},
+ {list_to_atom(Role ++ "_key_gen_chain"), [hardcode_dsa_key(2),
+ hardcode_dsa_key(3)]}
].
make_ecdsa_cert(Config) ->
CryptoSupport = crypto:supports(),
@@ -638,41 +627,6 @@ make_ecdh_rsa_cert(Config) ->
Config
end.
-make_mix_cert(Config) ->
- {ServerCaCertFile, ServerCertFile, ServerKeyFile} = make_cert_files("server", Config, dsa,
- rsa, "mix", []),
- {ClientCaCertFile, ClientCertFile, ClientKeyFile} = make_cert_files("client", Config, dsa,
- rsa, "mix", []),
- [{server_mix_opts, [{ssl_imp, new},{reuseaddr, true},
- {cacertfile, ServerCaCertFile},
- {certfile, ServerCertFile}, {keyfile, ServerKeyFile}]},
- {server_mix_verify_opts, [{ssl_imp, new},{reuseaddr, true},
- {cacertfile, ClientCaCertFile},
- {certfile, ServerCertFile}, {keyfile, ServerKeyFile},
- {verify, verify_peer}]},
- {client_mix_opts, [{ssl_imp, new},
- {cacertfile, ClientCaCertFile},
- {certfile, ClientCertFile}, {keyfile, ClientKeyFile}]}
- | Config].
-
-make_cert_files(RoleStr, Config, Alg1, Alg2, Prefix, Opts) ->
- Alg1Str = atom_to_list(Alg1),
- Alg2Str = atom_to_list(Alg2),
- CaInfo = {CaCert, _} = erl_make_certs:make_cert([{key, Alg1}| Opts]),
- {Cert, CertKey} = erl_make_certs:make_cert([{key, Alg2}, {issuer, CaInfo} | Opts]),
- CaCertFile = filename:join([proplists:get_value(priv_dir, Config),
- RoleStr, Prefix ++ Alg1Str ++ "_cacerts.pem"]),
- CertFile = filename:join([proplists:get_value(priv_dir, Config),
- RoleStr, Prefix ++ Alg2Str ++ "_cert.pem"]),
- KeyFile = filename:join([proplists:get_value(priv_dir, Config),
- RoleStr, Prefix ++ Alg2Str ++ "_key.pem"]),
-
- der_to_pem(CaCertFile, [{'Certificate', CaCert, not_encrypted}]),
- der_to_pem(CertFile, [{'Certificate', Cert, not_encrypted}]),
- der_to_pem(KeyFile, [CertKey]),
- {CaCertFile, CertFile, KeyFile}.
-
-
start_upgrade_server(Args) ->
Result = spawn_link(?MODULE, run_upgrade_server, [Args]),
receive
@@ -983,16 +937,10 @@ ecdh_rsa_suites(Version) ->
end,
available_suites(Version)).
-openssl_rsa_suites(CounterPart) ->
+openssl_rsa_suites() ->
Ciphers = ssl:cipher_suites(openssl),
- Names = case is_sane_ecc(CounterPart) of
- true ->
- "DSS | ECDSA";
- false ->
- "DSS | ECDHE | ECDH"
- end,
- lists:filter(fun(Str) -> string_regex_filter(Str, Names)
- end, Ciphers).
+ lists:filter(fun(Str) -> string_regex_filter(Str, "RSA")
+ end, Ciphers) -- openssl_ecdh_rsa_suites().
openssl_dsa_suites() ->
Ciphers = ssl:cipher_suites(openssl),
@@ -1026,11 +974,11 @@ string_regex_filter(_Str, _Search) ->
false.
anonymous_suites(Version) ->
- Suites = ssl_cipher:anonymous_suites(Version),
+ Suites = [ssl_cipher:erl_suite_definition(S) || S <- ssl_cipher:anonymous_suites(Version)],
ssl_cipher:filter_suites(Suites).
psk_suites(Version) ->
- Suites = ssl_cipher:psk_suites(Version),
+ Suites = [ssl_cipher:erl_suite_definition(S) || S <- ssl_cipher:psk_suites(Version)],
ssl_cipher:filter_suites(Suites).
psk_anon_suites(Version) ->
@@ -1062,7 +1010,7 @@ srp_dss_suites() ->
ssl_cipher:filter_suites(Suites).
rc4_suites(Version) ->
- Suites = ssl_cipher:rc4_suites(Version),
+ Suites = [ssl_cipher:erl_suite_definition(S) || S <- ssl_cipher:rc4_suites(Version)],
ssl_cipher:filter_suites(Suites).
des_suites(Version) ->
@@ -1167,6 +1115,9 @@ init_tls_version(Version, Config) ->
NewConfig = proplists:delete(protocol_opts, proplists:delete(protocol, Config)),
[{protocol, tls} | NewConfig].
+clean_tls_version(Config) ->
+ proplists:delete(protocol_opts, proplists:delete(protocol, Config)).
+
sufficient_crypto_support(Version)
when Version == 'tlsv1.2'; Version == 'dtlsv1.2' ->
CryptoSupport = crypto:supports(),
@@ -1276,7 +1227,7 @@ is_fips(_) ->
false.
cipher_restriction(Config0) ->
- Version = tls_record:protocol_version(protocol_version(Config0)),
+ Version = protocol_version(Config0, tuple),
case is_sane_ecc(openssl) of
false ->
Opts = proplists:get_value(server_opts, Config0),
@@ -1294,13 +1245,19 @@ check_sane_openssl_version(Version) ->
case supports_ssl_tls_version(Version) of
true ->
case {Version, os:cmd("openssl version")} of
+ {'sslv3', "OpenSSL 1.0.2" ++ _} ->
+ false;
{_, "OpenSSL 1.0.2" ++ _} ->
true;
{_, "OpenSSL 1.0.1" ++ _} ->
true;
- {'tlsv1.2', "OpenSSL 1.0" ++ _} ->
+ {'tlsv1.2', "OpenSSL 1.0.0" ++ _} ->
+ false;
+ {'tlsv1.1', "OpenSSL 1.0.0" ++ _} ->
+ false;
+ {'dtlsv1.2', "OpenSSL 1.0.0" ++ _} ->
false;
- {'tlsv1.1', "OpenSSL 1.0" ++ _} ->
+ {'dtlsv1', "OpenSSL 1.0.0" ++ _} ->
false;
{'tlsv1.2', "OpenSSL 0" ++ _} ->
false;
@@ -1365,6 +1322,12 @@ version_flag('dtlsv1.2') ->
version_flag('dtlsv1') ->
"-dtls1".
+filter_suites([Cipher | _] = Ciphers, AtomVersion) when is_list(Cipher)->
+ filter_suites([ssl_cipher:openssl_suite(S) || S <- Ciphers],
+ AtomVersion);
+filter_suites([Cipher | _] = Ciphers, AtomVersion) when is_binary(Cipher)->
+ filter_suites([ssl_cipher:erl_suite_definition(S) || S <- Ciphers],
+ AtomVersion);
filter_suites(Ciphers0, AtomVersion) ->
Version = tls_version(AtomVersion),
Supported0 = ssl_cipher:suites(Version)
@@ -1419,12 +1382,15 @@ supports_ssl_tls_version(sslv2 = Version) ->
case os:cmd("openssl version") of
"OpenSSL 1" ++ _ ->
false;
+ %% Appears to be broken
+ "OpenSSL 0.9.8.o" ++ _ ->
+ false;
_ ->
VersionFlag = version_flag(Version),
Exe = "openssl",
Args = ["s_client", VersionFlag],
Port = ssl_test_lib:portable_open_port(Exe, Args),
- do_supports_ssl_tls_version(Port)
+ do_supports_ssl_tls_version(Port, "")
end;
supports_ssl_tls_version(Version) ->
@@ -1432,23 +1398,26 @@ supports_ssl_tls_version(Version) ->
Exe = "openssl",
Args = ["s_client", VersionFlag],
Port = ssl_test_lib:portable_open_port(Exe, Args),
- do_supports_ssl_tls_version(Port).
+ do_supports_ssl_tls_version(Port, "").
-do_supports_ssl_tls_version(Port) ->
+do_supports_ssl_tls_version(Port, Acc) ->
receive
- {Port, {data, "u"}} ->
- false;
- {Port, {data, "unknown option" ++ _}} ->
- false;
- {Port, {data, Data}} ->
- case lists:member("error", string:tokens(Data, ":")) of
- true ->
- false;
- false ->
- do_supports_ssl_tls_version(Port)
- end
+ {Port, {data, Data}} ->
+ case Acc ++ Data of
+ "unknown option" ++ _ ->
+ false;
+ Error when length(Error) >= 11 ->
+ case lists:member("error", string:tokens(Data, ":")) of
+ true ->
+ false;
+ false ->
+ do_supports_ssl_tls_version(Port, Error)
+ end;
+ _ ->
+ do_supports_ssl_tls_version(Port, Acc ++ Data)
+ end
after 1000 ->
- true
+ true
end.
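%% Editor's sketch (not part of the patch): do_supports_ssl_tls_version/2
%% now accumulates port output, since "openssl s_client" error text such
%% as "unknown option" may arrive split across several {Port, {data, Data}}
%% messages. The entry points therefore start the loop with an empty
%% accumulator; a hypothetical caller would look like:
openssl_supports(VersionFlag) ->
    Port = ssl_test_lib:portable_open_port("openssl", ["s_client", VersionFlag]),
    do_supports_ssl_tls_version(Port, "").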
ssl_options(Option, Config) when is_atom(Option) ->
@@ -1493,6 +1462,7 @@ ct_log_supported_protocol_versions(Config) ->
clean_env() ->
application:unset_env(ssl, protocol_version),
+ application:unset_env(ssl, dtls_protocol_version),
application:unset_env(ssl, session_lifetime),
application:unset_env(ssl, session_cb),
application:unset_env(ssl, session_cb_init_args),
@@ -1535,7 +1505,7 @@ tls_version(Atom) ->
tls_record:protocol_version(Atom).
hardcode_rsa_key(1) ->
- {'RSAPrivateKey',0,
+ {'RSAPrivateKey', 'two-prime',
23995666614853919027835084074500048897452890537492185072956789802729257783422306095699263934587064480357348855732149402060270996295002843755712064937715826848741191927820899197493902093529581182351132392364214171173881547273475904587683433713767834856230531387991145055273426806331200574039205571401702219159773947658558490957010003143162250693492642996408861265758000254664396313741422909188635443907373976005987612936763564996605457102336549804831742940035613780926178523017685712710473543251580072875247250504243621640157403744718833162626193206685233710319205099867303242759099560438381385658382486042995679707669,
17,
11292078406990079542510627799764728892919007311761028269626724613049062486316379339152594792746853873109340637991599718616598115903530750002688030558925094987642913848386305504703012749896273497577003478759630198199473669305165131570674557041773098755873191241407597673069847908861741446606684974777271632545629600685952292605647052193819136445675100211504432575554351515262198132231537860917084269870590492135731720141577986787033006338680118008484613510063003323516659048210893001173583018220214626635609151105287049126443102976056146630518124476470236027123782297108342869049542023328584384300970694412006494684657,
@@ -1547,7 +1517,7 @@ hardcode_rsa_key(1) ->
asn1_NOVALUE};
hardcode_rsa_key(2) ->
-{'RSAPrivateKey',0,
+{'RSAPrivateKey', 'two-prime',
21343679768589700771839799834197557895311746244621307033143551583788179817796325695589283169969489517156931770973490560582341832744966317712674900833543896521418422508485833901274928542544381247956820115082240721897193055368570146764204557110415281995205343662628196075590438954399631753508888358737971039058298703003743872818150364935790613286541190842600031570570099801682794056444451081563070538409720109449780410837763602317050353477918147758267825417201591905091231778937606362076129350476690460157227101296599527319242747999737801698427160817755293383890373574621116766934110792127739174475029121017282777887777,
17,
18832658619343853622211588088997845201745658451136447382185486691577805721584993260814073385267196632785528033211903435807948675951440868570007265441362261636545666919252206383477878125774454042314841278013741813438699754736973658909592256273895837054592950290554290654932740253882028017801960316533503857992358685308186680144968293076156011747178275038098868263178095174694099811498968993700538293188879611375604635940554394589807673542938082281934965292051746326331046224291377703201248790910007232374006151098976879987912446997911775904329728563222485791845480864283470332826504617837402078265424772379987120023773,
@@ -1559,7 +1529,7 @@ hardcode_rsa_key(2) ->
asn1_NOVALUE};
hardcode_rsa_key(3) ->
-{'RSAPrivateKey',0,
+{'RSAPrivateKey', 'two-prime',
25089040456112869869472694987833070928503703615633809313972554887193090845137746668197820419383804666271752525807484521370419854590682661809972833718476098189250708650325307850184923546875260207894844301992963978994451844985784504212035958130279304082438876764367292331581532569155681984449177635856426023931875082020262146075451989132180409962870105455517050416234175675478291534563995772675388370042873175344937421148321291640477650173765084699931690748536036544188863178325887393475703801759010864779559318631816411493486934507417755306337476945299570726975433250753415110141783026008347194577506976486290259135429,
17,
8854955455098659953931539407470495621824836570223697404931489960185796768872145882893348383311931058684147950284994536954265831032005645344696294253579799360912014817761873358888796545955974191021709753644575521998041827642041589721895044045980930852625485916835514940558187965584358347452650930302268008446431977397918214293502821599497633970075862760001650736520566952260001423171553461362588848929781360590057040212831994258783694027013289053834376791974167294527043946669963760259975273650548116897900664646809242902841107022557239712438496384819445301703021164043324282687280801738470244471443835900160721870265,
@@ -1570,7 +1540,7 @@ hardcode_rsa_key(3) ->
15068630434698373319269196003209754243798959461311186548759287649485250508074064775263867418602372588394608558985183294561315208336731894947137343239541687540387209051236354318837334154993136528453613256169847839789803932725339395739618592522865156272771578671216082079933457043120923342632744996962853951612,
asn1_NOVALUE};
hardcode_rsa_key(4) ->
-{'RSAPrivateKey',0,
+{'RSAPrivateKey', 'two-prime',
28617237755030755643854803617273584643843067580642149032833640135949799721163782522787597288521902619948688786051081993247908700824196122780349730169173433743054172191054872553484065655968335396052034378669869864779940355219732200954630251223541048434478476115391643898092650304645086338265930608997389611376417609043761464100338332976874588396803891301015812818307951159858145399281035705713082131199940309445719678087542976246147777388465712394062188801177717719764254900022006288880246925156931391594131839991579403409541227225173269459173129377291869028712271737734702830877034334838181789916127814298794576266389,
17,
26933870828264240605980991639786903194205240075898493207372837775011576208154148256741268036255908348187001210401018346586267012540419880263858569570986761169933338532757527109161473558558433313931326474042230460969355628442100895016122589386862163232450330461545076609969553227901257730132640573174013751883368376011370428995523268034111482031427024082719896108094847702954695363285832195666458915142143884210891427766607838346722974883433132513540317964796373298134261669479023445911856492129270184781873446960437310543998533283339488055776892320162032014809906169940882070478200435536171854883284366514852906334641,
@@ -1581,7 +1551,7 @@ hardcode_rsa_key(4) ->
34340318160575773065401929915821192439103777558577109939078671096408836197675640654693301707202885840826672396546056002756167635035389371579540325327619480512374920136684787633921441576901246290213545161954865184290700344352088099063404416346968182170720521708773285279884132629954461545103181082503707725012,
asn1_NOVALUE};
hardcode_rsa_key(5) ->
-{'RSAPrivateKey',0,
+{'RSAPrivateKey', 'two-prime',
26363170152814518327068346871197765236382539835597898797762992537312221863402655353436079974302838986536256364057947538018476963115004626096654613827403121905035011992899481598437933532388248462251770039307078647864188314916665766359828262009578648593031111569685489178543405615478739906285223620987558499488359880003693226535420421293716164794046859453204135383236667988765227190694994861629971618548127529849059769249520775574008363789050621665120207265361610436965088511042779948238320901918522125988916609088415989475825860046571847719492980547438560049874493788767083330042728150253120940100665370844282489982633,
17,
10855423004100095781734025182257903332628104638187370093196526338893267826106975733767797636477639582691399679317978398007608161282648963686857782164224814902073240232370374775827384395689278778574258251479385325591136364965685903795223402003944149420659869469870495544106108194608892902588033255700759382142132115013969680562678811046675523365751498355532768935784747314021422035957153013494814430893022253205880275287307995039363642554998244274484818208792520243113824379110193356010059999642946040953102866271737127640405568982049887176990990501963784502429481034227543991366980671390566584211881030995602076468001,
@@ -1592,7 +1562,7 @@ hardcode_rsa_key(5) ->
40624877259097915043489529504071755460170951428490878553842519165800720914888257733191322215286203357356050737713125202129282154441426952501134581314792133018830748896123382106683994268028624341502298766844710276939303555637478596035491641473828661569958212421472263269629366559343208764012473880251174832392,
asn1_NOVALUE};
hardcode_rsa_key(6) ->
-{'RSAPrivateKey',0,
+{'RSAPrivateKey', 'two-prime',
22748888494866396715768692484866595111939200209856056370972713870125588774286266397044592487895293134537316190976192161177144143633669641697309689280475257429554879273045671863645233402796222694405634510241820106743648116753479926387434021380537483429927516962909367257212902212159798399531316965145618774905828756510318897899298783143203190245236381440043169622358239226123652592179006905016804587837199618842875361941208299410035232803124113612082221121192550063791073372276763648926636149384299189072950588522522800393261949880796214514243704858378436010975184294077063518776479282353562934591448646412389762167039,
17,
6690849557313646092873144848490175032923294179369428344403739373566349639495960705013115437616262686628622409110644753287395336362844012263914614494257428655751435080307550548130951000822418439531068973600535325512837681398082331290421770994275730420566916753796872722709677121223470117509210872101652580854566448661533030419787125312956120661097410038933324613372774190658239039998357548275441758790939430824924502690997433186652165055694361752689819209062683281242276039100201318203707142383491769671330743466041394101421674581185260900666085723130684175548215193875544802254923825103844262661010117443222587769713,
@@ -1603,6 +1573,27 @@ hardcode_rsa_key(6) ->
81173034184183681160439870161505779100040258708276674532866007896310418779840630960490793104541748007902477778658270784073595697910785917474138815202903114440800310078464142273778315781957021015333260021813037604142367434117205299831740956310682461174553260184078272196958146289378701001596552915990080834227,
asn1_NOVALUE}.
+hardcode_dsa_key(1) ->
+ {'DSAPrivateKey',0,
+ 99438313664986922963487511141216248076486724382260996073922424025828494981416579966171753999204426907349400798052572573634137057487829150578821328280864500098312146772602202702021153757550650696224643730869835650674962433068943942837519621267815961566259265204876799778977478160416743037274938277357237615491,
+ 1454908511695148818053325447108751926908854531909,
+ 20302424198893709525243209250470907105157816851043773596964076323184805650258390738340248469444700378962907756890306095615785481696522324901068493502141775433048117442554163252381401915027666416630898618301033737438756165023568220631119672502120011809327566543827706483229480417066316015458225612363927682579,
+ 48598545580251057979126570873881530215432219542526130654707948736559463436274835406081281466091739849794036308281564299754438126857606949027748889019480936572605967021944405048011118039171039273602705998112739400664375208228641666852589396502386172780433510070337359132965412405544709871654840859752776060358,
+ 1457508827177594730669011716588605181448418352823};
+hardcode_dsa_key(2) ->
+ {'DSAPrivateKey',0,
+ 145447354557382582722944332987784622105075065624518040072393858097520305927329240484963764783346271194321683798321743658303478090647837211867389721684646254999291098347011037298359107547264573476540026676832159205689428125157386525591130716464335426605521884822982379206842523670736739023467072341958074788151,
+ 742801637799670234315651916144768554943688916729,
+ 79727684678125120155622004643594683941478642656111969487719464672433839064387954070113655822700268007902716505761008423792735229036965034283173483862273639257533568978482104785033927768441235063983341565088899599358397638308472931049309161811156189887217888328371767967629005149630676763492409067382020352505,
+ 35853727034965131665219275925554159789667905059030049940938124723126925435403746979702929280654735557166864135215989313820464108440192507913554896358611966877432546584986661291483639036057475682547385322659469460385785257933737832719745145778223672383438466035853830832837226950912832515496378486927322864228,
+ 801315110178350279541885862867982846569980443911};
+hardcode_dsa_key(3) ->
+ {'DSAPrivateKey',0,
+ 99438313664986922963487511141216248076486724382260996073922424025828494981416579966171753999204426907349400798052572573634137057487829150578821328280864500098312146772602202702021153757550650696224643730869835650674962433068943942837519621267815961566259265204876799778977478160416743037274938277357237615491,
+ 1454908511695148818053325447108751926908854531909,
+ 20302424198893709525243209250470907105157816851043773596964076323184805650258390738340248469444700378962907756890306095615785481696522324901068493502141775433048117442554163252381401915027666416630898618301033737438756165023568220631119672502120011809327566543827706483229480417066316015458225612363927682579,
+ 48598545580251057979126570873881530215432219542526130654707948736559463436274835406081281466091739849794036308281564299754438126857606949027748889019480936572605967021944405048011118039171039273602705998112739400664375208228641666852589396502386172780433510070337359132965412405544709871654840859752776060358,
+ 1457508827177594730669011716588605181448418352823}.
dtls_hello() ->
[1,
diff --git a/lib/ssl/test/ssl_to_openssl_SUITE.erl b/lib/ssl/test/ssl_to_openssl_SUITE.erl
index 5093ef3728..2e1a0b94ea 100644
--- a/lib/ssl/test/ssl_to_openssl_SUITE.erl
+++ b/lib/ssl/test/ssl_to_openssl_SUITE.erl
@@ -85,19 +85,19 @@ all_versions_tests() ->
].
dtls_all_versions_tests() ->
[
- %%erlang_client_openssl_server,
+ erlang_client_openssl_server,
erlang_server_openssl_client,
- %%erlang_client_openssl_server_dsa_cert,
+ erlang_client_openssl_server_dsa_cert,
erlang_server_openssl_client_dsa_cert,
- erlang_server_openssl_client_reuse_session
+ erlang_server_openssl_client_reuse_session,
%%erlang_client_openssl_server_renegotiate,
%%erlang_client_openssl_server_nowrap_seqnum,
%%erlang_server_openssl_client_nowrap_seqnum,
- %%erlang_client_openssl_server_no_server_ca_cert,
- %%erlang_client_openssl_server_client_cert,
- %%erlang_server_openssl_client_client_cert
- %%ciphers_rsa_signed_certs,
- %%ciphers_dsa_signed_certs,
+ erlang_client_openssl_server_no_server_ca_cert,
+ erlang_client_openssl_server_client_cert,
+ erlang_server_openssl_client_client_cert,
+ ciphers_rsa_signed_certs,
+ ciphers_dsa_signed_certs
%%erlang_client_bad_openssl_server,
%%expired_session
].
@@ -142,12 +142,11 @@ init_per_suite(Config0) ->
catch crypto:stop(),
try crypto:start() of
ok ->
- ssl_test_lib:clean_start(),
- {ok, _} = make_certs:all(proplists:get_value(data_dir, Config0),
- proplists:get_value(priv_dir, Config0)),
- Config1 = ssl_test_lib:make_dsa_cert(Config0),
- Config = ssl_test_lib:cert_options(Config1),
- ssl_test_lib:cipher_restriction(Config)
+ ssl_test_lib:clean_start(),
+
+ Config1 = ssl_test_lib:make_rsa_cert(Config0),
+ Config2 = ssl_test_lib:make_dsa_cert(Config1),
+ ssl_test_lib:cipher_restriction(Config2)
catch _:_ ->
{skip, "Crypto did not start"}
end
@@ -157,7 +156,8 @@ end_per_suite(_Config) ->
ssl:stop(),
application:stop(crypto).
-init_per_group(basic, Config) ->
+init_per_group(basic, Config0) ->
+ Config = ssl_test_lib:clean_tls_version(Config0),
case ssl_test_lib:supports_ssl_tls_version(sslv2) of
true ->
[{v2_hello_compatible, true} | Config];
@@ -183,8 +183,13 @@ init_per_group(GroupName, Config) ->
Config
end.
-end_per_group(_GroupName, Config) ->
- Config.
+end_per_group(GroupName, Config) ->
+ case ssl_test_lib:is_tls_version(GroupName) of
+ true ->
+ ssl_test_lib:clean_tls_version(Config);
+ false ->
+ Config
+ end.
init_per_testcase(expired_session, Config) ->
ct:timetrap(?EXPIRE * 1000 * 5),
@@ -196,7 +201,7 @@ init_per_testcase(expired_session, Config) ->
init_per_testcase(TestCase, Config) when TestCase == ciphers_rsa_signed_certs;
TestCase == ciphers_dsa_signed_certs ->
- ct:timetrap({seconds, 45}),
+ ct:timetrap({seconds, 60}),
special_init(TestCase, Config);
init_per_testcase(TestCase, Config) ->
@@ -270,13 +275,24 @@ special_init(TestCase, Config)
check_openssl_npn_support(Config)
end;
-special_init(TestCase, Config)
+special_init(TestCase, Config0)
when TestCase == erlang_server_openssl_client_sni_match;
TestCase == erlang_server_openssl_client_sni_no_match;
TestCase == erlang_server_openssl_client_sni_no_header;
TestCase == erlang_server_openssl_client_sni_match_fun;
TestCase == erlang_server_openssl_client_sni_no_match_fun;
TestCase == erlang_server_openssl_client_sni_no_header_fun ->
+ RsaOpts = ssl_test_lib:ssl_options(server_rsa_opts, Config0),
+ Config = [{sni_server_opts, [{sni_hosts,
+ [{"a.server", [
+ {certfile, proplists:get_value(certfile, RsaOpts)},
+ {keyfile, proplists:get_value(keyfile, RsaOpts)}
+ ]},
+ {"b.server", [
+ {certfile, proplists:get_value(certfile, RsaOpts)},
+ {keyfile, proplists:get_value(keyfile, RsaOpts)}
+ ]}
+ ]}]} | Config0],
check_openssl_sni_support(Config);
special_init(_, Config) ->
@@ -295,8 +311,8 @@ basic_erlang_client_openssl_server() ->
[{doc,"Test erlang client with openssl server"}].
basic_erlang_client_openssl_server(Config) when is_list(Config) ->
process_flag(trap_exit, true),
- ServerOpts = ssl_test_lib:ssl_options(server_opts, Config),
- ClientOpts = ssl_test_lib:ssl_options(client_opts, Config),
+ ServerOpts = ssl_test_lib:ssl_options(server_rsa_opts, Config),
+ ClientOpts = ssl_test_lib:ssl_options(client_rsa_opts, Config),
{ClientNode, _, Hostname} = ssl_test_lib:run_where(Config),
@@ -335,10 +351,10 @@ basic_erlang_server_openssl_client() ->
[{doc,"Test erlang server with openssl client"}].
basic_erlang_server_openssl_client(Config) when is_list(Config) ->
process_flag(trap_exit, true),
- ServerOpts = ssl_test_lib:ssl_options(server_opts, Config),
+ ServerOpts = ssl_test_lib:ssl_options(server_rsa_opts, Config),
V2Compat = proplists:get_value(v2_hello_compatible, Config),
- {_, ServerNode, _} = ssl_test_lib:run_where(Config),
+ {_, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
Data = "From openssl to erlang",
ct:pal("v2_hello_compatible: ~p", [V2Compat]),
@@ -351,7 +367,8 @@ basic_erlang_server_openssl_client(Config) when is_list(Config) ->
Port = ssl_test_lib:inet_port(Server),
Exe = "openssl",
- Args = ["s_client", "-connect", "localhost:" ++ integer_to_list(Port) | workaround_openssl_s_clinent()],
+ Args = ["s_client", "-connect", hostname_format(Hostname) ++
+ ":" ++ integer_to_list(Port) | workaround_openssl_s_clinent()],
OpenSslPort = ssl_test_lib:portable_open_port(Exe, Args),
true = port_command(OpenSslPort, Data),
@@ -368,8 +385,8 @@ erlang_client_openssl_server() ->
[{doc,"Test erlang client with openssl server"}].
erlang_client_openssl_server(Config) when is_list(Config) ->
process_flag(trap_exit, true),
- ServerOpts = ssl_test_lib:ssl_options(server_opts, Config),
- ClientOpts = ssl_test_lib:ssl_options(client_opts, Config),
+ ServerOpts = ssl_test_lib:ssl_options(server_rsa_opts, Config),
+ ClientOpts = ssl_test_lib:ssl_options(client_rsa_opts, Config),
{ClientNode, _, Hostname} = ssl_test_lib:run_where(Config),
@@ -408,9 +425,9 @@ erlang_server_openssl_client() ->
[{doc,"Test erlang server with openssl client"}].
erlang_server_openssl_client(Config) when is_list(Config) ->
process_flag(trap_exit, true),
- ServerOpts = ssl_test_lib:ssl_options(server_opts, Config),
+ ServerOpts = ssl_test_lib:ssl_options(server_rsa_opts, Config),
- {_, ServerNode, _} = ssl_test_lib:run_where(Config),
+ {_, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
Data = "From openssl to erlang",
@@ -422,7 +439,7 @@ erlang_server_openssl_client(Config) when is_list(Config) ->
Version = ssl_test_lib:protocol_version(Config),
Exe = "openssl",
- Args = ["s_client", "-connect", "localhost: " ++ integer_to_list(Port),
+ Args = ["s_client", "-connect", hostname_format(Hostname) ++":" ++ integer_to_list(Port),
ssl_test_lib:version_flag(Version)],
OpenSslPort = ssl_test_lib:portable_open_port(Exe, Args),
@@ -441,7 +458,7 @@ erlang_client_openssl_server_dsa_cert() ->
erlang_client_openssl_server_dsa_cert(Config) when is_list(Config) ->
process_flag(trap_exit, true),
ClientOpts = ssl_test_lib:ssl_options(client_dsa_opts, Config),
- ServerOpts = ssl_test_lib:ssl_options(server_dsa_opts, Config),
+ ServerOpts = ssl_test_lib:ssl_options(server_dsa_verify_opts, Config),
{ClientNode, _, Hostname} = ssl_test_lib:run_where(Config),
@@ -486,7 +503,7 @@ erlang_server_openssl_client_dsa_cert(Config) when is_list(Config) ->
ClientOpts = ssl_test_lib:ssl_options(client_dsa_opts, Config),
ServerOpts = ssl_test_lib:ssl_options(server_dsa_verify_opts, Config),
- {_, ServerNode, _} = ssl_test_lib:run_where(Config),
+ {_, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
Data = "From openssl to erlang",
CaCertFile = proplists:get_value(cacertfile, ClientOpts),
@@ -500,7 +517,7 @@ erlang_server_openssl_client_dsa_cert(Config) when is_list(Config) ->
Port = ssl_test_lib:inet_port(Server),
Version = ssl_test_lib:protocol_version(Config),
Exe = "openssl",
- Args = ["s_client", "-connect", "localhost: " ++ integer_to_list(Port),
+ Args = ["s_client", "-connect", hostname_format(Hostname) ++ ":" ++ integer_to_list(Port),
ssl_test_lib:version_flag(Version),
"-cert", CertFile,
"-CAfile", CaCertFile,
@@ -523,9 +540,9 @@ erlang_server_openssl_client_reuse_session() ->
"same session id, to test reusing of sessions."}].
erlang_server_openssl_client_reuse_session(Config) when is_list(Config) ->
process_flag(trap_exit, true),
- ServerOpts = ssl_test_lib:ssl_options(server_opts, Config),
+ ServerOpts = ssl_test_lib:ssl_options(server_rsa_opts, Config),
- {_, ServerNode, _} = ssl_test_lib:run_where(Config),
+ {_, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
Data = "From openssl to erlang",
@@ -538,7 +555,8 @@ erlang_server_openssl_client_reuse_session(Config) when is_list(Config) ->
Version = ssl_test_lib:protocol_version(Config),
Exe = "openssl",
- Args = ["s_client", "-connect", "localhost:" ++ integer_to_list(Port),
+ Args = ["s_client", "-connect", hostname_format(Hostname)
+ ++ ":" ++ integer_to_list(Port),
ssl_test_lib:version_flag(Version),
"-reconnect"],
@@ -560,8 +578,8 @@ erlang_client_openssl_server_renegotiate() ->
[{doc,"Test erlang client when openssl server issuses a renegotiate"}].
erlang_client_openssl_server_renegotiate(Config) when is_list(Config) ->
process_flag(trap_exit, true),
- ServerOpts = ssl_test_lib:ssl_options(server_opts, Config),
- ClientOpts = ssl_test_lib:ssl_options(client_opts, Config),
+ ServerOpts = ssl_test_lib:ssl_options(server_rsa_opts, Config),
+ ClientOpts = ssl_test_lib:ssl_options(client_rsa_opts, Config),
{ClientNode, _, Hostname} = ssl_test_lib:run_where(Config),
@@ -610,8 +628,8 @@ erlang_client_openssl_server_nowrap_seqnum() ->
" to lower treashold substantially."}].
erlang_client_openssl_server_nowrap_seqnum(Config) when is_list(Config) ->
process_flag(trap_exit, true),
- ServerOpts = ssl_test_lib:ssl_options(server_opts, Config),
- ClientOpts = ssl_test_lib:ssl_options(client_opts, Config),
+ ServerOpts = ssl_test_lib:ssl_options(server_rsa_opts, Config),
+ ClientOpts = ssl_test_lib:ssl_options(client_rsa_opts, Config),
{ClientNode, _, Hostname} = ssl_test_lib:run_where(Config),
@@ -653,9 +671,9 @@ erlang_server_openssl_client_nowrap_seqnum() ->
" to lower treashold substantially."}].
erlang_server_openssl_client_nowrap_seqnum(Config) when is_list(Config) ->
process_flag(trap_exit, true),
- ServerOpts = ssl_test_lib:ssl_options(server_opts, Config),
+ ServerOpts = ssl_test_lib:ssl_options(server_rsa_opts, Config),
- {_, ServerNode, _} = ssl_test_lib:run_where(Config),
+ {_, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
Data = "From openssl to erlang",
@@ -669,7 +687,7 @@ erlang_server_openssl_client_nowrap_seqnum(Config) when is_list(Config) ->
Port = ssl_test_lib:inet_port(Server),
Version = ssl_test_lib:protocol_version(Config),
Exe = "openssl",
- Args = ["s_client","-connect", "localhost: " ++ integer_to_list(Port),
+ Args = ["s_client","-connect", hostname_format(Hostname) ++ ":" ++ integer_to_list(Port),
ssl_test_lib:version_flag(Version),
"-msg"],
@@ -692,8 +710,8 @@ erlang_client_openssl_server_no_server_ca_cert() ->
"implicitly tested eleswhere."}].
erlang_client_openssl_server_no_server_ca_cert(Config) when is_list(Config) ->
process_flag(trap_exit, true),
- ServerOpts = ssl_test_lib:ssl_options(server_opts, Config),
- ClientOpts = ssl_test_lib:ssl_options(client_opts, Config),
+ ServerOpts = ssl_test_lib:ssl_options(server_rsa_opts, Config),
+ ClientOpts = ssl_test_lib:ssl_options(client_rsa_opts, Config),
{ClientNode, _, Hostname} = ssl_test_lib:run_where(Config),
@@ -733,8 +751,8 @@ erlang_client_openssl_server_client_cert() ->
[{doc,"Test erlang client with openssl server when client sends cert"}].
erlang_client_openssl_server_client_cert(Config) when is_list(Config) ->
process_flag(trap_exit, true),
- ServerOpts = ssl_test_lib:ssl_options(server_verification_opts, Config),
- ClientOpts = ssl_test_lib:ssl_options(client_verification_opts, Config),
+ ServerOpts = ssl_test_lib:ssl_options(server_rsa_verify_opts, Config),
+ ClientOpts = ssl_test_lib:ssl_options(client_rsa_verify_opts, Config),
{ClientNode, _, Hostname} = ssl_test_lib:run_where(Config),
@@ -776,10 +794,10 @@ erlang_server_openssl_client_client_cert() ->
[{doc,"Test erlang server with openssl client when client sends cert"}].
erlang_server_openssl_client_client_cert(Config) when is_list(Config) ->
process_flag(trap_exit, true),
- ServerOpts = ssl_test_lib:ssl_options(server_verification_opts, Config),
- ClientOpts = ssl_test_lib:ssl_options(client_verification_opts, Config),
+ ServerOpts = ssl_test_lib:ssl_options(server_rsa_verify_opts, Config),
+ ClientOpts = ssl_test_lib:ssl_options(client_rsa_verify_opts, Config),
- {_, ServerNode, _} = ssl_test_lib:run_where(Config),
+ {_, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
Data = "From openssl to erlang",
@@ -799,7 +817,7 @@ erlang_server_openssl_client_client_cert(Config) when is_list(Config) ->
Exe = "openssl",
Args = ["s_client", "-cert", CertFile,
"-CAfile", CaCertFile,
- "-key", KeyFile,"-connect", "localhost:" ++ integer_to_list(Port),
+ "-key", KeyFile,"-connect", hostname_format(Hostname) ++ ":" ++ integer_to_list(Port),
ssl_test_lib:version_flag(Version)],
OpenSslPort = ssl_test_lib:portable_open_port(Exe, Args),
@@ -817,8 +835,8 @@ erlang_server_erlang_client_client_cert() ->
[{doc,"Test erlang server with erlang client when client sends cert"}].
erlang_server_erlang_client_client_cert(Config) when is_list(Config) ->
process_flag(trap_exit, true),
- ServerOpts = proplists:get_value(server_verification_opts, Config),
- ClientOpts = proplists:get_value(client_verification_opts, Config),
+ ServerOpts = proplists:get_value(server_rsa_verify_opts, Config),
+ ClientOpts = proplists:get_value(client_rsa_verify_opts, Config),
Version = ssl_test_lib:protocol_version(Config),
{ClientNode, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
@@ -863,7 +881,8 @@ ciphers_dsa_signed_certs() ->
[{doc,"Test cipher suites that uses dsa certs"}].
ciphers_dsa_signed_certs(Config) when is_list(Config) ->
Version = ssl_test_lib:protocol_version(Config),
- Ciphers = ssl_test_lib:dsa_suites(tls_record:protocol_version(Version)),
+ NVersion = ssl_test_lib:protocol_version(Config, tuple),
+ Ciphers = ssl_test_lib:dsa_suites(NVersion),
run_suites(Ciphers, Version, Config, dsa).
%%--------------------------------------------------------------------
@@ -871,8 +890,8 @@ erlang_client_bad_openssl_server() ->
[{doc,"Test what happens if openssl server sends garbage to erlang ssl client"}].
erlang_client_bad_openssl_server(Config) when is_list(Config) ->
process_flag(trap_exit, true),
- ServerOpts = ssl_test_lib:ssl_options(server_verification_opts, Config),
- ClientOpts = ssl_test_lib:ssl_options(client_verification_opts, Config),
+ ServerOpts = ssl_test_lib:ssl_options(server_rsa_opts, Config),
+ ClientOpts = ssl_test_lib:ssl_options(client_rsa_opts, Config),
{ClientNode, _, Hostname} = ssl_test_lib:run_where(Config),
@@ -926,8 +945,8 @@ expired_session() ->
"better code coverage of the ssl_manager module"}].
expired_session(Config) when is_list(Config) ->
process_flag(trap_exit, true),
- ClientOpts = ssl_test_lib:ssl_options(client_opts, Config),
- ServerOpts = ssl_test_lib:ssl_options(server_opts, Config),
+ ClientOpts = ssl_test_lib:ssl_options(client_rsa_opts, Config),
+ ServerOpts = ssl_test_lib:ssl_options(server_rsa_opts, Config),
{ClientNode, _, Hostname} = ssl_test_lib:run_where(Config),
Port = ssl_test_lib:inet_port(node()),
@@ -980,9 +999,9 @@ ssl2_erlang_server_openssl_client() ->
ssl2_erlang_server_openssl_client(Config) when is_list(Config) ->
process_flag(trap_exit, true),
- ServerOpts = ssl_test_lib:ssl_options(server_opts, Config),
+ ServerOpts = ssl_test_lib:ssl_options(server_rsa_opts, Config),
- {_, ServerNode, _} = ssl_test_lib:run_where(Config),
+ {_, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
Server = ssl_test_lib:start_server_error([{node, ServerNode}, {port, 0},
{from, self()},
@@ -990,7 +1009,7 @@ ssl2_erlang_server_openssl_client(Config) when is_list(Config) ->
Port = ssl_test_lib:inet_port(Server),
Exe = "openssl",
- Args = ["s_client", "-connect", "localhost:" ++ integer_to_list(Port),
+ Args = ["s_client", "-connect", hostname_format(Hostname) ++ ":" ++ integer_to_list(Port),
"-ssl2", "-msg"],
OpenSslPort = ssl_test_lib:portable_open_port(Exe, Args),
@@ -1005,12 +1024,12 @@ ssl2_erlang_server_openssl_client_comp() ->
ssl2_erlang_server_openssl_client_comp(Config) when is_list(Config) ->
process_flag(trap_exit, true),
- ServerOpts = ssl_test_lib:ssl_options(server_opts, Config),
+ ServerOpts = ssl_test_lib:ssl_options(server_rsa_opts, Config),
V2Compat = proplists:get_value(v2_hello_compatible, Config),
- ServerOpts = ssl_test_lib:ssl_options(server_opts, Config),
+ ServerOpts = ssl_test_lib:ssl_options(server_rsa_opts, Config),
- {_, ServerNode, _} = ssl_test_lib:run_where(Config),
+ {_, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
Data = "From openssl to erlang",
@@ -1020,7 +1039,7 @@ ssl2_erlang_server_openssl_client_comp(Config) when is_list(Config) ->
Port = ssl_test_lib:inet_port(Server),
Exe = "openssl",
- Args = ["s_client", "-connect", "localhost:" ++ integer_to_list(Port),
+ Args = ["s_client", "-connect", hostname_format(Hostname) ++ ":" ++ integer_to_list(Port),
"-ssl2", "-msg"],
OpenSslPort = ssl_test_lib:portable_open_port(Exe, Args),
@@ -1248,22 +1267,22 @@ erlang_server_openssl_client_npn_only_client(Config) when is_list(Config) ->
ok.
%--------------------------------------------------------------------------
erlang_server_openssl_client_sni_no_header(Config) when is_list(Config) ->
- erlang_server_openssl_client_sni_test(Config, undefined, undefined, "server").
+ erlang_server_openssl_client_sni_test(Config, undefined, undefined, "server Peer cert").
erlang_server_openssl_client_sni_no_header_fun(Config) when is_list(Config) ->
- erlang_server_openssl_client_sni_test_sni_fun(Config, undefined, undefined, "server").
+ erlang_server_openssl_client_sni_test_sni_fun(Config, undefined, undefined, "server Peer cert").
-erlang_server_openssl_client_sni_match(Config) when is_list(Config) ->
- erlang_server_openssl_client_sni_test(Config, "a.server", "a.server", "a.server").
+erlang_server_openssl_client_sni_match(Config) when is_list(Config) ->
+ erlang_server_openssl_client_sni_test(Config, "a.server", "a.server", "server Peer cert").
erlang_server_openssl_client_sni_match_fun(Config) when is_list(Config) ->
- erlang_server_openssl_client_sni_test_sni_fun(Config, "a.server", "a.server", "a.server").
+ erlang_server_openssl_client_sni_test_sni_fun(Config, "a.server", "a.server", "server Peer cert").
erlang_server_openssl_client_sni_no_match(Config) when is_list(Config) ->
- erlang_server_openssl_client_sni_test(Config, "c.server", undefined, "server").
+ erlang_server_openssl_client_sni_test(Config, "c.server", undefined, "server Peer cert").
erlang_server_openssl_client_sni_no_match_fun(Config) when is_list(Config) ->
- erlang_server_openssl_client_sni_test_sni_fun(Config, "c.server", undefined, "server").
+ erlang_server_openssl_client_sni_test_sni_fun(Config, "c.server", undefined, "server Peer cert").
%%--------------------------------------------------------------------
@@ -1273,11 +1292,11 @@ run_suites(Ciphers, Version, Config, Type) ->
{ClientOpts, ServerOpts} =
case Type of
rsa ->
- {ssl_test_lib:ssl_options(client_opts, Config),
- ssl_test_lib:ssl_options(server_opts, Config)};
+ {ssl_test_lib:ssl_options(client_rsa_opts, Config),
+ ssl_test_lib:ssl_options(server_rsa_opts, Config)};
dsa ->
- {ssl_test_lib:ssl_options(client_opts, Config),
- ssl_test_lib:ssl_options(server_dsa_opts, Config)}
+ {ssl_test_lib:ssl_options(client_dsa_opts, Config),
+ ssl_test_lib:ssl_options(server_dsa_verify_opts, Config)}
end,
Result = lists:map(fun(Cipher) ->
@@ -1330,7 +1349,7 @@ send_and_hostname(SSLSocket) ->
erlang_server_openssl_client_sni_test(Config, SNIHostname, ExpectedSNIHostname, ExpectedCN) ->
ct:log("Start running handshake, Config: ~p, SNIHostname: ~p, ExpectedSNIHostname: ~p, ExpectedCN: ~p", [Config, SNIHostname, ExpectedSNIHostname, ExpectedCN]),
- ServerOptions = proplists:get_value(sni_server_opts, Config) ++ proplists:get_value(server_opts, Config),
+ ServerOptions = proplists:get_value(sni_server_opts, Config) ++ proplists:get_value(server_rsa_opts, Config),
{_, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
{from, self()}, {mfa, {?MODULE, send_and_hostname, []}},
@@ -1344,11 +1363,7 @@ erlang_server_openssl_client_sni_test(Config, SNIHostname, ExpectedSNIHostname,
openssl_client_args(ssl_test_lib:supports_ssl_tls_version(sslv2), Hostname, Port, SNIHostname)
end,
ClientPort = ssl_test_lib:portable_open_port(Exe, ClientArgs),
-
- %% Client check needs to be done befor server check,
- %% or server check might consume client messages
- ExpectedClientOutput = ["OK", "/CN=" ++ ExpectedCN ++ "/"],
- client_check_result(ClientPort, ExpectedClientOutput),
+
ssl_test_lib:check_result(Server, ExpectedSNIHostname),
ssl_test_lib:close_port(ClientPort),
ssl_test_lib:close(Server),
@@ -1359,7 +1374,7 @@ erlang_server_openssl_client_sni_test_sni_fun(Config, SNIHostname, ExpectedSNIHo
ct:log("Start running handshake for sni_fun, Config: ~p, SNIHostname: ~p, ExpectedSNIHostname: ~p, ExpectedCN: ~p", [Config, SNIHostname, ExpectedSNIHostname, ExpectedCN]),
[{sni_hosts, ServerSNIConf}] = proplists:get_value(sni_server_opts, Config),
SNIFun = fun(Domain) -> proplists:get_value(Domain, ServerSNIConf, undefined) end,
- ServerOptions = proplists:get_value(server_opts, Config) ++ [{sni_fun, SNIFun}],
+ ServerOptions = proplists:get_value(server_rsa_opts, Config) ++ [{sni_fun, SNIFun}],
{_, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
{from, self()}, {mfa, {?MODULE, send_and_hostname, []}},
@@ -1375,10 +1390,6 @@ erlang_server_openssl_client_sni_test_sni_fun(Config, SNIHostname, ExpectedSNIHo
ClientPort = ssl_test_lib:portable_open_port(Exe, ClientArgs),
- %% Client check needs to be done befor server check,
- %% or server check might consume client messages
- ExpectedClientOutput = ["OK", "/CN=" ++ ExpectedCN ++ "/"],
- client_check_result(ClientPort, ExpectedClientOutput),
ssl_test_lib:check_result(Server, ExpectedSNIHostname),
ssl_test_lib:close_port(ClientPort),
ssl_test_lib:close(Server).
@@ -1442,8 +1453,8 @@ cipher(CipherSuite, Version, Config, ClientOpts, ServerOpts) ->
start_erlang_client_and_openssl_server_with_opts(Config, ErlangClientOpts, OpensslServerOpts, Data, Callback) ->
process_flag(trap_exit, true),
- ServerOpts = ssl_test_lib:ssl_options(server_opts, Config),
- ClientOpts0 = ssl_test_lib:ssl_options(client_opts, Config),
+ ServerOpts = ssl_test_lib:ssl_options(server_rsa_opts, Config),
+ ClientOpts0 = ssl_test_lib:ssl_options(client_rsa_opts, Config),
ClientOpts = ErlangClientOpts ++ ClientOpts0,
{ClientNode, _, Hostname} = ssl_test_lib:run_where(Config),
@@ -1488,8 +1499,8 @@ start_erlang_client_and_openssl_server_with_opts(Config, ErlangClientOpts, Opens
start_erlang_client_and_openssl_server_for_alpn_negotiation(Config, Data, Callback) ->
process_flag(trap_exit, true),
- ServerOpts = proplists:get_value(server_opts, Config),
- ClientOpts0 = proplists:get_value(client_opts, Config),
+ ServerOpts = proplists:get_value(server_rsa_opts, Config),
+ ClientOpts0 = proplists:get_value(client_rsa_opts, Config),
ClientOpts = [{alpn_advertised_protocols, [<<"spdy/2">>]} | ClientOpts0],
{ClientNode, _, Hostname} = ssl_test_lib:run_where(Config),
@@ -1524,7 +1535,7 @@ start_erlang_client_and_openssl_server_for_alpn_negotiation(Config, Data, Callba
start_erlang_server_and_openssl_client_for_alpn_negotiation(Config, Data, Callback) ->
process_flag(trap_exit, true),
- ServerOpts0 = proplists:get_value(server_opts, Config),
+ ServerOpts0 = proplists:get_value(server_rsa_opts, Config),
ServerOpts = [{alpn_preferred_protocols, [<<"spdy/2">>]} | ServerOpts0],
{_, ServerNode, _} = ssl_test_lib:run_where(Config),
@@ -1553,8 +1564,8 @@ start_erlang_server_and_openssl_client_for_alpn_negotiation(Config, Data, Callba
start_erlang_client_and_openssl_server_for_alpn_npn_negotiation(Config, Data, Callback) ->
process_flag(trap_exit, true),
- ServerOpts = proplists:get_value(server_opts, Config),
- ClientOpts0 = proplists:get_value(client_opts, Config),
+ ServerOpts = proplists:get_value(server_rsa_opts, Config),
+ ClientOpts0 = proplists:get_value(client_rsa_opts, Config),
ClientOpts = [{alpn_advertised_protocols, [<<"spdy/2">>]},
{client_preferred_next_protocols, {client, [<<"spdy/3">>, <<"http/1.1">>]}} | ClientOpts0],
@@ -1593,7 +1604,7 @@ start_erlang_client_and_openssl_server_for_alpn_npn_negotiation(Config, Data, Ca
start_erlang_server_and_openssl_client_for_alpn_npn_negotiation(Config, Data, Callback) ->
process_flag(trap_exit, true),
- ServerOpts0 = proplists:get_value(server_opts, Config),
+ ServerOpts0 = proplists:get_value(server_rsa_opts, Config),
ServerOpts = [{alpn_preferred_protocols, [<<"spdy/2">>]},
{next_protocols_advertised, [<<"spdy/3">>, <<"http/1.1">>]} | ServerOpts0],
@@ -1620,8 +1631,8 @@ start_erlang_server_and_openssl_client_for_alpn_npn_negotiation(Config, Data, Ca
start_erlang_client_and_openssl_server_for_npn_negotiation(Config, Data, Callback) ->
process_flag(trap_exit, true),
- ServerOpts = ssl_test_lib:ssl_options(server_opts, Config),
- ClientOpts0 = ssl_test_lib:ssl_options(client_opts, Config),
+ ServerOpts = ssl_test_lib:ssl_options(server_rsa_opts, Config),
+ ClientOpts0 = ssl_test_lib:ssl_options(client_rsa_opts, Config),
ClientOpts = [{client_preferred_next_protocols, {client, [<<"spdy/2">>], <<"http/1.1">>}} | ClientOpts0],
{ClientNode, _, Hostname} = ssl_test_lib:run_where(Config),
@@ -1658,10 +1669,10 @@ start_erlang_client_and_openssl_server_for_npn_negotiation(Config, Data, Callbac
start_erlang_server_and_openssl_client_for_npn_negotiation(Config, Data, Callback) ->
process_flag(trap_exit, true),
- ServerOpts0 = ssl_test_lib:ssl_options(server_opts, Config),
+ ServerOpts0 = ssl_test_lib:ssl_options(server_rsa_opts, Config),
ServerOpts = [{next_protocols_advertised, [<<"spdy/2">>]}, ServerOpts0],
- {_, ServerNode, _} = ssl_test_lib:run_where(Config),
+ {_, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
@@ -1672,7 +1683,8 @@ start_erlang_server_and_openssl_client_for_npn_negotiation(Config, Data, Callbac
Version = ssl_test_lib:protocol_version(Config),
Exe = "openssl",
- Args = ["s_client", "-nextprotoneg", "http/1.0,spdy/2", "-msg", "-connect", "localhost:"
+ Args = ["s_client", "-nextprotoneg", "http/1.0,spdy/2", "-msg", "-connect",
+ hostname_format(Hostname) ++ ":"
++ integer_to_list(Port), ssl_test_lib:version_flag(Version)],
OpenSslPort = ssl_test_lib:portable_open_port(Exe, Args),
@@ -1687,10 +1699,10 @@ start_erlang_server_and_openssl_client_for_npn_negotiation(Config, Data, Callbac
start_erlang_server_and_openssl_client_with_opts(Config, ErlangServerOpts, OpenSSLClientOpts, Data, Callback) ->
process_flag(trap_exit, true),
- ServerOpts0 = ssl_test_lib:ssl_options(server_opts, Config),
+ ServerOpts0 = ssl_test_lib:ssl_options(server_rsa_opts, Config),
ServerOpts = ErlangServerOpts ++ ServerOpts0,
- {_, ServerNode, _} = ssl_test_lib:run_where(Config),
+ {_, ServerNode, Hostname} = ssl_test_lib:run_where(Config),
Server = ssl_test_lib:start_server([{node, ServerNode}, {port, 0},
@@ -1701,8 +1713,9 @@ start_erlang_server_and_openssl_client_with_opts(Config, ErlangServerOpts, OpenS
Version = ssl_test_lib:protocol_version(Config),
Exe = "openssl",
- Args = ["s_client"] ++ OpenSSLClientOpts ++ ["-msg", "-connect", "localhost:" ++ integer_to_list(Port),
- ssl_test_lib:version_flag(Version)],
+ Args = ["s_client"] ++ OpenSSLClientOpts ++ ["-msg", "-connect",
+ hostname_format(Hostname) ++ ":" ++ integer_to_list(Port),
+ ssl_test_lib:version_flag(Version)],
OpenSslPort = ssl_test_lib:portable_open_port(Exe, Args),
@@ -1854,3 +1867,11 @@ consume_port_exit(OpenSSLPort) ->
{'EXIT', OpenSSLPort, _} ->
ok
end.
+
+hostname_format(Hostname) ->
+ case lists:member($., Hostname) of
+ true ->
+ Hostname;
+ false ->
+ "localhost"
+ end.
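
A minimal sketch of how the new hostname_format/1 helper above is meant to be used when composing the openssl s_client arguments; the module name and the connect_arg/2 wrapper are illustrative, not part of the patch:

    -module(hostname_format_example).
    -export([connect_arg/2]).

    %% Use the real host name only when it is fully qualified (contains a dot);
    %% otherwise fall back to "localhost" so openssl s_client can always resolve it.
    hostname_format(Hostname) ->
        case lists:member($., Hostname) of
            true  -> Hostname;
            false -> "localhost"
        end.

    %% Build the value passed after "-connect" to openssl s_client.
    connect_arg(Hostname, Port) when is_integer(Port) ->
        hostname_format(Hostname) ++ ":" ++ integer_to_list(Port).

With this, connect_arg("myhost", 4433) yields "localhost:4433", while connect_arg("srv.example.org", 4433) keeps the fully qualified name.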
diff --git a/lib/stdlib/doc/src/ets.xml b/lib/stdlib/doc/src/ets.xml
index f6f3d18d6a..95af2b77a5 100644
--- a/lib/stdlib/doc/src/ets.xml
+++ b/lib/stdlib/doc/src/ets.xml
@@ -408,9 +408,9 @@
calls cannot be in the guard or body of the fun. Calls to built-in
match specification functions are of course allowed:</p>
<pre>
-4> <input>ets:fun2ms(fun({M,N}) when N > X, is_atomm(M) -> M end).</input>
+4> <input>ets:fun2ms(fun({M,N}) when N > X, my_fun(M) -> M end).</input>
Error: fun containing local Erlang function calls
-('is_atomm' called in guard) cannot be translated into match_spec
+('my_fun' called in guard) cannot be translated into match_spec
{error,transform_error}
5> <input>ets:fun2ms(fun({M,N}) when N > X, is_atom(M) -> M end).</input>
[{{'$1','$2'},[{'>','$2',{const,3}},{is_atom,'$1'}],['$1']}]</pre>
diff --git a/lib/stdlib/doc/src/lists.xml b/lib/stdlib/doc/src/lists.xml
index 60dbae70c2..7efafedc82 100644
--- a/lib/stdlib/doc/src/lists.xml
+++ b/lib/stdlib/doc/src/lists.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>1996</year><year>2016</year>
+ <year>1996</year><year>2017</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -187,7 +187,7 @@
<desc>
<p>Calls <c><anno>Fun</anno>(<anno>Elem</anno>)</c> on successive
elements <c>Elem</c> of <c><anno>List1</anno></c>.
- <c><anno>Fun</anno>/2</c> must return either a Boolean or a tuple
+ <c><anno>Fun</anno>/1</c> must return either a Boolean or a tuple
<c>{true, <anno>Value</anno>}</c>. The function returns the list of
elements for which <c><anno>Fun</anno></c> returns a new value, where
a value of <c>true</c> is synonymous with
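
A short shell example of the behaviour described in this entry (by its description it is filtermap/2, whose fun takes one element and returns true, false, or {true, Value}); the prompt numbers are illustrative:

    1> lists:filtermap(fun(X) ->
               case X rem 2 of
                   0 -> {true, X div 2};
                   _ -> false
               end
       end, [1,2,3,4,5]).
    [1,2]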
diff --git a/lib/stdlib/doc/src/notes.xml b/lib/stdlib/doc/src/notes.xml
index 46454e9b80..bdd5b39cd3 100644
--- a/lib/stdlib/doc/src/notes.xml
+++ b/lib/stdlib/doc/src/notes.xml
@@ -31,6 +31,38 @@
</header>
<p>This document describes the changes made to the STDLIB application.</p>
+<section><title>STDLIB 3.4.1</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p> A bug in <c>proc_lib:format()</c> introduced in
+ Erlang/OTP 20.0 is corrected. </p>
+ <p>
+ Own Id: OTP-14482 Aux Id: PR-1488 </p>
+ </item>
+ <item>
+ <p>
+ Fix string:len/1 to be compatible with previous versions.</p>
+ <p>
+ Own Id: OTP-14487 Aux Id: ERIERL-40 </p>
+ </item>
+ <item>
+ <p>
+ In OTP-20.0, the behavior of c, make, and ct_make was
+ changed so that in some cases the beam files by default
+ would be written to the directory where the source files
+ were found. This is now changed back to the old behavior
+ so beam files are by default written to current
+ directory.</p>
+ <p>
+ Own Id: OTP-14489 Aux Id: ERL-438 </p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>STDLIB 3.4</title>
<section><title>Fixed Bugs and Malfunctions</title>
diff --git a/lib/stdlib/doc/src/rand.xml b/lib/stdlib/doc/src/rand.xml
index 563ca0b268..89fb858823 100644
--- a/lib/stdlib/doc/src/rand.xml
+++ b/lib/stdlib/doc/src/rand.xml
@@ -73,7 +73,7 @@
<p>Jump function: equivalent to 2^64 calls</p>
<p>
This is a corrected version of the previous default algorithm,
- that now has been superseeded by Xoroshiro116+ (<c>exrop</c>).
+ that now has been superseded by Xoroshiro116+ (<c>exrop</c>).
Since there is no native 58 bit rotate instruction this
algorithm executes a little (say &lt; 15%) faster than <c>exrop</c>.
See the
diff --git a/lib/stdlib/src/array.erl b/lib/stdlib/src/array.erl
index 079b761463..a237eaa489 100644
--- a/lib/stdlib/src/array.erl
+++ b/lib/stdlib/src/array.erl
@@ -1603,7 +1603,7 @@ foldl_2(I, E, A, Ix, F, D, N, R, S) ->
Ix + S, F, D, N, R, S).
-spec foldl_3(pos_integer(), _, A, array_indx(),
- fun((array_indx, _, A) -> B), integer()) -> B.
+ fun((array_indx(), _, A) -> B), integer()) -> B.
foldl_3(I, E, A, Ix, F, N) when I =< N ->
foldl_3(I+1, E, F(Ix, element(I, E), A), Ix+1, F, N);
diff --git a/lib/stdlib/src/c.erl b/lib/stdlib/src/c.erl
index 4ab9234b81..c04a201ce1 100644
--- a/lib/stdlib/src/c.erl
+++ b/lib/stdlib/src/c.erl
@@ -255,7 +255,7 @@ safe_recompile(File, Options, BeamFile) ->
compile_and_load(File, Opts0) when is_list(Opts0) ->
Opts = [report_errors, report_warnings
| ensure_from(filename:extension(File),
- ensure_outdir(filename:dirname(File), Opts0))],
+ ensure_outdir(".", Opts0))],
case compile:file(File, Opts) of
{ok,Mod} -> %Listing file.
purge_and_load(Mod, File, Opts);
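
A hedged sketch of the defaulting logic this change relies on; ensure_outdir/2 below is a stand-in with an assumed shape, not the exact c.erl implementation:

    -module(outdir_default_example).
    -export([ensure_outdir/2]).

    %% Add {outdir, Default} only when the caller has not chosen one, so that
    %% c("src/foo.erl") writes foo.beam to the current directory again, while
    %% c("src/foo.erl", [{outdir, "ebin"}]) is still honoured.
    ensure_outdir(Default, Opts) ->
        case lists:keymember(outdir, 1, Opts) of
            true  -> Opts;
            false -> [{outdir, Default} | Opts]
        end.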
diff --git a/lib/stdlib/src/dets.erl b/lib/stdlib/src/dets.erl
index 10e8c9c800..4e3fe0e5c1 100644
--- a/lib/stdlib/src/dets.erl
+++ b/lib/stdlib/src/dets.erl
@@ -1354,7 +1354,7 @@ open_file_loop2(Head, N) ->
?MODULE, [], Head);
Message ->
error_logger:format("** dets: unexpected message"
- "(ignored): ~w~n", [Message]),
+ "(ignored): ~tw~n", [Message]),
open_file_loop(Head, N)
end.
@@ -1403,7 +1403,7 @@ apply_op(Op, From, Head, N) ->
Head;
_Dirty when N =:= 0 -> % dirty or new_dirty
%% The updates seem to have declined
- dets_utils:vformat("** dets: Auto save of ~p\n",
+ dets_utils:vformat("** dets: Auto save of ~tp\n",
[Head#head.name]),
{NewHead, _Res} = perform_save(Head, true),
erlang:garbage_collect(),
@@ -1587,13 +1587,13 @@ bug_found(Name, Op, Bad, From) ->
%% If stream_op/5 found more requests, this is not
%% the last operation.
error_logger:format
- ("** dets: Bug was found when accessing table ~w,~n"
- "** dets: operation was ~p and reply was ~w.~n"
- "** dets: Stacktrace: ~w~n",
+ ("** dets: Bug was found when accessing table ~tw,~n"
+ "** dets: operation was ~tp and reply was ~tw.~n"
+ "** dets: Stacktrace: ~tw~n",
[Name, Op, Bad, erlang:get_stacktrace()]);
false ->
error_logger:format
- ("** dets: Bug was found when accessing table ~w~n",
+ ("** dets: Bug was found when accessing table ~tw~n",
[Name])
end,
if
@@ -2117,7 +2117,7 @@ do_open_file([Fname, Verbose], Parent, Server, Ref) ->
Error;
Bad ->
error_logger:format
- ("** dets: Bug was found in open_file/1, reply was ~w.~n",
+ ("** dets: Bug was found in open_file/1, reply was ~tw.~n",
[Bad]),
{error, {dets_bug, Fname, Bad}}
end;
@@ -2135,7 +2135,7 @@ do_open_file([Tab, OpenArgs, Verb], Parent, Server, _Ref) ->
Bad ->
error_logger:format
("** dets: Bug was found in open_file/2, arguments were~n"
- "** dets: ~w and reply was ~w.~n",
+ "** dets: ~tw and reply was ~tw.~n",
[OpenArgs, Bad]),
{error, {dets_bug, Tab, {open_file, OpenArgs}, Bad}}
end.
@@ -3123,7 +3123,7 @@ check_safe_fixtable(Head) ->
((get(verbose) =:= yes) orelse dets_utils:debug_mode()) of
true ->
error_logger:format
- ("** dets: traversal of ~p needs safe_fixtable~n",
+ ("** dets: traversal of ~tp needs safe_fixtable~n",
[Head#head.name]);
false ->
ok
@@ -3189,7 +3189,7 @@ scan_read(H, From, _To, Min, _L, Ts, R, C) ->
err(Error) ->
case get(verbose) of
yes ->
- error_logger:format("** dets: failed with ~w~n", [Error]),
+ error_logger:format("** dets: failed with ~tw~n", [Error]),
Error;
undefined ->
Error
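
The recurring change in this and the following files is the "t" translation modifier; a minimal, self-contained illustration (module name and sample data are assumptions):

    -module(unicode_log_example).
    -export([report/1]).

    %% ~tp/~tw print terms containing Unicode data according to the device
    %% encoding instead of escaping it, which is what the dets (and gen_*)
    %% error reports switch to here.
    report(Table) ->
        io:format("** report for table ~tw~n", [Table]),
        io:format("** payload: ~tp~n", [#{label => <<"smörgås"/utf8>>}]).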
diff --git a/lib/stdlib/src/dets_utils.erl b/lib/stdlib/src/dets_utils.erl
index da6ebd18f2..17f55ebdc2 100644
--- a/lib/stdlib/src/dets_utils.erl
+++ b/lib/stdlib/src/dets_utils.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2001-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2001-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -387,7 +387,7 @@ corrupt_reason(Head, Reason0) ->
corrupt(Head, Error) ->
case get(verbose) of
yes ->
- error_logger:format("** dets: Corrupt table ~p: ~tp\n",
+ error_logger:format("** dets: Corrupt table ~tp: ~tp\n",
[Head#head.name, Error]);
_ -> ok
end,
diff --git a/lib/stdlib/src/edlin.erl b/lib/stdlib/src/edlin.erl
index 71e8471c45..5df9c504f9 100644
--- a/lib/stdlib/src/edlin.erl
+++ b/lib/stdlib/src/edlin.erl
@@ -83,7 +83,7 @@ edit_line(Cs, {line,P,L,M}) ->
edit_line1(Cs, {line,P,L,{blink,N}}) ->
edit(Cs, P, L, none, [{move_rel,N}]);
edit_line1(Cs, {line,P,{[],[]},none}) ->
- {more_chars, {line,P,{lists:reverse(Cs),[]},none},[{put_chars, unicode, Cs}]};
+ {more_chars, {line,P,{string:reverse(Cs),[]},none},[{put_chars, unicode, Cs}]};
edit_line1(Cs, {line,P,L,M}) ->
edit(Cs, P, L, M, []).
@@ -93,14 +93,14 @@ edit([C|Cs], P, {Bef,Aft}, Prefix, Rs0) ->
case key_map(C, Prefix) of
meta ->
edit(Cs, P, {Bef,Aft}, meta, Rs0);
- meta_o ->
- edit(Cs, P, {Bef,Aft}, meta_o, Rs0);
- meta_csi ->
- edit(Cs, P, {Bef,Aft}, meta_csi, Rs0);
- meta_meta ->
- edit(Cs, P, {Bef,Aft}, meta_meta, Rs0);
- {csi, _} = Csi ->
- edit(Cs, P, {Bef,Aft}, Csi, Rs0);
+ meta_o ->
+ edit(Cs, P, {Bef,Aft}, meta_o, Rs0);
+ meta_csi ->
+ edit(Cs, P, {Bef,Aft}, meta_csi, Rs0);
+ meta_meta ->
+ edit(Cs, P, {Bef,Aft}, meta_meta, Rs0);
+ {csi, _} = Csi ->
+ edit(Cs, P, {Bef,Aft}, Csi, Rs0);
meta_left_sq_bracket ->
edit(Cs, P, {Bef,Aft}, meta_left_sq_bracket, Rs0);
search_meta ->
@@ -110,8 +110,8 @@ edit([C|Cs], P, {Bef,Aft}, Prefix, Rs0) ->
ctlx ->
edit(Cs, P, {Bef,Aft}, ctlx, Rs0);
new_line ->
- {done, reverse(Bef, Aft ++ "\n"), Cs,
- reverse(Rs0, [{move_rel,length(Aft)},{put_chars,unicode,"\n"}])};
+ {done, get_line(Bef, Aft ++ "\n"), Cs,
+ reverse(Rs0, [{move_rel,cp_len(Aft)},{put_chars,unicode,"\n"}])};
redraw_line ->
Rs1 = erase(P, Bef, Aft, Rs0),
Rs = redraw(P, Bef, Aft, Rs1),
@@ -157,7 +157,7 @@ edit([], P, L, {blink,N}, Rs) ->
edit([], P, L, Prefix, Rs) ->
{more_chars,{line,P,L,Prefix},reverse(Rs)};
edit(eof, _, {Bef,Aft}, _, Rs) ->
- {done,reverse(Bef, Aft),[],reverse(Rs, [{move_rel,length(Aft)}])}.
+ {done,get_line(Bef, Aft),[],reverse(Rs, [{move_rel,cp_len(Aft)}])}.
%% %% Assumes that arg is a string
%% %% Horizontal whitespace only.
@@ -279,11 +279,21 @@ key_map(C, search) -> {insert_search,C};
key_map(C, _) -> {undefined,C}.
%% do_op(Action, Before, After, Requests)
-
-do_op({insert,C}, Bef, [], Rs) ->
- {{[C|Bef],[]},[{put_chars, unicode,[C]}|Rs]};
-do_op({insert,C}, Bef, Aft, Rs) ->
- {{[C|Bef],Aft},[{insert_chars, unicode, [C]}|Rs]};
+%% Before and After are of lists of type string:grapheme_cluster()
+do_op({insert,C}, [], [], Rs) ->
+ {{[C],[]},[{put_chars, unicode,[C]}|Rs]};
+do_op({insert,C}, [Bef|Bef0], [], Rs) ->
+ case string:to_graphemes([Bef,C]) of
+ [GC] -> {{[GC|Bef0],[]},[{put_chars, unicode,[C]}|Rs]};
+ _ -> {{[C,Bef|Bef0],[]},[{put_chars, unicode,[C]}|Rs]}
+ end;
+do_op({insert,C}, [], Aft, Rs) ->
+ {{[C],Aft},[{insert_chars, unicode,[C]}|Rs]};
+do_op({insert,C}, [Bef|Bef0], Aft, Rs) ->
+ case string:to_graphemes([Bef,C]) of
+ [GC] -> {{[GC|Bef0],Aft},[{insert_chars, unicode,[C]}|Rs]};
+ _ -> {{[C,Bef|Bef0],Aft},[{insert_chars, unicode,[C]}|Rs]}
+ end;
%% Search mode prompt always looks like (search)`$TERMS': $RESULT.
%% the {insert_search, _} handlings allow to share this implementation
%% correctly with group.erl. This module provides $TERMS, and group.erl
@@ -299,13 +309,13 @@ do_op({insert_search, C}, Bef, [], Rs) ->
[{insert_chars, unicode, [C]++Aft}, {delete_chars,-3} | Rs],
search};
do_op({insert_search, C}, Bef, Aft, Rs) ->
- Offset= length(Aft),
+ Offset= cp_len(Aft),
NAft = "': ",
{{[C|Bef],NAft},
[{insert_chars, unicode, [C]++NAft}, {delete_chars,-Offset} | Rs],
search};
do_op({search, backward_delete_char}, [_|Bef], Aft, Rs) ->
- Offset= length(Aft)+1,
+ Offset= cp_len(Aft)+1,
NAft = "': ",
{{Bef,NAft},
[{insert_chars, unicode, NAft}, {delete_chars,-Offset}|Rs],
@@ -314,13 +324,13 @@ do_op({search, backward_delete_char}, [], _Aft, Rs) ->
Aft="': ",
{{[],Aft}, Rs, search};
do_op({search, skip_up}, Bef, Aft, Rs) ->
- Offset= length(Aft),
+ Offset= cp_len(Aft),
NAft = "': ",
{{[$\^R|Bef],NAft}, % we insert ^R as a flag to whoever called us
[{insert_chars, unicode, NAft}, {delete_chars,-Offset}|Rs],
search};
do_op({search, skip_down}, Bef, Aft, Rs) ->
- Offset= length(Aft),
+ Offset= cp_len(Aft),
NAft = "': ",
{{[$\^S|Bef],NAft}, % we insert ^S as a flag to whoever called us
[{insert_chars, unicode, NAft}, {delete_chars,-Offset}|Rs],
@@ -328,12 +338,12 @@ do_op({search, skip_down}, Bef, Aft, Rs) ->
do_op({search, search_found}, _Bef, Aft, Rs) ->
"': "++NAft = Aft,
{{[],NAft},
- [{put_chars, unicode, "\n"}, {move_rel,-length(Aft)} | Rs],
+ [{put_chars, unicode, "\n"}, {move_rel,-cp_len(Aft)} | Rs],
search_found};
do_op({search, search_quit}, _Bef, Aft, Rs) ->
"': "++NAft = Aft,
{{[],NAft},
- [{put_chars, unicode, "\n"}, {move_rel,-length(Aft)} | Rs],
+ [{put_chars, unicode, "\n"}, {move_rel,-cp_len(Aft)} | Rs],
search_quit};
%% do blink after $$
do_op({blink,C,M}, Bef=[$$,$$|_], Aft, Rs) ->
@@ -361,14 +371,16 @@ do_op(auto_blink, Bef, Aft, Rs) ->
N -> {blink,N+1,{Bef,Aft},
[{move_rel,-(N+1)}|Rs]}
end;
-do_op(forward_delete_char, Bef, [_|Aft], Rs) ->
- {{Bef,Aft},[{delete_chars,1}|Rs]};
-do_op(backward_delete_char, [_|Bef], Aft, Rs) ->
- {{Bef,Aft},[{delete_chars,-1}|Rs]};
+do_op(forward_delete_char, Bef, [GC|Aft], Rs) ->
+ {{Bef,Aft},[{delete_chars,gc_len(GC)}|Rs]};
+do_op(backward_delete_char, [GC|Bef], Aft, Rs) ->
+ {{Bef,Aft},[{delete_chars,-gc_len(GC)}|Rs]};
do_op(transpose_char, [C1,C2|Bef], [], Rs) ->
- {{[C2,C1|Bef],[]},[{put_chars, unicode,[C1,C2]},{move_rel,-2}|Rs]};
+ Len = gc_len(C1)+gc_len(C2),
+ {{[C2,C1|Bef],[]},[{put_chars, unicode,[C1,C2]},{move_rel,-Len}|Rs]};
do_op(transpose_char, [C2|Bef], [C1|Aft], Rs) ->
- {{[C2,C1|Bef],Aft},[{put_chars, unicode,[C1,C2]},{move_rel,-1}|Rs]};
+ Len = gc_len(C2),
+ {{[C2,C1|Bef],Aft},[{put_chars, unicode,[C1,C2]},{move_rel,-Len}|Rs]};
do_op(kill_word, Bef, Aft0, Rs) ->
{Aft1,Kill0,N0} = over_non_word(Aft0, [], 0),
{Aft,Kill,N} = over_word(Aft1, Kill0, N0),
@@ -381,7 +393,7 @@ do_op(backward_kill_word, Bef0, Aft, Rs) ->
{{Bef,Aft},[{delete_chars,-N}|Rs]};
do_op(kill_line, Bef, Aft, Rs) ->
put(kill_buffer, Aft),
- {{Bef,[]},[{delete_chars,length(Aft)}|Rs]};
+ {{Bef,[]},[{delete_chars,cp_len(Aft)}|Rs]};
do_op(yank, Bef, [], Rs) ->
Kill = get(kill_buffer),
{{reverse(Kill, Bef),[]},[{put_chars, unicode,Kill}|Rs]};
@@ -389,9 +401,9 @@ do_op(yank, Bef, Aft, Rs) ->
Kill = get(kill_buffer),
{{reverse(Kill, Bef),Aft},[{insert_chars, unicode,Kill}|Rs]};
do_op(forward_char, Bef, [C|Aft], Rs) ->
- {{[C|Bef],Aft},[{move_rel,1}|Rs]};
+ {{[C|Bef],Aft},[{move_rel,gc_len(C)}|Rs]};
do_op(backward_char, [C|Bef], Aft, Rs) ->
- {{Bef,[C|Aft]},[{move_rel,-1}|Rs]};
+ {{Bef,[C|Aft]},[{move_rel,-gc_len(C)}|Rs]};
do_op(forward_word, Bef0, Aft0, Rs) ->
{Aft1,Bef1,N0} = over_non_word(Aft0, Bef0, 0),
{Aft,Bef,N} = over_word(Aft1, Bef1, N0),
@@ -400,17 +412,17 @@ do_op(backward_word, Bef0, Aft0, Rs) ->
{Bef1,Aft1,N0} = over_non_word(Bef0, Aft0, 0),
{Bef,Aft,N} = over_word(Bef1, Aft1, N0),
{{Bef,Aft},[{move_rel,-N}|Rs]};
-do_op(beginning_of_line, [C|Bef], Aft, Rs) ->
- {{[],reverse(Bef, [C|Aft])},[{move_rel,-(length(Bef)+1)}|Rs]};
+do_op(beginning_of_line, [_|_]=Bef, Aft, Rs) ->
+ {{[],reverse(Bef, Aft)},[{move_rel,-(cp_len(Bef))}|Rs]};
do_op(beginning_of_line, [], Aft, Rs) ->
{{[],Aft},Rs};
-do_op(end_of_line, Bef, [C|Aft], Rs) ->
- {{reverse(Aft, [C|Bef]),[]},[{move_rel,length(Aft)+1}|Rs]};
+do_op(end_of_line, Bef, [_|_]=Aft, Rs) ->
+ {{reverse(Aft, Bef),[]},[{move_rel,cp_len(Aft)}|Rs]};
do_op(end_of_line, Bef, [], Rs) ->
{{Bef,[]},Rs};
do_op(ctlu, Bef, Aft, Rs) ->
put(kill_buffer, reverse(Bef)),
- {{[], Aft}, [{delete_chars, -length(Bef)} | Rs]};
+ {{[], Aft}, [{delete_chars, -cp_len(Bef)} | Rs]};
do_op(beep, Bef, Aft, Rs) ->
{{Bef,Aft},[beep|Rs]};
do_op(_, Bef, Aft, Rs) ->
@@ -436,7 +448,7 @@ over_word(Cs, Stack, N) ->
until_quote([$\'|Cs], Stack, N) ->
{Cs, [$\'|Stack], N+1};
until_quote([C|Cs], Stack, N) ->
- until_quote(Cs, [C|Stack], N+1).
+ until_quote(Cs, [C|Stack], N+gc_len(C)).
over_word1([$\'=C|Cs], Stack, N) ->
until_quote(Cs, [C|Stack], N+1);
@@ -445,7 +457,7 @@ over_word1(Cs, Stack, N) ->
over_word2([C|Cs], Stack, N) ->
case word_char(C) of
- true -> over_word2(Cs, [C|Stack], N+1);
+ true -> over_word2(Cs, [C|Stack], N+gc_len(C));
false -> {[C|Cs],Stack,N}
end;
over_word2([], Stack, N) when is_integer(N) ->
@@ -454,7 +466,7 @@ over_word2([], Stack, N) when is_integer(N) ->
over_non_word([C|Cs], Stack, N) ->
case word_char(C) of
true -> {[C|Cs],Stack,N};
- false -> over_non_word(Cs, [C|Stack], N+1)
+ false -> over_non_word(Cs, [C|Stack], N+gc_len(C))
end;
over_non_word([], Stack, N) ->
{[],Stack,N}.
@@ -465,6 +477,7 @@ word_char(C) when C >= $a, C =< $z -> true;
word_char(C) when C >= $ß, C =< $ÿ, C =/= $÷ -> true;
word_char(C) when C >= $0, C =< $9 -> true;
word_char(C) when C =:= $_ -> true;
+word_char([_|_]) -> true; %% Is grapheme
word_char(_) -> false.
%% over_white(Chars, InitialStack, InitialCount) ->
@@ -488,8 +501,8 @@ over_paren(Chars, Paren, Match) ->
over_paren([C,$$,$$|Cs], Paren, Match, D, N, L) ->
over_paren([C|Cs], Paren, Match, D, N+2, L);
-over_paren([_,$$|Cs], Paren, Match, D, N, L) ->
- over_paren(Cs, Paren, Match, D, N+2, L);
+over_paren([GC,$$|Cs], Paren, Match, D, N, L) ->
+ over_paren(Cs, Paren, Match, D, N+1+gc_len(GC), L);
over_paren([Match|_], _Paren, Match, 1, N, _) ->
N;
over_paren([Match|Cs], Paren, Match, D, N, [Match|L]) ->
@@ -518,8 +531,8 @@ over_paren([$[|_], _, _, _, _, _) ->
over_paren([${|_], _, _, _, _, _) ->
beep;
-over_paren([_|Cs], Paren, Match, D, N, L) ->
- over_paren(Cs, Paren, Match, D, N+1, L);
+over_paren([GC|Cs], Paren, Match, D, N, L) ->
+ over_paren(Cs, Paren, Match, D, N+gc_len(GC), L);
over_paren([], _, _, _, _, _) ->
0.
@@ -529,8 +542,8 @@ over_paren_auto(Chars) ->
over_paren_auto([C,$$,$$|Cs], D, N, L) ->
over_paren_auto([C|Cs], D, N+2, L);
-over_paren_auto([_,$$|Cs], D, N, L) ->
- over_paren_auto(Cs, D, N+2, L);
+over_paren_auto([GC,$$|Cs], D, N, L) ->
+ over_paren_auto(Cs, D, N+1+gc_len(GC), L);
over_paren_auto([$(|_], _, N, []) ->
{N, $)};
@@ -553,8 +566,8 @@ over_paren_auto([$[|Cs], D, N, [$[|L]) ->
over_paren_auto([${|Cs], D, N, [${|L]) ->
over_paren_auto(Cs, D, N+1, L);
-over_paren_auto([_|Cs], D, N, L) ->
- over_paren_auto(Cs, D, N+1, L);
+over_paren_auto([GC|Cs], D, N, L) ->
+ over_paren_auto(Cs, D, N+gc_len(GC), L);
over_paren_auto([], _, _, _) ->
0.
@@ -574,28 +587,43 @@ erase_inp({line,_,{Bef,Aft},_}) ->
reverse(erase([], Bef, Aft, [])).
erase(Pbs, Bef, Aft, Rs) ->
- [{delete_chars,-length(Pbs)-length(Bef)},{delete_chars,length(Aft)}|Rs].
+ [{delete_chars,-cp_len(Pbs)-cp_len(Bef)},{delete_chars,cp_len(Aft)}|Rs].
redraw_line({line,Pbs,{Bef,Aft},_}) ->
reverse(redraw(Pbs, Bef, Aft, [])).
redraw(Pbs, Bef, Aft, Rs) ->
- [{move_rel,-length(Aft)},{put_chars, unicode,reverse(Bef, Aft)},{put_chars, unicode,Pbs}|Rs].
+ [{move_rel,-cp_len(Aft)},{put_chars, unicode,reverse(Bef, Aft)},{put_chars, unicode,Pbs}|Rs].
length_before({line,Pbs,{Bef,_Aft},_}) ->
- length(Pbs) + length(Bef).
+ cp_len(Pbs) + cp_len(Bef).
length_after({line,_,{_Bef,Aft},_}) ->
- length(Aft).
+ cp_len(Aft).
prompt({line,Pbs,_,_}) ->
Pbs.
current_line({line,_,{Bef, Aft},_}) ->
- reverse(Bef, Aft ++ "\n").
+ get_line(Bef, Aft ++ "\n").
current_chars({line,_,{Bef,Aft},_}) ->
- reverse(Bef, Aft).
+ get_line(Bef, Aft).
+
+get_line(Bef, Aft) ->
+ unicode:characters_to_list(reverse(Bef, Aft)).
+
+%% Grapheme length in codepoints
+gc_len(CP) when is_integer(CP) -> 1;
+gc_len(CPs) when is_list(CPs) -> length(CPs).
+
+%% String length in codepoints
+cp_len(Str) ->
+ cp_len(Str, 0).
+
+cp_len([GC|R], Len) ->
+ cp_len(R, Len + gc_len(GC));
+cp_len([], Len) -> Len.
%% %% expand(CurrentBefore) ->
%% %% {yes,Expansion} | no
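
A small sketch of why the line editor now distinguishes grapheme clusters from code points; the helpers mirror the gc_len/1 and cp_len/1 added above, and the module name is illustrative:

    -module(edlin_len_example).
    -export([demo/0]).

    %% A grapheme cluster from string:to_graphemes/1 is either one code point
    %% or a list of code points that render as a single character.
    gc_len(CP) when is_integer(CP) -> 1;
    gc_len(GC) when is_list(GC) -> length(GC).

    cp_len(Str) ->
        lists:sum([gc_len(GC) || GC <- Str]).

    demo() ->
        %% "e" followed by a combining acute accent: one grapheme, two code points.
        [GC] = string:to_graphemes([$e, 16#301]),
        {1, 2} = {length([GC]), cp_len([GC])},
        ok.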
diff --git a/lib/stdlib/src/edlin_expand.erl b/lib/stdlib/src/edlin_expand.erl
index a1a97af4c5..bdcefda6e5 100644
--- a/lib/stdlib/src/edlin_expand.erl
+++ b/lib/stdlib/src/edlin_expand.erl
@@ -23,7 +23,7 @@
-export([expand/1, format_matches/1]).
--import(lists, [reverse/1, nthtail/2, prefix/2]).
+-import(lists, [reverse/1, prefix/2]).
%% expand(CurrentBefore) ->
%% {yes, Expansion, Matches} | {no, Matches}
@@ -75,15 +75,15 @@ to_atom(Str) ->
end.
match(Prefix, Alts, Extra0) ->
- Len = length(Prefix),
+ Len = string:length(Prefix),
Matches = lists:sort(
[{S, A} || {H, A} <- Alts,
- prefix(Prefix, S=hd(io_lib:fwrite("~w",[H])))]),
+ prefix(Prefix, S=flat_write(H))]),
case longest_common_head([N || {N, _} <- Matches]) of
{partial, []} ->
{no, [], Matches}; % format_matches(Matches)};
{partial, Str} ->
- case nthtail(Len, Str) of
+ case string:slice(Str, Len) of
[] ->
{yes, [], Matches}; % format_matches(Matches)};
Remain ->
@@ -94,18 +94,21 @@ match(Prefix, Alts, Extra0) ->
{"(",[{Str,0}]} -> "()";
{_,_} -> Extra0
end,
- {yes, nthtail(Len, Str) ++ Extra, []};
+ {yes, string:slice(Str, Len) ++ Extra, []};
no ->
{no, [], []}
end.
+flat_write(T) ->
+ lists:flatten(io_lib:fwrite("~tw",[T])).
+
%% Return the list of names L in multiple columns.
format_matches(L) ->
{S1, Dots} = format_col(lists:sort(L), []),
S = case Dots of
true ->
{_, Prefix} = longest_common_head(vals(L)),
- PrefixLen = length(Prefix),
+ PrefixLen = string:length(Prefix),
case PrefixLen =< 3 of
true -> S1; % Do not replace the prefix with "...".
false ->
@@ -128,7 +131,7 @@ format_col([A|T], Width, Len, Acc0, LL, Dots) ->
{H0, R} = format_val(A),
Hmax = LL - length(R),
{H, NewDots} =
- case length(H0) > Hmax of
+ case string:length(H0) > Hmax of
true -> {io_lib:format("~-*ts", [Hmax - 3, H0]) ++ "...", true};
false -> {H0, Dots}
end,
@@ -149,12 +152,12 @@ format_val(H) ->
field_width(L, LL) -> field_width(L, 0, LL).
field_width([{H,_}|T], W, LL) ->
- case length(H) of
+ case string:length(H) of
L when L > W -> field_width(T, L, LL);
_ -> field_width(T, W, LL)
end;
field_width([H|T], W, LL) ->
- case length(H) of
+ case string:length(H) of
L when L > W -> field_width(T, L, LL);
_ -> field_width(T, W, LL)
end;
@@ -169,10 +172,11 @@ vals([S|L]) -> [S|vals(L)].
leading_dots([], _Len) -> [];
leading_dots([{H, I}|L], Len) ->
- [{"..." ++ nthtail(Len, H), I}|leading_dots(L, Len)];
+ [{"..." ++ string:slice(H, Len), I}|leading_dots(L, Len)];
leading_dots([H|L], Len) ->
- ["..." ++ nthtail(Len, H)|leading_dots(L, Len)].
+ ["..." ++ string:slice(H, Len)|leading_dots(L, Len)].
+%% Strings are handled naively, but it should be OK here.
longest_common_head([]) ->
no;
longest_common_head(LL) ->
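
The move from length/1 and lists:nthtail/2 to string:length/1 and string:slice/2 matters once completion candidates can contain multi-code-point graphemes; a brief shell illustration (the sample string is an assumption):

    1> length([$e, 16#301 | "xpand"]).         %% code points in "éxpand"
    7
    2> string:length([$e, 16#301 | "xpand"]).  %% grapheme clusters
    6
    3> string:slice([$e, 16#301 | "xpand"], 1). %% drops the first grapheme
    "xpand"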
diff --git a/lib/stdlib/src/erl_lint.erl b/lib/stdlib/src/erl_lint.erl
index fcfd0d8493..9cd4727dc3 100644
--- a/lib/stdlib/src/erl_lint.erl
+++ b/lib/stdlib/src/erl_lint.erl
@@ -194,8 +194,6 @@ format_error({bad_nowarn_bif_clash,{F,A}}) ->
format_error(disallowed_nowarn_bif_clash) ->
io_lib:format("compile directive nowarn_bif_clash is no longer allowed,~n"
" - use explicit module names or -compile({no_auto_import, [F/A]})", []);
-format_error({bad_nowarn_deprecated_function,{M,F,A}}) ->
- io_lib:format("~tw:~tw/~w is not a deprecated function", [M,F,A]);
format_error({bad_on_load,Term}) ->
io_lib:format("badly formed on_load attribute: ~tw", [Term]);
format_error(multiple_on_loads) ->
@@ -856,14 +854,11 @@ not_deprecated(Forms, St0) ->
{nowarn_deprecated_function, MFAs0} <- lists:flatten([Args]),
MFA <- lists:flatten([MFAs0])],
Nowarn = [MFA || {MFA,_L} <- MFAsL],
- Bad = [MFAL || {{M,F,A},_L}=MFAL <- MFAsL,
- otp_internal:obsolete(M, F, A) =:= no],
- St1 = func_line_warning(bad_nowarn_deprecated_function, Bad, St0),
ML = [{M,L} || {{M,_F,_A},L} <- MFAsL, is_atom(M)],
- St3 = foldl(fun ({M,L}, St2) ->
+ St1 = foldl(fun ({M,L}, St2) ->
check_module_name(M, L, St2)
- end, St1, ML),
- St3#lint{not_deprecated = ordsets:from_list(Nowarn)}.
+ end, St0, ML),
+ St1#lint{not_deprecated = ordsets:from_list(Nowarn)}.
%% The nowarn_bif_clash directive is not only deprecated, it's actually an error from R14A
disallowed_compile_flags(Forms, St0) ->
@@ -3243,13 +3238,13 @@ icrt_clauses(Cs, In, Vt, St0) ->
icrt_clauses(Cs, Vt, St) ->
mapfoldl(fun (C, St0) -> icrt_clause(C, Vt, St0) end, St, Cs).
-icrt_clause({clause,_Line,H,G,B}, Vt0, St0) ->
+icrt_clause({clause,_Line,H,G,B}, Vt0, #lint{catch_scope=Scope}=St0) ->
{Hvt,Binvt,St1} = head(H, Vt0, St0),
Vt1 = vtupdate(Hvt, Binvt),
{Gvt,St2} = guard(G, vtupdate(Vt1, Vt0), St1),
Vt2 = vtupdate(Gvt, Vt1),
{Bvt,St3} = exprs(B, vtupdate(Vt2, Vt0), St2),
- {vtupdate(Bvt, Vt2),St3}.
+ {vtupdate(Bvt, Vt2),St3#lint{catch_scope=Scope}}.
icrt_export(Vts, Vt, {Tag,Attrs}, St) ->
{_File,Loc} = loc(Attrs, St),
diff --git a/lib/stdlib/src/error_logger_file_h.erl b/lib/stdlib/src/error_logger_file_h.erl
index b7c193f965..58da0cbdd6 100644
--- a/lib/stdlib/src/error_logger_file_h.erl
+++ b/lib/stdlib/src/error_logger_file_h.erl
@@ -126,7 +126,7 @@ format_body(State, [{Format,Args}|T]) ->
S0
catch
_:_ ->
- format(State, "ERROR: ~p - ~p\n", [Format,Args])
+ format(State, "ERROR: ~tp - ~tp\n", [Format,Args])
end,
[S|format_body(State, T)];
format_body(_State, []) ->
@@ -165,44 +165,26 @@ parse_event({warning_report, _GL, {Pid, std_warning, Args}}) ->
parse_event(_) -> ignore.
format_term(Term) when is_list(Term) ->
- case string_p(Term) of
+ case string_p(lists:flatten(Term)) of
true ->
- [{"~s\n",[Term]}];
+ [{"~ts\n",[Term]}];
false ->
format_term_list(Term)
end;
format_term(Term) ->
- [{"~p\n",[Term]}].
+ [{"~tp\n",[Term]}].
format_term_list([{Tag,Data}|T]) ->
- [{" ~p: ~p\n",[Tag,Data]}|format_term_list(T)];
+ [{" ~tp: ~tp\n",[Tag,Data]}|format_term_list(T)];
format_term_list([Data|T]) ->
- [{" ~p\n",[Data]}|format_term_list(T)];
+ [{" ~tp\n",[Data]}|format_term_list(T)];
format_term_list([]) ->
- [];
-format_term_list(_) ->
- %% Continue to allow non-proper lists for now.
- %% FIXME: Remove this clause in OTP 19.
[].
string_p([]) ->
false;
-string_p(Term) ->
- string_p1(Term).
-
-string_p1([H|T]) when is_integer(H), H >= $\s, H < 255 ->
- string_p1(T);
-string_p1([$\n|T]) -> string_p1(T);
-string_p1([$\r|T]) -> string_p1(T);
-string_p1([$\t|T]) -> string_p1(T);
-string_p1([$\v|T]) -> string_p1(T);
-string_p1([$\b|T]) -> string_p1(T);
-string_p1([$\f|T]) -> string_p1(T);
-string_p1([$\e|T]) -> string_p1(T);
-string_p1([H|T]) when is_list(H) ->
- string_p1(H) andalso string_p1(T);
-string_p1([]) -> true;
-string_p1(_) -> false.
+string_p(FlatList) ->
+ io_lib:printable_list(FlatList).
get_utc_config() ->
%% SASL utc_log configuration overrides stdlib config
@@ -225,7 +207,7 @@ header(Time, Title) ->
end.
header({{Y,Mo,D},{H,Mi,S}}, Title, UTC) ->
- io_lib:format("~n=~s==== ~p-~s-~p::~s:~s:~s ~s===~n",
+ io_lib:format("~n=~ts==== ~p-~s-~p::~s:~s:~s ~s===~n",
[Title,D,month(Mo),Y,t(H),t(Mi),t(S),UTC]).
t(X) when is_integer(X) ->
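
The hand-rolled string_p1/1 walker is replaced by flattening the term and asking io_lib:printable_list/1; a quick shell sketch of the resulting check (sample terms are assumptions):

    1> io_lib:printable_list(lists:flatten(["error in ", ["module ", "foo"]])).
    true
    2> io_lib:printable_list(lists:flatten([{tag, 1}, "data"])).
    false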
diff --git a/lib/stdlib/src/error_logger_tty_h.erl b/lib/stdlib/src/error_logger_tty_h.erl
index 8f0d7b0362..fa940b7264 100644
--- a/lib/stdlib/src/error_logger_tty_h.erl
+++ b/lib/stdlib/src/error_logger_tty_h.erl
@@ -39,13 +39,16 @@
{user,
prev_handler,
io_mod=io,
- depth=unlimited}).
+ depth=unlimited,
+ modifier=""}).
%% This one is used when we takeover from the simple error_logger.
init({[], {error_logger, Buf}}) ->
User = set_group_leader(),
Depth = error_logger:get_format_depth(),
- State = #st{user=User,prev_handler=error_logger,depth=Depth},
+ Modifier = modifier(),
+ State = #st{user=User,prev_handler=error_logger,
+ depth=Depth,modifier=Modifier},
write_events(State, Buf),
{ok, State};
%% This one is used if someone took over from us, and now wants to
@@ -57,7 +60,8 @@ init({[], {error_logger_tty_h, PrevHandler}}) ->
init([]) ->
User = set_group_leader(),
Depth = error_logger:get_format_depth(),
- {ok, #st{user=User,prev_handler=[],depth=Depth}}.
+ Modifier = modifier(),
+ {ok, #st{user=User,prev_handler=[],depth=Depth,modifier=Modifier}}.
handle_event({_Type, GL, _Msg}, State) when node(GL) =/= node() ->
{ok, State};
@@ -91,8 +95,9 @@ code_change(_OldVsn, State, _Extra) ->
write_event(Event, IoMod) ->
do_write_event(#st{io_mod=IoMod}, Event).
-write_event(Event, IoMod, Depth) ->
- do_write_event(#st{io_mod=IoMod,depth=Depth}, Event).
+write_event(Event, IoMod, {Depth, Enc}) ->
+ Modifier = modifier(Enc),
+ do_write_event(#st{io_mod=IoMod,depth=Depth,modifier=Modifier}, Event).
%%% ------------------------------------------------------
@@ -120,12 +125,12 @@ write_events(State, [Ev|Es]) ->
write_events(_State, []) ->
ok.
-do_write_event(State, {Time, Event}) ->
- case parse_event(Event) of
+do_write_event(#st{modifier=M}=State, {Time, Event}) ->
+ case parse_event(Event,M) of
ignore ->
ok;
{Title,Pid,FormatList} ->
- Header = header(Time, Title),
+ Header = header(Time, Title, M),
Body = format_body(State, FormatList),
AtNode = if
node(Pid) =/= node() ->
@@ -144,13 +149,13 @@ do_write_event(State, {Time, Event}) ->
do_write_event(_, _) ->
ok.
-format_body(State, [{Format,Args}|T]) ->
+format_body(#st{modifier=M}=State, [{Format,Args}|T]) ->
S = try format(State, Format, Args) of
S0 ->
S0
catch
_:_ ->
- format(State, "ERROR: ~p - ~p\n", [Format,Args])
+ format(State, "ERROR: ~"++M++"p - ~"++M++"p\n", [Format,Args])
end,
[S|format_body(State, T)];
format_body(_State, []) ->
@@ -174,62 +179,41 @@ limit_format([H|T], Depth) ->
limit_format([], _) ->
[].
-parse_event({error, _GL, {Pid, Format, Args}}) ->
+parse_event({error, _GL, {Pid, Format, Args}},_) ->
{"ERROR REPORT",Pid,[{Format,Args}]};
-parse_event({info_msg, _GL, {Pid, Format, Args}}) ->
+parse_event({info_msg, _GL, {Pid, Format, Args}},_) ->
{"INFO REPORT",Pid,[{Format, Args}]};
-parse_event({warning_msg, _GL, {Pid, Format, Args}}) ->
+parse_event({warning_msg, _GL, {Pid, Format, Args}},_) ->
{"WARNING REPORT",Pid,[{Format,Args}]};
-parse_event({error_report, _GL, {Pid, std_error, Args}}) ->
- {"ERROR REPORT",Pid,format_term(Args)};
-parse_event({info_report, _GL, {Pid, std_info, Args}}) ->
- {"INFO REPORT",Pid,format_term(Args)};
-parse_event({warning_report, _GL, {Pid, std_warning, Args}}) ->
- {"WARNING REPORT",Pid,format_term(Args)};
-parse_event(_) -> ignore.
-
-format_term(Term) when is_list(Term) ->
- case string_p(Term) of
+parse_event({error_report, _GL, {Pid, std_error, Args}},M) ->
+ {"ERROR REPORT",Pid,format_term(Args,M)};
+parse_event({info_report, _GL, {Pid, std_info, Args}},M) ->
+ {"INFO REPORT",Pid,format_term(Args,M)};
+parse_event({warning_report, _GL, {Pid, std_warning, Args}},M) ->
+ {"WARNING REPORT",Pid,format_term(Args,M)};
+parse_event(_,_) -> ignore.
+
+format_term(Term,M) when is_list(Term) ->
+ case string_p(lists:flatten(Term)) of
true ->
- [{"~s\n",[Term]}];
+ [{"~"++M++"s\n",[Term]}];
false ->
- format_term_list(Term)
+ format_term_list(Term,M)
end;
-format_term(Term) ->
- [{"~p\n",[Term]}].
-
-format_term_list([{Tag,Data}|T]) ->
- [{" ~p: ~p\n",[Tag,Data]}|format_term_list(T)];
-format_term_list([Data|T]) ->
- [{" ~p\n",[Data]}|format_term_list(T)];
-format_term_list([]) ->
- [];
-format_term_list(_) ->
- %% Continue to allow non-proper lists for now.
- %% FIXME: Remove this clause in OTP 19.
+format_term(Term,M) ->
+ [{"~"++M++"p\n",[Term]}].
+
+format_term_list([{Tag,Data}|T],M) ->
+ [{" ~"++M++"p: ~"++M++"p\n",[Tag,Data]}|format_term_list(T,M)];
+format_term_list([Data|T],M) ->
+ [{" ~"++M++"p\n",[Data]}|format_term_list(T,M)];
+format_term_list([],_) ->
[].
string_p([]) ->
false;
-string_p(Term) ->
- string_p1(Term).
-
-string_p1([H|T]) when is_integer(H), H >= $\s, H < 255 ->
- string_p1(T);
-string_p1([$\n|T]) -> string_p1(T);
-string_p1([$\r|T]) -> string_p1(T);
-string_p1([$\t|T]) -> string_p1(T);
-string_p1([$\v|T]) -> string_p1(T);
-string_p1([$\b|T]) -> string_p1(T);
-string_p1([$\f|T]) -> string_p1(T);
-string_p1([$\e|T]) -> string_p1(T);
-string_p1([H|T]) when is_list(H) ->
- case string_p1(H) of
- true -> string_p1(T);
- _ -> false
- end;
-string_p1([]) -> true;
-string_p1(_) -> false.
+string_p(FlatList) ->
+ io_lib:printable_list(FlatList).
get_utc_config() ->
%% SASL utc_log configuration overrides stdlib config
@@ -243,16 +227,16 @@ get_utc_config() ->
end
end.
-header(Time, Title) ->
+header(Time, Title, M) ->
case get_utc_config() of
true ->
- header(Time, Title, "UTC ");
+ header(Time, Title, "UTC ", M);
_ ->
- header(calendar:universal_time_to_local_time(Time), Title, "")
+ header(calendar:universal_time_to_local_time(Time), Title, "", M)
end.
-header({{Y,Mo,D},{H,Mi,S}}, Title, UTC) ->
- io_lib:format("~n=~s==== ~p-~s-~p::~s:~s:~s ~s===~n",
+header({{Y,Mo,D},{H,Mi,S}}, Title, UTC, M) ->
+ io_lib:format("~n=~"++M++"s==== ~p-~s-~p::~s:~s:~s ~s===~n",
[Title,D,month(Mo),Y,t(H),t(Mi),t(S),UTC]).
t(X) when is_integer(X) ->
@@ -274,3 +258,13 @@ month(9) -> "Sep";
month(10) -> "Oct";
month(11) -> "Nov";
month(12) -> "Dec".
+
+modifier() ->
+ modifier(encoding()).
+modifier(latin1) ->
+ "";
+modifier(_) ->
+ "t".
+
+encoding() ->
+ proplists:get_value(encoding,io:getopts(),latin1).
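
How the new modifier/0,1 helpers are meant to combine with the format strings above, as a standalone sketch (module and function names are illustrative):

    -module(tty_modifier_example).
    -export([log/2]).

    %% Pick the "t" translation modifier only when the current io device is
    %% Unicode-capable, and splice it into the control sequences at run time.
    modifier(latin1) -> "";
    modifier(_)      -> "t".

    log(Format, Args) ->
        M = modifier(proplists:get_value(encoding, io:getopts(), latin1)),
        io:format("ERROR: ~" ++ M ++ "p - ~" ++ M ++ "p~n", [Format, Args]).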
diff --git a/lib/stdlib/src/escript.erl b/lib/stdlib/src/escript.erl
index 2093916a7c..2b9d8ff65b 100644
--- a/lib/stdlib/src/escript.erl
+++ b/lib/stdlib/src/escript.erl
@@ -860,7 +860,7 @@ code_handler(Name, Args, Dict, File) ->
%% io:format("Calling:~p~n",[{Mod,Name,Args}]),
apply(Mod, Name, Args);
error ->
- io:format("Script does not export ~w/~w\n", [Name,Arity]),
+ io:format("Script does not export ~tw/~w\n", [Name,Arity]),
my_halt(127)
end
end.
diff --git a/lib/stdlib/src/ets.erl b/lib/stdlib/src/ets.erl
index 898b2f5bba..b5d3cd3c8d 100644
--- a/lib/stdlib/src/ets.erl
+++ b/lib/stdlib/src/ets.erl
@@ -1693,7 +1693,7 @@ choice(Height, Width, P, Mode, Tab, Key, Turn, Opos) ->
end,
choice(Height, Width, P, Mode, Tab, Key, Turn, Opos);
[$/|Regexp] -> %% from regexp
- case re:compile(nonl(Regexp)) of
+ case re:compile(nonl(Regexp),[unicode]) of
{ok,Re} ->
re_search(Height, Width, Tab, ets:first(Tab), Re, 1, 1);
{error,{ErrorString,_Pos}} ->
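
Compiling the regexp with the unicode option lets the table viewer match keys containing code points beyond Latin-1; a minimal shell example (pattern and subject are assumptions):

    1> {ok, Re} = re:compile("på.*", [unicode]),
       re:run("påse", Re, [{capture, none}]).
    match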
diff --git a/lib/stdlib/src/gen.erl b/lib/stdlib/src/gen.erl
index 32f43fc706..33af0aed8f 100644
--- a/lib/stdlib/src/gen.erl
+++ b/lib/stdlib/src/gen.erl
@@ -422,7 +422,7 @@ debug_options(Name, Opts) ->
try sys:debug_options(Options)
catch _:_ ->
error_logger:format(
- "~p: ignoring erroneous debug options - ~p~n",
+ "~tp: ignoring erroneous debug options - ~tp~n",
[Name,Options]),
[]
end;
diff --git a/lib/stdlib/src/gen_event.erl b/lib/stdlib/src/gen_event.erl
index da2b0da3ca..a9b98911e2 100644
--- a/lib/stdlib/src/gen_event.erl
+++ b/lib/stdlib/src/gen_event.erl
@@ -441,15 +441,15 @@ system_replace_state(StateFun, [ServerName, MSL, HibernateAfterTimeout, Hib]) ->
print_event(Dev, {in, Msg}, Name) ->
case Msg of
{notify, Event} ->
- io:format(Dev, "*DBG* ~p got event ~p~n", [Name, Event]);
+ io:format(Dev, "*DBG* ~tp got event ~tp~n", [Name, Event]);
{_,_,{call, Handler, Query}} ->
- io:format(Dev, "*DBG* ~p(~p) got call ~p~n",
+ io:format(Dev, "*DBG* ~tp(~tp) got call ~tp~n",
[Name, Handler, Query]);
_ ->
- io:format(Dev, "*DBG* ~p got ~p~n", [Name, Msg])
+ io:format(Dev, "*DBG* ~tp got ~tp~n", [Name, Msg])
end;
print_event(Dev, Dbg, Name) ->
- io:format(Dev, "*DBG* ~p : ~p~n", [Name, Dbg]).
+ io:format(Dev, "*DBG* ~tp : ~tp~n", [Name, Dbg]).
%% server_add_handler(Handler, Args, MSL) -> {Ret, MSL'}.
@@ -582,8 +582,8 @@ server_update(Handler1, Func, Event, SName) ->
remove, SName, normal),
no;
{'EXIT', {undef, [{Mod1, handle_info, [_,_], _}|_]}} ->
- error_logger:warning_msg("** Undefined handle_info in ~p~n"
- "** Unhandled message: ~p~n", [Mod1, Event]),
+ error_logger:warning_msg("** Undefined handle_info in ~tp~n"
+ "** Unhandled message: ~tp~n", [Mod1, Event]),
{ok, Handler1};
Other ->
do_terminate(Mod1, Handler1, {error, Other}, State,
@@ -767,10 +767,10 @@ report_error(Handler, Reason, State, LastIn, SName) ->
State
end,
error_msg("** gen_event handler ~p crashed.~n"
- "** Was installed in ~p~n"
- "** Last event was: ~p~n"
- "** When handler state == ~p~n"
- "** Reason == ~p~n",
+ "** Was installed in ~tp~n"
+ "** Last event was: ~tp~n"
+ "** When handler state == ~tp~n"
+ "** Reason == ~tp~n",
[handler(Handler),SName,LastIn,FmtState,Reason1]).
handler(Handler) when not Handler#handler.id ->
diff --git a/lib/stdlib/src/gen_fsm.erl b/lib/stdlib/src/gen_fsm.erl
index 9ef0ca818c..96a53426e2 100644
--- a/lib/stdlib/src/gen_fsm.erl
+++ b/lib/stdlib/src/gen_fsm.erl
@@ -452,30 +452,30 @@ system_replace_state(StateFun, [Name, StateName, StateData, Mod, Time, Hibernate
print_event(Dev, {in, Msg}, {Name, StateName}) ->
case Msg of
{'$gen_event', Event} ->
- io:format(Dev, "*DBG* ~p got event ~p in state ~w~n",
+ io:format(Dev, "*DBG* ~tp got event ~tp in state ~tw~n",
[Name, Event, StateName]);
{'$gen_all_state_event', Event} ->
io:format(Dev,
- "*DBG* ~p got all_state_event ~p in state ~w~n",
+ "*DBG* ~tp got all_state_event ~tp in state ~tw~n",
[Name, Event, StateName]);
{timeout, Ref, {'$gen_timer', Message}} ->
io:format(Dev,
- "*DBG* ~p got timer ~p in state ~w~n",
+ "*DBG* ~tp got timer ~tp in state ~tw~n",
[Name, {timeout, Ref, Message}, StateName]);
{timeout, _Ref, {'$gen_event', Event}} ->
io:format(Dev,
- "*DBG* ~p got timer ~p in state ~w~n",
+ "*DBG* ~tp got timer ~tp in state ~tw~n",
[Name, Event, StateName]);
_ ->
- io:format(Dev, "*DBG* ~p got ~p in state ~w~n",
+ io:format(Dev, "*DBG* ~tp got ~tp in state ~tw~n",
[Name, Msg, StateName])
end;
print_event(Dev, {out, Msg, To, StateName}, Name) ->
- io:format(Dev, "*DBG* ~p sent ~p to ~w~n"
- " and switched to state ~w~n",
+ io:format(Dev, "*DBG* ~tp sent ~tp to ~tw~n"
+ " and switched to state ~tw~n",
[Name, Msg, To, StateName]);
print_event(Dev, return, {Name, StateName}) ->
- io:format(Dev, "*DBG* ~p switched to state ~w~n",
+ io:format(Dev, "*DBG* ~tp switched to state ~tw~n",
[Name, StateName]).
handle_msg(Msg, Parent, Name, StateName, StateData, Mod, _Time, HibernateAfterTimeout) -> %No debug here
@@ -500,7 +500,7 @@ handle_msg(Msg, Parent, Name, StateName, StateData, Mod, _Time, HibernateAfterTi
exit(R);
{'EXIT', {undef, [{Mod, handle_info, [_,_,_], _}|_]}} ->
error_logger:warning_msg("** Undefined handle_info in ~p~n"
- "** Unhandled message: ~p~n", [Mod, Msg]),
+ "** Unhandled message: ~tp~n", [Mod, Msg]),
loop(Parent, Name, StateName, StateData, Mod, infinity, HibernateAfterTimeout, []);
{'EXIT', What} ->
terminate(What, Name, Msg, Mod, StateName, StateData, []);
@@ -620,29 +620,29 @@ error_info(Reason, Name, Msg, StateName, StateData, Debug) ->
_ ->
Reason
end,
- Str = "** State machine ~p terminating \n" ++
+ Str = "** State machine ~tp terminating \n" ++
get_msg_str(Msg) ++
- "** When State == ~p~n"
- "** Data == ~p~n"
- "** Reason for termination = ~n** ~p~n",
+ "** When State == ~tp~n"
+ "** Data == ~tp~n"
+ "** Reason for termination = ~n** ~tp~n",
format(Str, [Name, get_msg(Msg), StateName, StateData, Reason1]),
sys:print_log(Debug),
ok.
get_msg_str({'$gen_event', _Event}) ->
- "** Last event in was ~p~n";
+ "** Last event in was ~tp~n";
get_msg_str({'$gen_sync_event', _Event}) ->
- "** Last sync event in was ~p~n";
+ "** Last sync event in was ~tp~n";
get_msg_str({'$gen_all_state_event', _Event}) ->
- "** Last event in was ~p (for all states)~n";
+ "** Last event in was ~tp (for all states)~n";
get_msg_str({'$gen_sync_all_state_event', _Event}) ->
- "** Last sync event in was ~p (for all states)~n";
+ "** Last sync event in was ~tp (for all states)~n";
get_msg_str({timeout, _Ref, {'$gen_timer', _Msg}}) ->
- "** Last timer event in was ~p~n";
+ "** Last timer event in was ~tp~n";
get_msg_str({timeout, _Ref, {'$gen_event', _Msg}}) ->
- "** Last timer event in was ~p~n";
+ "** Last timer event in was ~tp~n";
get_msg_str(_Msg) ->
- "** Last message in was ~p~n".
+ "** Last message in was ~tp~n".
get_msg({'$gen_event', Event}) -> Event;
get_msg({'$gen_sync_event', Event}) -> Event;
diff --git a/lib/stdlib/src/gen_server.erl b/lib/stdlib/src/gen_server.erl
index a3d53efd0d..7daa7a9fe4 100644
--- a/lib/stdlib/src/gen_server.erl
+++ b/lib/stdlib/src/gen_server.erl
@@ -621,7 +621,7 @@ try_dispatch(Mod, Func, Msg, State) ->
case erlang:function_exported(Mod, handle_info, 2) of
false ->
error_logger:warning_msg("** Undefined handle_info in ~p~n"
- "** Unhandled message: ~p~n",
+ "** Unhandled message: ~tp~n",
[Mod, Msg]),
{ok, {noreply, State}};
true ->
@@ -785,21 +785,21 @@ system_replace_state(StateFun, [Name, State, Mod, Time, HibernateAfterTimeout])
print_event(Dev, {in, Msg}, Name) ->
case Msg of
{'$gen_call', {From, _Tag}, Call} ->
- io:format(Dev, "*DBG* ~p got call ~p from ~w~n",
+ io:format(Dev, "*DBG* ~tp got call ~tp from ~w~n",
[Name, Call, From]);
{'$gen_cast', Cast} ->
- io:format(Dev, "*DBG* ~p got cast ~p~n",
+ io:format(Dev, "*DBG* ~tp got cast ~tp~n",
[Name, Cast]);
_ ->
- io:format(Dev, "*DBG* ~p got ~p~n", [Name, Msg])
+ io:format(Dev, "*DBG* ~tp got ~tp~n", [Name, Msg])
end;
print_event(Dev, {out, Msg, To, State}, Name) ->
- io:format(Dev, "*DBG* ~p sent ~p to ~w, new state ~w~n",
+ io:format(Dev, "*DBG* ~tp sent ~tp to ~w, new state ~tp~n",
[Name, Msg, To, State]);
print_event(Dev, {noreply, State}, Name) ->
- io:format(Dev, "*DBG* ~p new state ~w~n", [Name, State]);
+ io:format(Dev, "*DBG* ~tp new state ~tp~n", [Name, State]);
print_event(Dev, Event, Name) ->
- io:format(Dev, "*DBG* ~p dbg ~p~n", [Name, Event]).
+ io:format(Dev, "*DBG* ~tp dbg ~tp~n", [Name, Event]).
%%% ---------------------------------------------------
@@ -881,10 +881,10 @@ error_info(Reason, Name, From, Msg, State, Debug) ->
end,
{ClientFmt, ClientArgs} = client_stacktrace(From),
LimitedState = error_logger:limit_term(State),
- error_logger:format("** Generic server ~p terminating \n"
- "** Last message in was ~p~n"
- "** When Server state == ~p~n"
- "** Reason for termination == ~n** ~p~n" ++ ClientFmt,
+ error_logger:format("** Generic server ~tp terminating \n"
+ "** Last message in was ~tp~n"
+ "** When Server state == ~tp~n"
+ "** Reason for termination == ~n** ~tp~n" ++ ClientFmt,
[Name, Msg, LimitedState, Reason1] ++ ClientArgs),
sys:print_log(Debug),
ok.
@@ -898,11 +898,11 @@ client_stacktrace(From) when is_pid(From), node(From) =:= node() ->
{"** Client ~p is dead~n", [From]};
[{current_stacktrace, Stacktrace}, {registered_name, []}] ->
{"** Client ~p stacktrace~n"
- "** ~p~n",
+ "** ~tp~n",
[From, Stacktrace]};
[{current_stacktrace, Stacktrace}, {registered_name, Name}] ->
- {"** Client ~p stacktrace~n"
- "** ~p~n",
+ {"** Client ~tp stacktrace~n"
+ "** ~tp~n",
[Name, Stacktrace]}
end;
client_stacktrace(From) when is_pid(From) ->
diff --git a/lib/stdlib/src/gen_statem.erl b/lib/stdlib/src/gen_statem.erl
index b5e9da1e66..1110d18af6 100644
--- a/lib/stdlib/src/gen_statem.erl
+++ b/lib/stdlib/src/gen_statem.erl
@@ -791,34 +791,34 @@ format_status(
print_event(Dev, {in,Event}, {Name,State}) ->
io:format(
- Dev, "*DBG* ~p receive ~s in state ~p~n",
+ Dev, "*DBG* ~tp receive ~ts in state ~tp~n",
[Name,event_string(Event),State]);
print_event(Dev, {out,Reply,{To,_Tag}}, {Name,State}) ->
io:format(
- Dev, "*DBG* ~p send ~p to ~p from state ~p~n",
+ Dev, "*DBG* ~tp send ~tp to ~p from state ~tp~n",
[Name,Reply,To,State]);
print_event(Dev, {terminate,Reason}, {Name,State}) ->
io:format(
- Dev, "*DBG* ~p terminate ~p in state ~p~n",
+ Dev, "*DBG* ~tp terminate ~tp in state ~tp~n",
[Name,Reason,State]);
print_event(Dev, {Tag,Event,NextState}, {Name,State}) ->
StateString =
case NextState of
State ->
- io_lib:format("~p", [State]);
+ io_lib:format("~tp", [State]);
_ ->
- io_lib:format("~p => ~p", [State,NextState])
+ io_lib:format("~tp => ~tp", [State,NextState])
end,
io:format(
- Dev, "*DBG* ~p ~w ~s in state ~s~n",
+ Dev, "*DBG* ~tp ~tw ~ts in state ~ts~n",
[Name,Tag,event_string(Event),StateString]).
event_string(Event) ->
case Event of
{{call,{Pid,_Tag}},Request} ->
- io_lib:format("call ~p from ~w", [Request,Pid]);
+ io_lib:format("call ~tp from ~w", [Request,Pid]);
{EventType,EventContent} ->
- io_lib:format("~w ~p", [EventType,EventContent])
+ io_lib:format("~tw ~tp", [EventType,EventContent])
end.
sys_debug(Debug, #{name := Name}, State, Entry) ->
@@ -1732,25 +1732,25 @@ error_info(
CallbackMode
end,
error_logger:format(
- "** State machine ~p terminating~n" ++
+ "** State machine ~tp terminating~n" ++
case Q of
[] -> "";
- _ -> "** Last event = ~p~n"
+ _ -> "** Last event = ~tp~n"
end ++
- "** When server state = ~p~n" ++
- "** Reason for termination = ~w:~p~n" ++
+ "** When server state = ~tp~n" ++
+ "** Reason for termination = ~w:~tp~n" ++
"** Callback mode = ~p~n" ++
case Q of
- [_,_|_] -> "** Queued = ~p~n";
+ [_,_|_] -> "** Queued = ~tp~n";
_ -> ""
end ++
case P of
[] -> "";
- _ -> "** Postponed = ~p~n"
+ _ -> "** Postponed = ~tp~n"
end ++
case FixedStacktrace of
[] -> "";
- _ -> "** Stacktrace =~n** ~p~n"
+ _ -> "** Stacktrace =~n** ~tp~n"
end,
[Name |
case Q of
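
All of the ~p -> ~tp and ~w -> ~tw substitutions above (gen_event, gen_fsm, gen_server, gen_statem) have the same motivation: the t translation modifier makes io:format and io_lib:format honour Unicode in atoms and strings, so debug and crash output stays readable for processes carrying non-Latin-1 names or data. A minimal sketch, with an illustrative atom; the exact rendering depends on the shell's encoding settings:

    Atom = 'кириллический атом',
    io:format("~p~n",  [Atom]),   %% may fall back to quoted \x{...} escape sequences
    io:format("~tp~n", [Atom]).   %% can render the code points as text on a Unicode shell
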
diff --git a/lib/stdlib/src/otp_internal.erl b/lib/stdlib/src/otp_internal.erl
index 9e9c0dc413..c59db903dc 100644
--- a/lib/stdlib/src/otp_internal.erl
+++ b/lib/stdlib/src/otp_internal.erl
@@ -63,9 +63,9 @@ obsolete_1(gen_fsm, start, 4) ->
{deprecated, {gen_statem, start, 4}};
obsolete_1(gen_fsm, start_link, 3) ->
- {deprecated, {gen_statem, start, 3}};
+ {deprecated, {gen_statem, start_link, 3}};
obsolete_1(gen_fsm, start_link, 4) ->
- {deprecated, {gen_statem, start, 4}};
+ {deprecated, {gen_statem, start_link, 4}};
obsolete_1(gen_fsm, stop, 1) ->
{deprecated, {gen_statem, stop, 1}};
@@ -83,9 +83,9 @@ obsolete_1(gen_fsm, reply, 2) ->
{deprecated, {gen_statem, reply, 2}};
obsolete_1(gen_fsm, send_event, 2) ->
- {deprecated, {gen_statem, cast, 1}};
+ {deprecated, {gen_statem, cast, 2}};
obsolete_1(gen_fsm, send_all_state_event, 2) ->
- {deprecated, {gen_statem, cast, 1}};
+ {deprecated, {gen_statem, cast, 2}};
obsolete_1(gen_fsm, sync_send_event, 2) ->
{deprecated, {gen_statem, call, 2}};
@@ -98,11 +98,11 @@ obsolete_1(gen_fsm, sync_send_all_state_event, 3) ->
{deprecated, {gen_statem, call, 3}};
obsolete_1(gen_fsm, start_timer, 2) ->
- {deprecated, {erlang, start_timer, 2}};
+ {deprecated, {erlang, start_timer, 3}};
obsolete_1(gen_fsm, cancel_timer, 1) ->
{deprecated, {erlang, cancel_timer, 1}};
obsolete_1(gen_fsm, send_event_after, 2) ->
- {deprecated, {erlang, send_after, 2}};
+ {deprecated, {erlang, send_after, 3}};
%% *** CRYPTO added in OTP 20 ***
@@ -112,7 +112,7 @@ obsolete_1(crypto, rand_uniform, 2) ->
%% *** CRYPTO added in OTP 19 ***
obsolete_1(crypto, rand_bytes, 1) ->
- {deprecated, {crypto, strong_rand_bytes, 1}};
+ {removed, {crypto, strong_rand_bytes, 1}, "20.0"};
%% *** CRYPTO added in R16B01 ***
@@ -485,10 +485,6 @@ obsolete_1(wxPaintDC, new, 0) ->
{deprecated,"deprecated function not available in wxWidgets-2.9 and later"};
obsolete_1(wxWindowDC, new, 0) ->
{deprecated,"deprecated function not available in wxWidgets-2.9 and later"};
-obsolete_1(wxGraphicsContext, createLinearGradientBrush, 7) ->
- {deprecated,"deprecated function not available in wxWidgets-2.9 and later"};
-obsolete_1(wxGraphicsContext, createRadialGradientBrush, 8) ->
- {deprecated,"deprecated function not available in wxWidgets-2.9 and later"};
obsolete_1(wxGraphicsRenderer, createLinearGradientBrush, 7) ->
{deprecated,"deprecated function not available in wxWidgets-2.9 and later"};
obsolete_1(wxGraphicsRenderer, createRadialGradientBrush, 8) ->
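
The otp_internal corrections above only change what the compiler suggests as replacements for the deprecated gen_fsm API; code being migrated would follow the corrected mapping roughly like this (a sketch, not taken from the patch):

    gen_fsm:start_link(Mod, Args, Opts),   %% -> gen_statem:start_link(Mod, Args, Opts)
    gen_fsm:send_event(FsmRef, Event),     %% -> gen_statem:cast(FsmRef, Event)
    gen_fsm:start_timer(Time, Msg),        %% -> erlang:start_timer(Time, self(), Msg)
    gen_fsm:send_event_after(Time, Msg).   %% -> erlang:send_after(Time, self(), Msg)
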
diff --git a/lib/stdlib/src/proc_lib.erl b/lib/stdlib/src/proc_lib.erl
index 9ce8e7d60e..8e10cbe93b 100644
--- a/lib/stdlib/src/proc_lib.erl
+++ b/lib/stdlib/src/proc_lib.erl
@@ -779,11 +779,13 @@ format_link_report([Link|Reps], Indent, Extra) ->
LinkIndent = [" ",Indent],
[Indent,"neighbour:\n",format_report(Rep, LinkIndent, Extra)|
format_link_report(Reps, Indent, Extra)];
-format_link_report([], _, _) ->
- [].
+format_link_report(Rep, Indent, Extra) ->
+ format_report(Rep, Indent, Extra).
format_report(Rep, Indent, Extra) when is_list(Rep) ->
format_rep(Rep, Indent, Extra);
+format_report(Rep, Indent, {Enc,unlimited}) ->
+ io_lib:format("~s~"++modifier(Enc)++"p~n", [Indent, Rep]);
format_report(Rep, Indent, {Enc,Depth}) ->
io_lib:format("~s~"++modifier(Enc)++"P~n", [Indent, Rep, Depth]).
@@ -821,22 +823,22 @@ to_string(A, _) ->
io_lib:write_atom(A).
pp_fun({Enc,Depth}) ->
- {Letter,Tl} = case Depth of
- unlimited -> {"p",[]};
- _ -> {"P",[Depth]}
- end,
- P = modifier(Enc) ++ Letter,
+ {P,Tl} = p(Enc, Depth),
fun(Term, I) ->
io_lib:format("~." ++ integer_to_list(I) ++ P, [Term|Tl])
end.
-format_tag(Indent, Tag, Data, {_Enc,Depth}) ->
- case Depth of
- unlimited ->
- io_lib:format("~s~p: ~80.18p~n", [Indent, Tag, Data]);
- _ ->
- io_lib:format("~s~p: ~80.18P~n", [Indent, Tag, Data, Depth])
- end.
+format_tag(Indent, Tag, Data, {Enc,Depth}) ->
+ {P,Tl} = p(Enc, Depth),
+ io_lib:format("~s~p: ~80.18" ++ P ++ "\n", [Indent, Tag, Data|Tl]).
+
+p(Encoding, Depth) ->
+ {Letter, Tl} = case Depth of
+ unlimited -> {"p", []};
+ _ -> {"P", [Depth]}
+ end,
+ P = modifier(Encoding) ++ Letter,
+ {P, Tl}.
modifier(latin1) -> "";
modifier(_) -> "t".
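
The new p/2 helper above just centralises how the control sequence is built from the encoding and the depth; the values it returns, given the code shown, are:

    %% p(latin1,  unlimited) -> {"p",  []}
    %% p(latin1,  20)        -> {"P",  [20]}
    %% p(unicode, unlimited) -> {"tp", []}
    %% p(unicode, 20)        -> {"tP", [20]}
    %% so format_tag/4 ends up calling, e.g.,
    %% io_lib:format("~s~p: ~80.18tP\n", [Indent, Tag, Data, 20]).
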
diff --git a/lib/stdlib/src/qlc.erl b/lib/stdlib/src/qlc.erl
index 535ca57a6b..f11f9d0a0b 100644
--- a/lib/stdlib/src/qlc.erl
+++ b/lib/stdlib/src/qlc.erl
@@ -1132,7 +1132,7 @@ wait_for_request(Parent, MonRef, Post) ->
wait_for_request(Parent, MonRef, Post);
Other ->
error_logger:error_msg(
- "The qlc cursor ~w received an unexpected message:\n~p\n",
+ "The qlc cursor ~w received an unexpected message:\n~tp\n",
[self(), Other]),
wait_for_request(Parent, MonRef, Post)
end.
diff --git a/lib/stdlib/src/shell.erl b/lib/stdlib/src/shell.erl
index 6eafc7b209..212b143b1d 100644
--- a/lib/stdlib/src/shell.erl
+++ b/lib/stdlib/src/shell.erl
@@ -727,7 +727,7 @@ result_will_be_saved() ->
used_record_defs(E, RT) ->
%% Be careful to return a list where used records come before
%% records that use them. The linter wants them ordered that way.
- UR = case used_records(E, [], RT) of
+ UR = case used_records(E, [], RT, []) of
[] ->
[];
L0 ->
@@ -737,13 +737,19 @@ used_record_defs(E, RT) ->
end,
record_defs(RT, UR).
-used_records(E, U0, RT) ->
+used_records(E, U0, RT, Skip) ->
case used_records(E) of
{name,Name,E1} ->
- U = used_records(ets:lookup(RT, Name), [Name | U0], RT),
- used_records(E1, U, RT);
+ U = case lists:member(Name, Skip) of
+ true ->
+ U0;
+ false ->
+ R = ets:lookup(RT, Name),
+ used_records(R, [Name | U0], RT, [Name | Skip])
+ end,
+ used_records(E1, U, RT, Skip);
{expr,[E1 | Es]} ->
- used_records(Es, used_records(E1, U0, RT), RT);
+ used_records(Es, used_records(E1, U0, RT, Skip), RT, Skip);
_ ->
U0
end.
@@ -1453,7 +1459,7 @@ check_env(V) ->
{ok, Val} ->
Txt = io_lib:fwrite
("Invalid value of STDLIB configuration parameter"
- "~w: ~tp\n", [V, Val]),
+ "~tw: ~tp\n", [V, Val]),
error_logger:info_report(lists:flatten(Txt))
end.
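
The Skip accumulator added to used_records/4 above keeps the shell from revisiting a record definition it is already expanding; without it, a typed record that refers to itself could send the lookup into a loop. An illustrative definition that now expands cleanly:

    %% recursive.hrl -- a self-referential typed record (illustration only)
    -record(node, {value :: term(),
                   next :: #node{} | undefined}).

    %% In the shell:
    %% 1> rr("recursive.hrl").
    %% 2> (#node{next = #node{}})#node.next.
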
diff --git a/lib/stdlib/src/slave.erl b/lib/stdlib/src/slave.erl
index 5b5c328c0c..d7cf6386f5 100644
--- a/lib/stdlib/src/slave.erl
+++ b/lib/stdlib/src/slave.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2016. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -77,7 +77,7 @@ start_pseudo(_,_,_) -> ok. %% It's already there
Pid :: pid().
relay({badrpc,Reason}) ->
- error_msg(" ** exiting relay server ~w :~w **~n", [self(),Reason]),
+ error_msg(" ** exiting relay server ~w :~tw **~n", [self(),Reason]),
exit(Reason);
relay(undefined) ->
error_msg(" ** exiting relay server ~w **~n", [self()]),
diff --git a/lib/stdlib/src/string.erl b/lib/stdlib/src/string.erl
index 6f7009b5d9..4972da297d 100644
--- a/lib/stdlib/src/string.erl
+++ b/lib/stdlib/src/string.erl
@@ -384,7 +384,7 @@ to_float(String) ->
end.
to_number(String, Number, Rest, List, _Tail) when is_binary(String) ->
- BSz = length(List)-length(Rest),
+ BSz = erlang:length(List)-erlang:length(Rest),
<<_:BSz/binary, Cont/binary>> = String,
{Number, Cont};
to_number(_, Number, Rest, _, Tail) ->
@@ -1344,7 +1344,7 @@ bin_search_str(Bin0, Start, Cont, [CP|_]=SearchCPs) ->
String :: string(),
Length :: non_neg_integer().
-len(S) -> length(S).
+len(S) -> erlang:length(S).
%% equal(String1, String2)
%% Test if 2 strings are equal.
@@ -1689,7 +1689,7 @@ left(String, Len) when is_integer(Len) -> left(String, Len, $\s).
Character :: char().
left(String, Len, Char) when is_integer(Char) ->
- Slen = length(String),
+ Slen = erlang:length(String),
if
Slen > Len -> substr(String, 1, Len);
Slen < Len -> l_pad(String, Len-Slen, Char);
@@ -1714,7 +1714,7 @@ right(String, Len) when is_integer(Len) -> right(String, Len, $\s).
Character :: char().
right(String, Len, Char) when is_integer(Char) ->
- Slen = length(String),
+ Slen = erlang:length(String),
if
Slen > Len -> substr(String, Slen-Len+1);
Slen < Len -> r_pad(String, Len-Slen, Char);
@@ -1741,7 +1741,7 @@ centre(String, Len) when is_integer(Len) -> centre(String, Len, $\s).
centre(String, 0, Char) when is_list(String), is_integer(Char) ->
[]; % Strange cases to centre string
centre(String, Len, Char) when is_integer(Char) ->
- Slen = length(String),
+ Slen = erlang:length(String),
if
Slen > Len -> substr(String, (Slen-Len) div 2 + 1, Len);
Slen < Len ->
diff --git a/lib/stdlib/src/supervisor.erl b/lib/stdlib/src/supervisor.erl
index 1cd65fbf18..7920e55930 100644
--- a/lib/stdlib/src/supervisor.erl
+++ b/lib/stdlib/src/supervisor.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2016. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -628,7 +628,7 @@ handle_info({'EXIT', Pid, Reason}, State) ->
end;
handle_info(Msg, State) ->
- error_logger:error_msg("Supervisor received unexpected message: ~p~n",
+ error_logger:error_msg("Supervisor received unexpected message: ~tp~n",
[Msg]),
{noreply, State}.
diff --git a/lib/stdlib/test/Makefile b/lib/stdlib/test/Makefile
index 72211332e9..7b79dcf04d 100644
--- a/lib/stdlib/test/Makefile
+++ b/lib/stdlib/test/Makefile
@@ -36,6 +36,7 @@ MODULES= \
ets_tough_SUITE \
expand_test \
expand_test1 \
+ unicode_expand \
ExpandTestCaps \
ExpandTestCaps1 \
filelib_SUITE \
diff --git a/lib/stdlib/test/c_SUITE.erl b/lib/stdlib/test/c_SUITE.erl
index 4bd32a30f8..f01988478c 100644
--- a/lib/stdlib/test/c_SUITE.erl
+++ b/lib/stdlib/test/c_SUITE.erl
@@ -21,7 +21,9 @@
-export([all/0, suite/0,groups/0,init_per_suite/1, end_per_suite/1,
init_per_group/2,end_per_group/2]).
-export([c_1/1, c_2/1, c_3/1, c_4/1, nc_1/1, nc_2/1, nc_3/1, nc_4/1,
- ls/1, memory/1]).
+ c_default_outdir_1/1, c_default_outdir_2/1,
+ nc_default_outdir_1/1, nc_default_outdir_2/1,
+ ls/1, memory/1]).
-include_lib("common_test/include/ct.hrl").
@@ -30,7 +32,10 @@
suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
- [c_1, c_2, c_3, c_4, nc_1, nc_2, nc_3, nc_4, ls, memory].
+ [c_1, c_2, c_3, c_4, nc_1, nc_2, nc_3, nc_4,
+ c_default_outdir_1, c_default_outdir_2,
+ nc_default_outdir_1, nc_default_outdir_2,
+ ls, memory].
groups() ->
[].
@@ -124,6 +129,50 @@ nc_4(Config) when is_list(Config) ->
Result = nc(R,[{outdir,W}]),
{ok, m} = Result.
+c_default_outdir_1(Config) ->
+ R = filename:join(proplists:get_value(data_dir, Config), "m.erl"),
+ W = proplists:get_value(priv_dir, Config),
+ file:set_cwd(W),
+ Obj = "m" ++ code:objfile_extension(),
+ _ = file:delete(Obj),
+ false = filelib:is_file(Obj),
+ Result = c:c(R),
+ {ok, m} = Result,
+ true = filelib:is_file(Obj).
+
+c_default_outdir_2(Config) ->
+ R = filename:join(proplists:get_value(data_dir, Config), "m"),
+ W = proplists:get_value(priv_dir, Config),
+ file:set_cwd(W),
+ Obj = "m" ++ code:objfile_extension(),
+ _ = file:delete(Obj),
+ false = filelib:is_file(Obj),
+ Result = c:c(R),
+ {ok, m} = Result,
+ true = filelib:is_file(Obj).
+
+nc_default_outdir_1(Config) ->
+ R = filename:join(proplists:get_value(data_dir, Config), "m.erl"),
+ W = proplists:get_value(priv_dir, Config),
+ file:set_cwd(W),
+ Obj = "m" ++ code:objfile_extension(),
+ _ = file:delete(Obj),
+ false = filelib:is_file(Obj),
+ Result = c:nc(R),
+ {ok, m} = Result,
+ true = filelib:is_file(Obj).
+
+nc_default_outdir_2(Config) ->
+ R = filename:join(proplists:get_value(data_dir, Config), "m"),
+ W = proplists:get_value(priv_dir, Config),
+ file:set_cwd(W),
+ Obj = "m" ++ code:objfile_extension(),
+ _ = file:delete(Obj),
+ false = filelib:is_file(Obj),
+ Result = c:nc(R),
+ {ok, m} = Result,
+ true = filelib:is_file(Obj).
+
ls(Config) when is_list(Config) ->
Directory = proplists:get_value(data_dir, Config),
ok = c:ls(Directory),
diff --git a/lib/stdlib/test/edlin_expand_SUITE.erl b/lib/stdlib/test/edlin_expand_SUITE.erl
index 1f694ea549..5c2b1965ba 100644
--- a/lib/stdlib/test/edlin_expand_SUITE.erl
+++ b/lib/stdlib/test/edlin_expand_SUITE.erl
@@ -22,7 +22,7 @@
init_per_testcase/2, end_per_testcase/2,
init_per_group/2,end_per_group/2]).
-export([normal/1, quoted_fun/1, quoted_module/1, quoted_both/1, erl_1152/1,
- erl_352/1]).
+ erl_352/1, unicode/1]).
-include_lib("common_test/include/ct.hrl").
@@ -37,7 +37,8 @@ suite() ->
{timetrap,{minutes,1}}].
all() ->
- [normal, quoted_fun, quoted_module, quoted_both, erl_1152, erl_352].
+ [normal, quoted_fun, quoted_module, quoted_both, erl_1152, erl_352,
+ unicode].
groups() ->
[].
@@ -150,6 +151,7 @@ quoted_both(Config) when is_list(Config) ->
{yes,"weird-fun-name'()",[]} = do_expand("'ExpandTestCaps1':'#"),
ok.
+%% Note: pull request #1152.
erl_1152(Config) when is_list(Config) ->
"\n"++"foo"++" "++[1089]++_ = do_format(["foo",[1089]]),
ok.
@@ -226,6 +228,26 @@ check_trailing([I|Str], ArityStr, Suffix, Dots) ->
Rest =:= Suffix
end.
+unicode(Config) when is_list(Config) ->
+ {module,unicode_expand} = c:l('unicode_expand'),
+ {no,[],[{"'кlирилли́ческий атом'",0},
+ {"'кlирилли́ческий атом'",1},
+ {"'кlирилли́ческий атомB'",1},
+ {"module_info",0},
+ {"module_info",1}]} = do_expand("unicode_expand:"),
+ {yes,"рилли́ческий атом", []} = do_expand("unicode_expand:'кlи"),
+ {yes,"еский атом", []} = do_expand("unicode_expand:'кlирилли́ч"),
+ {yes,"(",[]} = do_expand("unicode_expand:'кlирилли́ческий атомB'"),
+ "\n'кlирилли́ческий атом'/0 'кlирилли́ческий атом'/1 "
+ "'кlирилли́ческий атомB'/1 \nmodule_info/0 "
+ "module_info/1 \n" =
+ do_format([{"'кlирилли́ческий атом'",0},
+ {"'кlирилли́ческий атом'",1},
+ {"'кlирилли́ческий атомB'",1},
+ {"module_info",0},
+ {"module_info",1}]),
+ ok.
+
do_expand(String) ->
edlin_expand:expand(lists:reverse(String)).
diff --git a/lib/stdlib/test/erl_lint_SUITE.erl b/lib/stdlib/test/erl_lint_SUITE.erl
index 6a75eaa737..b76bece07f 100644
--- a/lib/stdlib/test/erl_lint_SUITE.erl
+++ b/lib/stdlib/test/erl_lint_SUITE.erl
@@ -66,7 +66,7 @@
otp_11851/1,otp_11879/1,otp_13230/1,
record_errors/1, otp_11879_cont/1,
non_latin1_module/1, otp_14323/1,
- get_stacktrace/1, otp_14285/1]).
+ get_stacktrace/1, otp_14285/1, otp_14378/1]).
suite() ->
[{ct_hooks,[ts_install_cth]},
@@ -87,7 +87,7 @@ all() ->
maps, maps_type, maps_parallel_match,
otp_11851, otp_11879, otp_13230,
record_errors, otp_11879_cont, non_latin1_module, otp_14323,
- get_stacktrace, otp_14285].
+ get_stacktrace, otp_14285, otp_14378].
groups() ->
[{unused_vars_warn, [],
@@ -2054,12 +2054,10 @@ otp_5362(Config) when is_list(Config) ->
spawn(A).
">>,
{[nowarn_unused_function]},
- {error,[{3,erl_lint,disallowed_nowarn_bif_clash},
- {4,erl_lint,disallowed_nowarn_bif_clash},
- {4,erl_lint,{bad_nowarn_bif_clash,{spawn,2}}}],
- [{5,erl_lint,{bad_nowarn_deprecated_function,{3,now,-1}}},
- {5,erl_lint,{bad_nowarn_deprecated_function,{erlang,now,-1}}},
- {5,erl_lint,{bad_nowarn_deprecated_function,{{a,b,c},now,-1}}}]}
+ {errors,[{3,erl_lint,disallowed_nowarn_bif_clash},
+ {4,erl_lint,disallowed_nowarn_bif_clash},
+ {4,erl_lint,{bad_nowarn_bif_clash,{spawn,2}}}],
+ []}
},
{otp_5362_8,
@@ -3937,10 +3935,6 @@ non_latin1_module(Config) ->
UndefBehav = {undefined_behaviour,'кирилли́ческий атом'},
"behaviour 'кирилли́ческий атом' undefined" =
format_error(UndefBehav),
- BadDepr = {bad_nowarn_deprecated_function,
- {'кирилли́ческий атом','кирилли́ческий атом',18}},
- "'кирилли́ческий атом':'кирилли́ческий атом'/18 is not a deprecated "
- "function" = format_error(BadDepr),
Ts = [{non_latin1_module,
<<"
%% Report uses of module names with non-Latin-1 characters.
@@ -3951,9 +3945,6 @@ non_latin1_module(Config) ->
-callback 'кирилли́ческий атом':'кирилли́ческий атом'() -> a.
- -compile([{nowarn_deprecated_function,
- [{'кирилли́ческий атом','кирилли́ческий атом',18}]}]).
-
%% erl_lint:gexpr/3 is not extended to check module name here:
t1() when 'кирилли́ческий атом':'кирилли́ческий атом'(1) ->
b.
@@ -3977,16 +3968,14 @@ non_latin1_module(Config) ->
{6,erl_lint,non_latin1_module_unsupported},
{8,erl_lint,non_latin1_module_unsupported},
{8,erl_lint,BadCallback},
- {10,erl_lint,non_latin1_module_unsupported},
- {14,erl_lint,illegal_guard_expr},
- {18,erl_lint,non_latin1_module_unsupported},
+ {11,erl_lint,illegal_guard_expr},
+ {15,erl_lint,non_latin1_module_unsupported},
+ {17,erl_lint,non_latin1_module_unsupported},
{20,erl_lint,non_latin1_module_unsupported},
{23,erl_lint,non_latin1_module_unsupported},
- {26,erl_lint,non_latin1_module_unsupported},
- {28,erl_lint,non_latin1_module_unsupported}],
+ {25,erl_lint,non_latin1_module_unsupported}],
[{5,erl_lint,UndefBehav},
- {6,erl_lint,UndefBehav},
- {10,erl_lint,BadDepr}]}}],
+ {6,erl_lint,UndefBehav}]}}],
run(Config, Ts),
ok.
@@ -4000,6 +3989,22 @@ do_non_latin1_module(Mod) ->
ok.
+otp_14378(Config) ->
+ Ts = [
+ {otp_14378_1,
+ <<"-export([t/0]).
+ -compile({nowarn_deprecated_function,{erlang,now,1}}).
+ t() ->
+ erlang:now().">>,
+ [],
+ {warnings,[{4,erl_lint,
+ {deprecated,{erlang,now,0},
+ "Deprecated BIF. See the \"Time and Time Correction"
+ " in Erlang\" chapter of the ERTS User's Guide"
+ " for more information."}}]}}],
+ [] = run(Config, Ts),
+ ok.
+
%% OTP-14323: Check the dialyzer attribute.
otp_14323(Config) ->
Ts = [
@@ -4099,7 +4104,27 @@ get_stacktrace(Config) ->
[],
{warnings,[{4,erl_lint,{get_stacktrace,wrong_part_of_try}},
{13,erl_lint,{get_stacktrace,after_try}},
- {22,erl_lint,{get_stacktrace,after_try}}]}}],
+ {22,erl_lint,{get_stacktrace,after_try}}]}},
+ {multiple_catch_clauses,
+ <<"maybe_error(Arg) ->
+ try 5 / Arg
+ catch
+ error:badarith ->
+ _Stacktrace = erlang:get_stacktrace(),
+ try io:nl()
+ catch
+ error:_ -> io:format('internal error')
+ end;
+ error:badarg ->
+ _Stacktrace = erlang:get_stacktrace(),
+ try io:format(qwe)
+ catch
+ error:_ -> io:format('internal error')
+ end
+ end.
+ ">>,
+ [],
+ []}],
run(Config, Ts),
ok.
diff --git a/lib/stdlib/test/error_logger_h_SUITE.erl b/lib/stdlib/test/error_logger_h_SUITE.erl
index 30f96e0522..1f2a9fda0b 100644
--- a/lib/stdlib/test/error_logger_h_SUITE.erl
+++ b/lib/stdlib/test/error_logger_h_SUITE.erl
@@ -162,7 +162,7 @@ tty_log_open(Log) ->
{ok,D} -> D;
_ -> unlimited
end,
- error_logger:add_report_handler(?MODULE, {Fd,Depth}),
+ error_logger:add_report_handler(?MODULE, {Fd,Depth,latin1}),
Fd.
tty_log_close() ->
@@ -393,11 +393,11 @@ dl_format_1([], [], _, Facc, Aacc) ->
%%% calling error_logger_tty_h:write_event/2.
%%%
-init({_,_}=St) ->
+init({_,_,_}=St) ->
{ok,St}.
-handle_event(Event, {Fd,Depth}=St) ->
- case error_logger_tty_h:write_event(tag_event(Event), io_lib, Depth) of
+handle_event(Event, {Fd,Depth,Enc}=St) ->
+ case error_logger_tty_h:write_event(tag_event(Event), io_lib, {Depth,Enc}) of
ok ->
ok;
Str when is_list(Str) ->
@@ -405,7 +405,7 @@ handle_event(Event, {Fd,Depth}=St) ->
end,
{ok,St}.
-terminate(_Reason, {Fd,_}) ->
+terminate(_Reason, {Fd,_,_}) ->
ok = file:close(Fd),
[].
diff --git a/lib/stdlib/test/id_transform_SUITE.erl b/lib/stdlib/test/id_transform_SUITE.erl
index 3d4ae1a189..186df41d3f 100644
--- a/lib/stdlib/test/id_transform_SUITE.erl
+++ b/lib/stdlib/test/id_transform_SUITE.erl
@@ -61,8 +61,13 @@ id_transform(Config) when is_list(Config) ->
"erl_id_trans.erl"]),
{ok,erl_id_trans,Bin} = compile:file(File,[binary]),
{module,erl_id_trans} = code:load_binary(erl_id_trans, File, Bin),
- ct:timetrap({hours,1}),
- run_in_test_suite().
+ case test_server:is_valgrind() of
+ false ->
+ ct:timetrap({hours,1}),
+ run_in_test_suite();
+ true ->
+ {skip,"Valgrind (too slow)"}
+ end.
run_in_test_suite() ->
SuperDir = filename:dirname(filename:dirname(code:which(?MODULE))),
diff --git a/lib/stdlib/test/proc_lib_SUITE.erl b/lib/stdlib/test/proc_lib_SUITE.erl
index 029e6286e4..7686889360 100644
--- a/lib/stdlib/test/proc_lib_SUITE.erl
+++ b/lib/stdlib/test/proc_lib_SUITE.erl
@@ -28,7 +28,7 @@
init_per_group/2,end_per_group/2,
crash/1, stacktrace/1, sync_start_nolink/1, sync_start_link/1,
spawn_opt/1, sp1/0, sp2/0, sp3/1, sp4/2, sp5/1, '\x{447}'/0,
- hibernate/1, stop/1, t_format/1]).
+ hibernate/1, stop/1, t_format/1, t_format_arbitrary/1]).
-export([ otp_6345/1, init_dont_hang/1]).
-export([hib_loop/1, awaken/1]).
@@ -51,7 +51,7 @@ suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
[crash, stacktrace, {group, sync_start}, spawn_opt, hibernate,
- {group, tickets}, stop, t_format].
+ {group, tickets}, stop, t_format, t_format_arbitrary].
groups() ->
[{tickets, [], [otp_6345, init_dont_hang]},
@@ -78,6 +78,14 @@ end_per_group(_GroupName, Config) ->
%% synchronous, and we want to test that the crash report is ok.
%%-----------------------------------------------------------------
crash(Config) when is_list(Config) ->
+ ok = application:unset_env(kernel, error_logger_format_depth),
+ crash_1(Config),
+ ok = application:set_env(kernel, error_logger_format_depth, 30),
+ crash_1(Config),
+ ok = application:unset_env(kernel, error_logger_format_depth),
+ ok.
+
+crash_1(_Config) ->
error_logger:add_report_handler(?MODULE, self()),
%% Make sure that we don't get a crash report if a process
@@ -544,14 +552,17 @@ t_format(_Config) ->
t_format() ->
error_logger:add_report_handler(?MODULE, self()),
- Pid = proc_lib:spawn(fun t_format_looper/0),
+ Pid = proc_lib:spawn(fun '\x{aaa}t_format_looper'/0),
HugeData = gb_sets:from_list(lists:seq(1, 100)),
- Pid ! {die,HugeData},
+ SomeData1 = list_to_atom([246]),
+ SomeData2 = list_to_atom([1024]),
+ Pid ! {SomeData1,SomeData2},
+ Pid ! {die,{HugeData,SomeData1,SomeData2}},
Report = receive
{crash_report, Pid, Report0} -> Report0
end,
- Usz = do_test_format(Report, unlimited),
- Tsz = do_test_format(Report, 20),
+ Usz = do_test_format(Report, latin1, unlimited),
+ Tsz = do_test_format(Report, latin1, 20),
if
Tsz >= Usz ->
@@ -560,21 +571,58 @@ t_format() ->
ok
end,
+ UszU = do_test_format(Report, unicode, unlimited),
+ TszU = do_test_format(Report, unicode, 20),
+
+ if
+ TszU >= UszU ->
+ ct:fail(failed);
+ true ->
+ ok
+ end,
+
ok.
+t_format_arbitrary(_Config) ->
+ error_logger:tty(false),
+ try
+ t_format_arbitrary()
+ after
+ error_logger:tty(true)
+ end,
+ ok.
+
+t_format_arbitrary() ->
+ A = list_to_atom([1024]),
+ do_test_format([fake_report, A], unlimited),
+ do_test_format([fake_report, A], 20),
+
+ do_test_format([fake_report, foo], unlimited),
+ do_test_format([fake_report, foo], 20),
+ do_test_format([fake_report, []], unlimited),
+ do_test_format([fake_report, []], 20).
+
do_test_format(Report, Depth) ->
- io:format("*** Depth = ~p", [Depth]),
- S0 = proc_lib:format(Report, latin1, Depth),
+ do_test_format(Report, latin1, Depth),
+ do_test_format(Report, unicode, Depth).
+
+do_test_format(Report, Encoding, Depth) ->
+ io:format("*** Depth = ~p, Encoding = ~p", [Depth, Encoding]),
+ S0 = proc_lib:format(Report, Encoding, Depth),
S = lists:flatten(S0),
- io:put_chars(S),
+ case Encoding of
+ latin1 -> io:format("~s\n", [S]);
+ _ -> io:format("~ts\n", [S])
+ end,
length(S).
-t_format_looper() ->
+'\x{aaa}t_format_looper'() ->
receive
{die,Data} ->
exit(Data);
- _ ->
- t_format_looper()
+ M ->
+ put(M, M),
+ '\x{aaa}t_format_looper'()
end.
%%-----------------------------------------------------------------
@@ -584,7 +632,7 @@ init(Tester) ->
{ok, Tester}.
handle_event({error_report, _GL, {Pid, crash_report, Report}}, Tester) ->
- io:format("~s\n", [proc_lib:format(Report)]),
+ io:format("~ts\n", [proc_lib:format(Report)]),
Tester ! {crash_report, Pid, Report},
{ok, Tester};
handle_event(_Event, State) ->
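
The reworked t_format cases above exercise proc_lib:format/3 with both encodings and with a limited depth; called directly it looks like this, where Report is a crash report as delivered to an error_logger handler:

    Limited = proc_lib:format(Report, latin1, 20),
    Full    = proc_lib:format(Report, unicode, unlimited),
    io:format("~ts~n", [Full]).
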
diff --git a/lib/stdlib/test/re_SUITE_data/testoutput1 b/lib/stdlib/test/re_SUITE_data/testoutput1
index a2b3cffe9d..eff8ecc948 100644
--- a/lib/stdlib/test/re_SUITE_data/testoutput1
+++ b/lib/stdlib/test/re_SUITE_data/testoutput1
@@ -9442,4 +9442,8 @@ No match
\ X
0: X
+/X+(?#comment)?/
+ >XXX<
+ 0: X
+
/-- End of testinput1 --/
diff --git a/lib/stdlib/test/re_SUITE_data/testoutput8 b/lib/stdlib/test/re_SUITE_data/testoutput8
index 17b667a980..4984376d3c 100644
--- a/lib/stdlib/test/re_SUITE_data/testoutput8
+++ b/lib/stdlib/test/re_SUITE_data/testoutput8
@@ -7801,4 +7801,8 @@ No match
** Show all captures ignored after DFA matching
0: a
+/(02-)?[0-9]{3}-[0-9]{3}/
+ 02-123-123
+ 0: 02-123-123
+
/-- End of testinput8 --/
diff --git a/lib/stdlib/test/shell_SUITE.erl b/lib/stdlib/test/shell_SUITE.erl
index 4f0fdc4c6a..217e8cc252 100644
--- a/lib/stdlib/test/shell_SUITE.erl
+++ b/lib/stdlib/test/shell_SUITE.erl
@@ -31,7 +31,7 @@
progex_lc/1, progex_funs/1,
otp_5990/1, otp_6166/1, otp_6554/1,
otp_7184/1, otp_7232/1, otp_8393/1, otp_10302/1, otp_13719/1,
- otp_14285/1, otp_14296/1]).
+ otp_14285/1, otp_14296/1, typed_records/1]).
-export([ start_restricted_from_shell/1,
start_restricted_on_command_line/1,restricted_local/1]).
@@ -74,10 +74,10 @@ suite() ->
{timetrap,{minutes,10}}].
all() ->
- [forget, records, known_bugs, otp_5226, otp_5327,
+ [forget, known_bugs, otp_5226, otp_5327,
otp_5435, otp_5195, otp_5915, otp_5916, {group, bits},
{group, refman}, {group, progex}, {group, tickets},
- {group, restricted}].
+ {group, restricted}, {group, records}].
groups() ->
[{restricted, [],
@@ -86,6 +86,8 @@ groups() ->
{bits, [],
[bs_match_misc_SUITE, bs_match_tail_SUITE,
bs_match_bin_SUITE, bs_construct_SUITE]},
+ {records, [],
+ [records, typed_records]},
{refman, [], [refman_bit_syntax]},
{progex, [],
[progex_bit_syntax, progex_records, progex_lc,
@@ -486,6 +488,48 @@ records(Config) when is_list(Config) ->
ok.
+%% Test of typed record support.
+typed_records(Config) when is_list(Config) ->
+ Test = filename:join(proplists:get_value(priv_dir, Config), "test.hrl"),
+ Contents = <<"-module(test).
+ -record(r0,{f :: any()}).
+ -record(r1,{f1 :: #r1{} | undefined, f2 :: #r0{} | atom()}).
+ -record(r2,{f :: #r2{} | undefined}).
+ ">>,
+ ok = file:write_file(Test, Contents),
+
+ RR1 = "rr(\"" ++ Test ++ "\"),
+ #r1{} = (#r1{f1=#r1{f1=undefined, f2=x}, f2 = #r0{}})#r1.f1,
+ ok.",
+ RR2 = "rr(\"" ++ Test ++ "\"),
+ #r0{} = (#r1{f1=#r1{f1=undefined, f2=x}, f2 = #r0{}})#r1.f2,
+ ok. ",
+ RR3 = "rr(\"" ++ Test ++ "\"),
+ #r1{f2=#r0{}} = (#r1{f1=#r1{f1=undefined, f2=#r0{}}, f2 = x})#r1.f1,
+ ok.",
+ RR4 = "rr(\"" ++ Test ++ "\"),
+ (#r1{f2 = #r0{}})#r1{f2 = x},
+ ok. ",
+ RR5 = "rr(\"" ++ Test ++ "\"),
+ (#r1{f2 = #r0{}})#r1{f1 = #r1{}},
+ ok. ",
+ RR6 = "rr(\"" ++ Test ++ "\"),
+ (#r2{f=#r2{f=undefined}})#r2.f,
+ ok.",
+ RR7 = "rr(\"" ++ Test ++ "\"),
+ #r2{} = (#r2{f=#r2{f=undefined}})#r2.f,
+ ok.",
+ [ok] = scan(RR1),
+ [ok] = scan(RR2),
+ [ok] = scan(RR3),
+ [ok] = scan(RR4),
+ [ok] = scan(RR5),
+ [ok] = scan(RR6),
+ [ok] = scan(RR7),
+
+ file:delete(Test),
+ ok.
+
%% Known bugs.
known_bugs(Config) when is_list(Config) ->
%% erl_eval:merge_bindings/2 cannot handle _removal_ of bindings.
diff --git a/lib/stdlib/test/unicode_expand.erl b/lib/stdlib/test/unicode_expand.erl
new file mode 100644
index 0000000000..41f741fa84
--- /dev/null
+++ b/lib/stdlib/test/unicode_expand.erl
@@ -0,0 +1,36 @@
+%%
+%% %CopyrightBegin%
+%%
+%% Copyright Ericsson AB 2017. All Rights Reserved.
+%%
+%% Licensed under the Apache License, Version 2.0 (the "License");
+%% you may not use this file except in compliance with the License.
+%% You may obtain a copy of the License at
+%%
+%% http://www.apache.org/licenses/LICENSE-2.0
+%%
+%% Unless required by applicable law or agreed to in writing, software
+%% distributed under the License is distributed on an "AS IS" BASIS,
+%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+%% See the License for the specific language governing permissions and
+%% limitations under the License.
+%%
+%% %CopyrightEnd%
+%%
+-module(unicode_expand).
+
+-export(['кlирилли́ческий атом'/0, 'кlирилли́ческий атом'/1,
+ 'кlирилли́ческий атомB'/1]).
+
+-export_type(['кlирилли́ческий атом'/0]).
+
+-type 'кlирилли́ческий атом'() :: integer().
+
+'кlирилли́ческий атом'() ->
+ 'кlирилли́ческий атом'('кlирилли́ческий атом').
+
+'кlирилли́ческий атом'(_Atom) ->
+ ok.
+
+'кlирилли́ческий атомB'(_B) ->
+ true.
diff --git a/lib/stdlib/vsn.mk b/lib/stdlib/vsn.mk
index f062c7fe6e..8a83cdec1e 100644
--- a/lib/stdlib/vsn.mk
+++ b/lib/stdlib/vsn.mk
@@ -1 +1 @@
-STDLIB_VSN = 3.4
+STDLIB_VSN = 3.4.1
diff --git a/lib/syntax_tools/src/epp_dodger.erl b/lib/syntax_tools/src/epp_dodger.erl
index cf1ba0abfa..0a12e8fd8b 100644
--- a/lib/syntax_tools/src/epp_dodger.erl
+++ b/lib/syntax_tools/src/epp_dodger.erl
@@ -866,10 +866,10 @@ tokens_to_string([]) ->
format_error(macro_args) ->
errormsg("macro call missing end parenthesis");
format_error({unknown, Reason}) ->
- errormsg(io_lib:format("unknown error: ~P", [Reason, 15])).
+ errormsg(io_lib:format("unknown error: ~tP", [Reason, 15])).
errormsg(String) ->
- io_lib:format("~s: ~s", [?MODULE, String]).
+ io_lib:format("~s: ~ts", [?MODULE, String]).
%% =====================================================================
diff --git a/lib/syntax_tools/src/erl_comment_scan.erl b/lib/syntax_tools/src/erl_comment_scan.erl
index a7a2c10b79..07e501e553 100644
--- a/lib/syntax_tools/src/erl_comment_scan.erl
+++ b/lib/syntax_tools/src/erl_comment_scan.erl
@@ -309,7 +309,7 @@ filename([C|T]) when is_integer(C), C > 0 ->
filename([]) ->
[];
filename(N) ->
- report_error("bad filename: `~P'.", [N, 25]),
+ report_error("bad filename: `~tP'.", [N, 25]),
exit(error).
error_read_file(Name) ->
diff --git a/lib/syntax_tools/src/erl_tidy.erl b/lib/syntax_tools/src/erl_tidy.erl
index 1ca60ea73b..5623aa6af3 100644
--- a/lib/syntax_tools/src/erl_tidy.erl
+++ b/lib/syntax_tools/src/erl_tidy.erl
@@ -193,7 +193,7 @@ dir_3(Name, Dir, Regexp, Env) ->
dir_1(Dir1, Regexp, Env).
dir_4(File, Regexp, Env) ->
- case re:run(File, Regexp) of
+ case re:run(File, Regexp, [unicode]) of
{match, _} ->
Opts = [{outfile, File}, {dir, ""} | Env#dir.options],
case catch file(File, Opts) of
@@ -301,6 +301,8 @@ file(Name, Opts) ->
{Child, ok} ->
ok;
{Child, {error, Reason}} ->
+ exit(Reason);
+ {'EXIT', Child, Reason} ->
exit(Reason)
end.
@@ -803,7 +805,7 @@ keep_form(Form, Used, Opts) ->
{F, A} = N,
File = proplists:get_value(file, Opts, ""),
report({File, erl_syntax:get_pos(Form),
- "removing unused function `~w/~w'."},
+ "removing unused function `~tw/~w'."},
[F, A], Opts),
false;
true ->
@@ -868,8 +870,8 @@ update_attribute(F, Imports, Opts) ->
Names ->
File = proplists:get_value(file, Opts, ""),
report({File, erl_syntax:get_pos(F),
- "removing unused imports:~s"},
- [[io_lib:fwrite("\n\t`~w:~w/~w'", [M, N, A])
+ "removing unused imports:~ts"},
+ [[io_lib:fwrite("\n\t`~w:~tw/~w'", [M, N, A])
|| {N, A} <- Names]], Opts)
end,
Is = [make_fname(N) || N <- Ns1],
@@ -1164,7 +1166,7 @@ visit_import_application({N, A} = Name, F, As, Tree, Env, St0) ->
case Expand of
true ->
report({Env#env.file, erl_syntax:get_pos(F),
- "expanding call to imported function `~w:~w/~w'."},
+ "expanding call to imported function `~w:~tw/~w'."},
[M, N, A], Env#env.verbosity),
F1 = erl_syntax:module_qualifier(erl_syntax:atom(M),
erl_syntax:atom(N)),
@@ -1218,7 +1220,7 @@ visit_spawn_call({N, A}, F, Ps, [A1, A2, A3] = As, Tree,
case erl_syntax:is_proper_list(A3) of
true ->
report({Env#env.file, erl_syntax:get_pos(F),
- "changing use of `~w/~w' to `~w/~w' with a fun."},
+ "changing use of `~tw/~w' to `~tw/~w' with a fun."},
[N, A, N, 1 + length(Ps)], Env#env.verbosity),
F1 = case erl_syntax:is_atom(A1, Env#env.module) of
true ->
@@ -1402,8 +1404,8 @@ visit_remote_application({M, N, A} = Name, F, As, Tree, Env, St) ->
case rename_remote_call(Name, St) of
{M1, N1} ->
report({Env#env.file, erl_syntax:get_pos(F),
- "updating obsolete call to `~w:~w/~w' "
- "to use `~w:~w/~w' instead."},
+ "updating obsolete call to `~w:~tw/~w' "
+ "to use `~w:~tw/~w' instead."},
[M, N, A, M1, N1, A], Env#env.verbosity),
M2 = erl_syntax:atom(M1),
N2 = erl_syntax:atom(N1),
@@ -1818,7 +1820,7 @@ filename([]) ->
filename(N) when is_atom(N) ->
atom_to_list(N);
filename(N) ->
- report_error("bad filename: `~P'.", [N, 25]),
+ report_error("bad filename: `~tP'.", [N, 25]),
exit(error).
get_env(Tree) ->
@@ -1909,11 +1911,11 @@ format({warning, D}, Vs) ->
format({recommend, D}, Vs) ->
["recommendation: ", format(D, Vs)];
format({"", L, D}, Vs) when is_integer(L), L > 0 ->
- [io_lib:fwrite("~w: ", [L]), format(D, Vs)];
+ [io_lib:fwrite("~tw: ", [L]), format(D, Vs)];
format({"", _L, D}, Vs) ->
format(D, Vs);
format({F, L, D}, Vs) when is_integer(L), L > 0 ->
- [io_lib:fwrite("~ts:~w: ", [filename(F), L]), format(D, Vs)];
+ [io_lib:fwrite("~ts:~tw: ", [filename(F), L]), format(D, Vs)];
format({F, _L, D}, Vs) ->
[io_lib:fwrite("~ts: ", [filename(F)]), format(D, Vs)];
format(S, Vs) when is_list(S) ->
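
The [unicode] option added to the re:run/3 call in dir_4/3 above matters because erl_tidy matches its regexp option against file names: a name given as a list of code points above 255 would otherwise make the match fail with badarg. Roughly, with an illustrative file name:

    {match, _} = re:run("モジュール.erl", "\\.erl$", [unicode]).
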
diff --git a/lib/syntax_tools/src/igor.erl b/lib/syntax_tools/src/igor.erl
index b92cd8d607..16e3511734 100644
--- a/lib/syntax_tools/src/igor.erl
+++ b/lib/syntax_tools/src/igor.erl
@@ -834,7 +834,7 @@ merge_sources_1(Name, Modules, Trees, Opts) ->
dict:from_list(Rs);
false ->
report_error("bad value for `redirect' option: "
- "~P.",
+ "~tP.",
[Rs, 10]),
exit(error)
end,
@@ -1069,7 +1069,7 @@ filter_forms_2(Forms, Env) ->
comment -> kill;
_ ->
report_error("invalid value for option "
- "`file_attributes': ~w.",
+ "`file_attributes': ~tw.",
[FileAttrsOpt]),
exit(error)
end,
@@ -1180,7 +1180,7 @@ merge_namespaces(Modules, Env) ->
[] ->
ok;
Fs ->
- report_warning("interface functions renamed:\n\t~p.", [Fs])
+ report_warning("interface functions renamed:\n\t~tp.", [Fs])
end,
{M4, Acc2} = merge_namespaces_1(M2, Acc1),
Ms = M3 ++ M4,
@@ -1778,7 +1778,7 @@ transform_function(T, Env, St) ->
{maybe_modified(V, T1, 2, Text, Env), St1}.
renaming_note(Name) ->
- [lists:flatten(io_lib:fwrite("renamed function to `~w'",
+ [lists:flatten(io_lib:fwrite("renamed function to `~tw'",
[Name]))].
rename_atom(Node, Atom) ->
@@ -2488,7 +2488,7 @@ rename(Files, Renamings, Opts) ->
true ->
dict:from_list(Renamings);
false ->
- report_error("bad module renaming: ~P.",
+ report_error("bad module renaming: ~tP.",
[Renamings, 10]),
exit(error)
end,
@@ -2672,7 +2672,7 @@ error_text(D, Name) ->
end.
error_text_1(D, Name) ->
- io_lib:fwrite("error: `~w', ~P.", [Name, D, 15]).
+ io_lib:fwrite("error: `~w', ~tP.", [Name, D, 15]).
check_records(Rs, Name) ->
case duplicates([N || {N, _} <- Rs]) of
@@ -2680,7 +2680,7 @@ check_records(Rs, Name) ->
ok;
Ns ->
report_error("in module `~w': "
- "multiply defined records: ~p.",
+ "multiply defined records: ~tp.",
[Name, Ns]),
exit(error)
end.
@@ -2694,7 +2694,7 @@ expand_imports(Is, Name) ->
ordsets:from_list(As);
Ns ->
report_error("in module `~w': "
- "multiply imported functions: ~p.",
+ "multiply imported functions: ~tp.",
[Name, Ns]),
exit(error)
end.
@@ -2968,7 +2968,7 @@ filename([]) ->
filename(N) when is_atom(N) ->
atom_to_list(N);
filename(N) ->
- report_error("bad filename: `~P'.", [N, 25]),
+ report_error("bad filename: `~tP'.", [N, 25]),
exit(error).
duplicates(Xs) ->
@@ -3031,7 +3031,7 @@ split_lines_1(Cs, Cs1, Ls) ->
%% Reporting
warning_unsafe_call(Name, Module, Target) ->
- report_warning("call to `~w' in module `~w' "
+ report_warning("call to `~tw' in module `~w' "
"possibly unsafe in `~s'.", [Name, Module, Target]).
warning_apply_2(Module, Target) ->
diff --git a/lib/syntax_tools/src/merl.erl b/lib/syntax_tools/src/merl.erl
index d6cf208998..b503944442 100644
--- a/lib/syntax_tools/src/merl.erl
+++ b/lib/syntax_tools/src/merl.erl
@@ -565,13 +565,13 @@ parse_5(Ts, Es) ->
-dialyzer({nowarn_function, parse_error/1}). % no local return
parse_error({L, M, R}) when is_atom(M), is_integer(L) ->
- fail("~w: ~s", [L, M:format_error(R)]);
+ fail("~w: ~ts", [L, M:format_error(R)]);
parse_error({{L,C}, M, R}) when is_atom(M), is_integer(L), is_integer(C) ->
- fail("~w:~w: ~s", [L,C,M:format_error(R)]);
+ fail("~w:~w: ~ts", [L,C,M:format_error(R)]);
parse_error({_, M, R}) when is_atom(M) ->
fail(M:format_error(R));
parse_error(R) ->
- fail("unknown parse error: ~p", [R]).
+ fail("unknown parse error: ~tp", [R]).
%% ------------------------------------------------------------------------
%% Templates, substitution and matching
diff --git a/lib/syntax_tools/test/syntax_tools_SUITE.erl b/lib/syntax_tools/test/syntax_tools_SUITE.erl
index 868f43b8ee..ae2c67c03e 100644
--- a/lib/syntax_tools/test/syntax_tools_SUITE.erl
+++ b/lib/syntax_tools/test/syntax_tools_SUITE.erl
@@ -239,6 +239,12 @@ t_erl_tidy(Config) when is_list(Config) ->
DataDir = ?config(data_dir, Config),
File = filename:join(DataDir,"erl_tidy_tilde.erl"),
ok = erl_tidy:file(File, [{stdout, true}]),
+
+ %% OTP-14471.
+ Old = process_flag(trap_exit, true),
+ NonExisting = filename:join(DataDir,"non_existing_file.erl"),
+ {'EXIT',{error,{0,file,enoent}}} = (catch erl_tidy:file(NonExisting)),
+ true = process_flag(trap_exit, Old),
ok.
test_comment_scan([],_) -> ok;
diff --git a/lib/tools/doc/src/lcnt.xml b/lib/tools/doc/src/lcnt.xml
index 31e5c241e9..5bdfc60448 100644
--- a/lib/tools/doc/src/lcnt.xml
+++ b/lib/tools/doc/src/lcnt.xml
@@ -109,14 +109,6 @@
statistics. If the server held any lock statistics data before the collect then
that data is lost.
</p>
- <note>
- <p>
- When collection occurs the runtime system transitions to a single thread,
- blocking all other threads. No other tasks will be scheduled during this
- operation. Depending on the size of the data this might take a long time
- (several seconds) and cause timeouts in the system.
- </p>
- </note>
</desc>
</func>
@@ -322,24 +314,22 @@
<func>
<name>apply(Fun) -> term()</name>
<fsummary>Same as <c>apply(Fun, [])</c>.</fsummary>
+ <type>
+ <v>Fun = fun()</v>
+ </type>
<desc>
<p>Same as <c>apply(Fun, [])</c>.</p>
</desc>
</func>
<func>
<name>apply(Fun, Args) -> term()</name>
- <fsummary>Clears counters, applies function and collects the profiling results.</fsummary>
+ <fsummary>Same as <c>apply(Module, Function, Args)</c>.</fsummary>
<type>
<v>Fun = fun()</v>
<v>Args = [term()]</v>
</type>
<desc>
- <p> Clears the lock counters and then setups the instrumentation to save all destroyed locks.
- After setup the fun is called, passing the elements in <c>Args</c> as arguments.
- When the fun returns the statistics are immediately collected to the server. After the
- collection the instrumentation is returned to its previous behavior.
- The result of the applied fun is returned.
- </p>
+ <p>Same as <c>apply(Module, Function, Args)</c>.</p>
</desc>
</func>
<func>
@@ -357,6 +347,13 @@
collection the instrumentation is returned to its previous behavior.
The result of the applied function is returned.
</p>
+ <warning>
+ <p>
+ This function should only be used for micro-benchmarks; it sets <c>copy_save</c>
+ to <c>true</c> for the duration of the call, which can quickly lead to running
+ out of memory.
+ </p>
+ </warning>
</desc>
</func>
@@ -429,6 +426,68 @@
<desc> <p>Clear the internal counters. Same as <c>lcnt:clear(Node)</c>.</p></desc>
</func>
+ <func>
+ <name>rt_mask() -> [category_atom()]</name>
+ <fsummary>Same as <c>rt_mask(node())</c>.</fsummary>
+ <desc><p>Same as <c>rt_mask(node())</c>.</p></desc>
+ </func>
+
+ <func>
+ <name>rt_mask(Node) -> [category_atom()]</name>
+ <fsummary>Returns the current lock category mask.</fsummary>
+ <type>
+ <v>Node = node()</v>
+ </type>
+ <desc>
+ <p>
+ Refer to <c>rt_mask/2</c> for a list of valid categories. All
+ categories are enabled by default.
+ </p>
+ </desc>
+ </func>
+
+ <func>
+ <name>rt_mask(Categories) -> ok | {error, copy_save_enabled}</name>
+ <fsummary>Same as <c>rt_mask(node(), Categories)</c>.</fsummary>
+ <type>
+ <v>Categories = [atom()]</v>
+ </type>
+ <desc><p>Same as <c>rt_mask(node(), Categories)</c>.</p></desc>
+ </func>
+
+ <func>
+ <name>rt_mask(Node, Categories) -> ok | {error, copy_save_enabled}</name>
+ <fsummary>Changes the lock category mask.</fsummary>
+ <type>
+ <v>Node = node()</v>
+ <v>Categories = [atom()]</v>
+ </type>
+ <desc>
+ <p>
+ Sets the lock category mask to the given categories.
+ </p>
+ <p>
+ This will fail if the <c>copy_save</c> option is enabled; see
+ <c>lcnt:rt_opt/2</c>.
+ </p>
+ <p>Valid categories are:</p>
+ <list>
+ <item><c>allocator</c></item>
+ <item><c>db</c> (ETS tables)</item>
+ <item><c>debug</c></item>
+ <item><c>distribution</c></item>
+ <item><c>generic</c></item>
+ <item><c>io</c></item>
+ <item><c>process</c></item>
+ <item><c>scheduler</c></item>
+ </list>
+ <p>
+ This list is subject to change at any time, as is the category any given lock
+ may belong to.
+ </p>
+ </desc>
+ </func>
+
<func>
<name>rt_opt({Type, bool()}) -> bool()</name>
<fsummary>Same as <c>rt_opt(node(), {Type, Opt})</c>.</fsummary>
@@ -442,16 +501,25 @@
<v>Type = copy_save | process_locks</v>
</type>
<desc>
- <p>Changes the lock counter behavior and returns the previous behaviour.</p>
<p>Option description:</p>
<taglist>
<tag><c>{copy_save, bool()}</c></tag>
- <item>Enable statistics saving from destroyed locks by copying. This might consume a lot of memory.
+ <item>Retains the statistics of destroyed locks.
<br/>Default: <c>false</c>
+ <warning>
+ <p>
+ This option will use a lot of memory when enabled, which must be
+ reclaimed with <c>lcnt:rt_clear</c>. Note that it makes no distinction
+ between locks that were destroyed and locks for which counting was
+ disabled, so enabling this option will disable changes to the lock
+ category mask.
+ </p>
+ </warning>
</item>
<tag><c>{process_locks, bool()}</c></tag>
- <item>Profile process locks.
+ <item>Profile process locks; equivalent to adding <c>process</c> to the lock category mask,
+ see <c>lcnt:rt_mask/2</c>.
<br/>Default: <c>true</c>
</item>
</taglist>
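
Going by the rt_mask documentation added above, restricting lock counting to a couple of categories would look something like this (category names taken from the list above; availability may vary between emulator versions):

    ok = lcnt:rt_mask(node(), [db, process]),  %% count only ETS-table and process locks
    Mask = lcnt:rt_mask(node()),               %% read the current mask back
    io:format("~p~n", [Mask]).
    %% rt_mask/2 returns {error, copy_save_enabled} while copy_save is active.
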
diff --git a/lib/tools/doc/src/lcnt_chapter.xml b/lib/tools/doc/src/lcnt_chapter.xml
index c73fcb31e0..24b58136aa 100644
--- a/lib/tools/doc/src/lcnt_chapter.xml
+++ b/lib/tools/doc/src/lcnt_chapter.xml
@@ -29,7 +29,7 @@
<approved>nobody</approved>
<checked>no</checked>
<date>2009-11-26</date>
- <rev>PA1</rev>
+ <rev>PA2</rev>
<file>lcnt_chapter.xml</file>
</header>
<p>
@@ -97,8 +97,11 @@ ok
ok
</pre>
<p>
- Another way to to profile a specific function is to use <c>lcnt:apply/3</c> or <c>lcnt:apply/1</c> which does <c>lcnt:clear/0</c> before the function and <c>lcnt:collect/0</c> after its invocation.
- It also sets <c>copy_save</c> to <c>true</c> for the duration of the function call
+ Another way to profile a specific function is to use <c>lcnt:apply/3</c> or <c>lcnt:apply/1</c>
+ which does <c>lcnt:clear/0</c> before the function and <c>lcnt:collect/0</c> after its invocation.
+ This method should only be used in micro-benchmarks since it sets <c>copy_save</c> to <c>true</c>
+ for the duration of the function call, which may cause the emulator to run out of memory if
+ attempted under load.
</p>
<pre>
Erlang R13B03 (erts-5.7.4) [source] [smp:8:8] [rq:8] [async-threads:0] [hipe]
diff --git a/lib/tools/doc/src/notes.xml b/lib/tools/doc/src/notes.xml
index bdd5455354..f0df43bf2b 100644
--- a/lib/tools/doc/src/notes.xml
+++ b/lib/tools/doc/src/notes.xml
@@ -31,6 +31,26 @@
</header>
<p>This document describes the changes made to the Tools application.</p>
+<section><title>Tools 2.10.1</title>
+
+ <section><title>Fixed Bugs and Malfunctions</title>
+ <list>
+ <item>
+ <p>
+ In OTP-20.0, the behavior of c, make, and ct_make was
+ changed so that in some cases the beam files by default
+ would be written to the directory where the source files
+ were found. This is now changed back to the old behavior
+ so beam files are by default written to the current
+ directory.</p>
+ <p>
+ Own Id: OTP-14489 Aux Id: ERL-438 </p>
+ </item>
+ </list>
+ </section>
+
+</section>
+
<section><title>Tools 2.10</title>
<section><title>Fixed Bugs and Malfunctions</title>
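
The OTP-14489 note above concerns where c:c/1 and friends place the generated beam file: after the fix they once again default to the current working directory, which the new c_SUITE cases exercise. In shell terms (a sketch; the paths are illustrative):

    1> file:set_cwd("/tmp/build").
    ok
    2> c:c("/home/me/src/m.erl").
    {ok,m}
    3> filelib:is_file("m.beam").
    true
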
diff --git a/lib/tools/doc/src/venn2.fig b/lib/tools/doc/src/venn2.fig
index 3694c12f0c..233686a729 100644
--- a/lib/tools/doc/src/venn2.fig
+++ b/lib/tools/doc/src/venn2.fig
@@ -1,4 +1,4 @@
-#FIG 3.2
+#FIG 3.2 Produced by xfig version 3.2.5c
Portrait
Center
Inches
@@ -7,34 +7,7 @@ Letter
Single
-2
1200 2
-6 3392 953 5034 3329
-6 3392 953 5034 2595
-6 3392 953 5034 2595
-5 1 0 1 -1 7 0 0 -1 0.000 0 0 0 0 2652.489 1773.500 3518 1357 3613 1774 3518 2190
-5 1 0 1 -1 7 0 0 -1 0.000 0 0 0 0 6306.956 1773.000 4028 2575 3891 1774 4028 971
-5 1 0 1 -1 7 0 0 -1 0.000 0 0 0 0 2105.283 1773.000 4402 971 4538 1774 4402 2575
-1 1 0 1 -1 7 0 0 -1 0.000 1 0.0000 4214 1774 820 821 4214 1774 3659 1171
-2 1 0 1 -1 7 0 0 -1 0.000 0 0 -1 0 0 1
- 4821 2325
-2 1 0 1 -1 7 0 0 -1 0.000 0 0 -1 0 0 2
- 4816 1217 4816 2329
-2 1 0 1 -1 7 0 0 -1 0.000 0 0 7 0 0 2
- 3392 1769 4816 1769
-2 1 0 1 0 0 100 0 1 0.000 0 0 -1 0 0 2
- 4816 1982 5008 1982
--6
-2 3 0 0 0 0 101 0 5 0.000 0 0 -1 0 0 36
- 4026 977 4011 1025 3996 1072 3981 1120 3966 1177 3954 1225
- 3944 1272 3929 1327 3919 1412 3909 1477 3899 1540 3894 1592
- 3894 1642 3891 1697 3889 1742 3889 1770 3394 1767 3396 1717
- 3399 1665 3409 1610 3424 1555 3439 1502 3464 1440 3489 1390
- 3516 1340 3551 1292 3584 1250 3631 1200 3679 1150 3731 1110
- 3801 1065 3869 1030 3931 1005 3986 982 4009 980 4026 977
--6
-4 0 0 101 0 0 11 0.0000 4 105 525 3965 3044 X - XU\001
-4 0 0 101 0 0 11 0.0000 4 150 1110 3688 3299 exports_not_used\001
--6
-6 5850 938 7560 3329
+6 5850 938 8070 3344
6 5884 938 7526 2580
6 5884 938 7526 2580
5 1 0 1 -1 7 0 0 -1 0.000 0 0 0 0 5144.489 1758.500 6010 1342 6105 1759 6010 2175
@@ -63,8 +36,8 @@ Single
7019 1990 7022 1945 7027 1900 7029 1855 7029 1805 7032 1765
7029 1752 7309 1757
-6
-4 0 0 101 0 0 11 0.0000 4 135 1470 6000 3014 L * (UU + (XU - LU))\001
-4 0 0 101 0 0 11 0.0000 4 150 1800 5850 3299 locals_not_used (simplified)\001
+4 0 0 101 0 0 11 0.0000 4 180 2070 6000 3014 (L-OL) * (UU + (XU-LU))\001
+4 0 0 101 0 0 11 0.0000 4 180 2160 5850 3299 locals_not_used (simplified)\001
-6
6 900 900 2550 3600
6 900 900 2550 2625
@@ -91,7 +64,34 @@ Single
2330 1222 2365 1265 2402 1317 2437 1382 2477 1455 2500 1517
2520 1585 2532 1645 2540 1712 2542 1780 2540 1842 2535 1907
2527 1957 2517 1990 2325 1987 2330 1222
-4 0 0 101 0 0 11 0.0000 4 105 780 1331 3044 XU - X - B\001
-4 0 0 101 0 0 11 0.0000 4 150 1260 1113 3314 undefined_functions\001
+4 0 0 101 0 0 11 0.0000 4 135 825 1331 3044 XU - X - B\001
+4 0 0 101 0 0 11 0.0000 4 180 1530 1113 3314 undefined_functions\001
4 0 0 100 0 0 10 0.0000 4 135 1005 1275 3525 (modules mode)\001
-6
+6 3392 953 5034 3329
+6 3392 953 5034 2595
+6 3392 953 5034 2595
+5 1 0 1 -1 7 0 0 -1 0.000 0 0 0 0 2652.489 1773.500 3518 1357 3613 1774 3518 2190
+5 1 0 1 -1 7 0 0 -1 0.000 0 0 0 0 6306.956 1773.000 4028 2575 3891 1774 4028 971
+5 1 0 1 -1 7 0 0 -1 0.000 0 0 0 0 2105.283 1773.000 4402 971 4538 1774 4402 2575
+1 1 0 1 -1 7 0 0 -1 0.000 1 0.0000 4214 1774 820 821 4214 1774 3659 1171
+2 1 0 1 -1 7 0 0 -1 0.000 0 0 -1 0 0 1
+ 4821 2325
+2 1 0 1 -1 7 0 0 -1 0.000 0 0 -1 0 0 2
+ 4816 1217 4816 2329
+2 1 0 1 -1 7 0 0 -1 0.000 0 0 7 0 0 2
+ 3392 1769 4816 1769
+2 1 0 1 0 0 100 0 1 0.000 0 0 -1 0 0 2
+ 4816 1982 5008 1982
+-6
+2 3 0 0 0 0 101 0 5 0.000 0 0 -1 0 0 36
+ 4026 977 4011 1025 3996 1072 3981 1120 3966 1177 3954 1225
+ 3944 1272 3929 1327 3919 1412 3909 1477 3899 1540 3894 1592
+ 3894 1642 3891 1697 3889 1742 3889 1770 3394 1767 3396 1717
+ 3399 1665 3409 1610 3424 1555 3439 1502 3464 1440 3489 1390
+ 3516 1340 3551 1292 3584 1250 3631 1200 3679 1150 3731 1110
+ 3801 1065 3869 1030 3931 1005 3986 982 4009 980 4026 977
+-6
+4 0 0 101 0 0 11 0.0000 4 135 555 3965 3044 X - XU\001
+4 0 0 101 0 0 11 0.0000 4 180 1350 3688 3299 exports_not_used\001
+-6
diff --git a/lib/tools/doc/src/venn2.gif b/lib/tools/doc/src/venn2.gif
index 4cfea24646..bb12f4bd1f 100644
--- a/lib/tools/doc/src/venn2.gif
+++ b/lib/tools/doc/src/venn2.gif
Binary files differ
diff --git a/lib/tools/doc/src/xref.xml b/lib/tools/doc/src/xref.xml
index 8c49f3a206..6f833246ad 100644
--- a/lib/tools/doc/src/xref.xml
+++ b/lib/tools/doc/src/xref.xml
@@ -4,7 +4,7 @@
<erlref>
<header>
<copyright>
- <year>2000</year><year>2016</year>
+ <year>2000</year><year>2017</year>
<holder>Ericsson AB. All Rights Reserved.</holder>
</copyright>
<legalnotice>
@@ -347,6 +347,9 @@ represented by
<item>Locally Used Functions (*). Functions of all modules that have
been used in some local call.
</item>
+ <tag><c>OL</c></tag>
+ <item>Functions with an attribute tag <c>on_load</c> (*).
+ </item>
<tag><c>LC</c></tag>
<item>Local Calls (*).</item>
<tag><c>XC</c></tag>
@@ -393,6 +396,7 @@ facts about the
<c>LU</c> and <c>XU</c> may have elements in common. Put in
another way:</item>
<item><c>V</c> is equal to <c>UU + XU + LU</c>.</item>
+ <item><c>OL</c> is a subset of <c>F</c>.</item>
<item><c>E</c> is equal to <c>LC + XC</c>. Note that <c>LC</c>
and <c>XC</c> may have elements in common, namely if some
function is locally and externally used from one and the same
@@ -559,8 +563,10 @@ Two functions (modules,
analyzing operators:
</p>
<list type="bulleted">
- <item>Expression ::= Expression GraphOp Expression</item>
- <item>GraphOp ::= <c>components</c> | <c>condensation</c> | <c>of</c></item>
+ <item>Expression ::= Expression BinaryGraphOp Expression</item>
+ <item>Expression ::= UnaryGraphOp Expression</item>
+ <item>UnaryGraphOp ::= <c>components</c> | <c>condensation</c></item>
+ <item>BinaryGraphOp ::= <c>of</c></item>
</list>
<p>As was mentioned before, the graph analyses operate on
the <c>digraph</c> representation of graphs.
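
The new OL variable above (functions carrying an on_load attribute) is what the updated venn2 figure folds into the simplified locals_not_used formula, so that on_load functions are not flagged as unused. As a query it could be used along these lines, assuming S is an xref server started in the default functions mode with the relevant modules added:

    {ok, Unused} = xref:q(S, "(L - OL) * (UU + (XU - LU))"),
    %% or ask for the predefined analysis, which applies the same formula:
    {ok, Unused2} = xref:analyze(S, locals_not_used).
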
diff --git a/lib/tools/emacs/erlang.el b/lib/tools/emacs/erlang.el
index 438abc2d29..411e0e13df 100644
--- a/lib/tools/emacs/erlang.el
+++ b/lib/tools/emacs/erlang.el
@@ -1,10 +1,10 @@
-;;; erlang.el --- Major modes for editing and running Erlang
+;;; erlang.el --- Major modes for editing and running Erlang -*- lexical-binding: t; -*-
;; Copyright (C) 2004 Free Software Foundation, Inc.
;; Author: Anders Lindgren
;; Keywords: erlang, languages, processes
;; Date: 2011-12-11
-;; Version: 2.7.0
+;; Version: 2.8.0
;; Package-Requires: ((emacs "24.1"))
;; %CopyrightBegin%
@@ -84,7 +84,7 @@
"The Erlang programming language."
:group 'languages)
-(defconst erlang-version "2.7"
+(defconst erlang-version "2.8.0"
"The version number of Erlang mode.")
(defcustom erlang-root-dir nil
@@ -931,6 +931,7 @@ resulting regexp is surrounded by \\_< and \\_>."
"has_prepared_code_on_load"
"hibernate"
"insert_element"
+ "iolist_to_iovec"
"is_builtin"
"load_nif"
"loaded"
@@ -1019,26 +1020,15 @@ files written in other languages than Erlang.")
If nil, the inferior shell replaces the window. This is the traditional
behaviour.")
-(defconst inferior-erlang-use-cmm (boundp 'minor-mode-overriding-map-alist)
- "Non-nil means use `compilation-minor-mode' in Erlang shell.")
-
(defvar erlang-mode-map
(let ((map (make-sparse-keymap)))
- (unless (boundp 'indent-line-function)
- (define-key map "\t" 'erlang-indent-command))
(define-key map ";" 'erlang-electric-semicolon)
(define-key map "," 'erlang-electric-comma)
(define-key map "<" 'erlang-electric-lt)
(define-key map ">" 'erlang-electric-gt)
(define-key map "\C-m" 'erlang-electric-newline)
- (if (not (boundp 'delete-key-deletes-forward))
- (define-key map "\177" 'backward-delete-char-untabify)
- (define-key map [(backspace)] 'backward-delete-char-untabify))
- ;;(unless (boundp 'fill-paragraph-function)
+ (define-key map [(backspace)] 'backward-delete-char-untabify)
(define-key map "\M-q" 'erlang-fill-paragraph)
- (unless (boundp 'beginning-of-defun-function)
- (define-key map "\M-\C-a" 'erlang-beginning-of-function)
- (define-key map "\M-\C-e" 'erlang-end-of-function))
(define-key map "\M-\t" 'erlang-complete-tag)
(define-key map "\C-c\M-\t" 'tempo-complete-tag)
(define-key map "\M-+" 'erlang-find-next-tag)
@@ -1057,8 +1047,6 @@ behaviour.")
(define-key map "\C-c\C-y" 'erlang-clone-arguments)
(define-key map "\C-c\C-a" 'erlang-align-arrows)
(define-key map "\C-c\C-z" 'erlang-shell-display)
- (unless inferior-erlang-use-cmm
- (define-key map "\C-x`" 'erlang-next-error))
map)
"Keymap used in Erlang mode.")
(defvar erlang-mode-abbrev-table nil
@@ -2083,12 +2071,6 @@ This function is aware of imported functions."
(when funcname
(erlang-man-find-function (current-buffer) funcname))))))
-(defun erlang-default-function-or-module ()
- (let ((id (erlang-get-identifier-at-point)))
- (if (eq (erlang-id-kind id) 'qualified-function)
- (format "%s:%s" (erlang-id-module id) (erlang-id-name id))
- (erlang-id-name id))))
-
;; Should the defadvice be at the top level, the package `advice' would
;; be required. Now it is only required when this functionality
@@ -3396,14 +3378,6 @@ at the end."
;;; Information retrieval functions.
-(defun erlang-buffer-substring (beg end)
- "Like `buffer-substring-no-properties'.
-Although, this function works on all versions of Emacs."
- (if (fboundp 'buffer-substring-no-properties)
- (funcall (symbol-function 'buffer-substring-no-properties) beg end)
- (buffer-substring beg end)))
-
-
(defun erlang-get-module ()
"Return the name of the module as specified by `-module'.
@@ -3421,7 +3395,7 @@ Return nil if file contains no `-module' attribute."
"\\)?\\)\\s *)\\s *\\."))
(point-max) t)
(erlang-remove-quotes
- (erlang-buffer-substring (match-beginning 1)
+ (buffer-substring-no-properties (match-beginning 1)
(match-end 1)))
nil)
(store-match-data md))))))
@@ -3475,10 +3449,10 @@ corresponds to the order of the parsed Erlang list."
(setq res (cons
(cons
(erlang-remove-quotes
- (erlang-buffer-substring
+ (buffer-substring-no-properties
(match-beginning 1) (match-end 1)))
(string-to-number
- (erlang-buffer-substring
+ (buffer-substring-no-properties
(match-beginning
(+ 1 erlang-atom-regexp-matches))
(match-end
@@ -3525,7 +3499,7 @@ function and arity as cdr part."
(erlang-skip-blank)
(if (looking-at erlang-atom-regexp)
(let ((module (erlang-remove-quotes
- (erlang-buffer-substring
+ (buffer-substring-no-properties
(match-beginning 0)
(match-end 0)))))
(goto-char (match-end 0))
@@ -3558,7 +3532,7 @@ Normally used in conjunction with `erlang-beginning-of-clause', e.g.:
(let ((n (if arg 0 1)))
(and (looking-at (eval-when-compile
(concat "^" erlang-atom-regexp "\\s *(")))
- (erlang-buffer-substring (match-beginning n) (match-end n)))))
+ (buffer-substring-no-properties (match-beginning n) (match-end n)))))
(defun erlang-get-function-arrow ()
@@ -3572,7 +3546,7 @@ Normally used in conjunction with `erlang-beginning-of-clause', e.g.:
(and
(save-excursion
(re-search-forward "->" (point-max) t)
- (erlang-buffer-substring (- (point) 2) (+ (point) 1)))))
+ (buffer-substring-no-properties (- (point) 2) (+ (point) 1)))))
(defun erlang-get-function-arity ()
"Return the number of arguments of function at point, or nil."
@@ -3638,12 +3612,14 @@ The return value is a string of the form \"foo/1\"."
(let ((start (match-end 0)))
(goto-char (- start 1))
(forward-sexp)
- (erlang-buffer-substring start (- (point) 1)))
+ (buffer-substring-no-properties start (- (point) 1)))
(error nil)))))
-;; Keeping erlang-get-function-under-point for backward compatibility.
-;; It is used by erldoc.el and maybe other code out there.
+;; erlang-get-function-under-point is replaced by
+;; erlang-get-identifier-at-point as far as internal erlang.el usage
+;; is concerned. But it is kept for backward compatibility. It is
+;; used by erldoc.el and maybe other code out there.
(defun erlang-get-function-under-point ()
"Return the module and function under the point, or nil.
@@ -3701,10 +3677,10 @@ of arguments could be found, otherwise nil."
(defun erlang-get-qualified-function-id-at-point ()
(let ((kind 'qualified-function)
(module (erlang-remove-quotes
- (erlang-buffer-substring
+ (buffer-substring-no-properties
(match-beginning 1) (match-end 1))))
(name (erlang-remove-quotes
- (erlang-buffer-substring
+ (buffer-substring-no-properties
(match-beginning (1+ erlang-atom-regexp-matches))
(match-end (1+ erlang-atom-regexp-matches)))))
(arity (progn
@@ -3716,14 +3692,14 @@ of arguments could be found, otherwise nil."
(let ((kind 'module)
(module nil)
(name (erlang-remove-quotes
- (erlang-buffer-substring (match-beginning 1)
+ (buffer-substring-no-properties (match-beginning 1)
(match-end 1))))
(arity nil))
(list kind module name arity)))
(defun erlang-get-some-other-id-at-point ()
(let ((name (erlang-remove-quotes
- (erlang-buffer-substring
+ (buffer-substring-no-properties
(match-beginning 0) (match-end 0))))
(imports (erlang-get-import))
kind module arity)
@@ -3790,6 +3766,21 @@ of arguments could be found, otherwise nil."
(nth 3 (erlang-id-to-list id)))
+(defun erlang-default-function-or-module ()
+ (erlang-with-id (kind module name) (erlang-get-identifier-at-point)
+ (let ((x (cond ((eq kind 'module)
+ (format "%s:" name))
+ ((eq kind 'record)
+ (format "-record(%s" name))
+ ((eq kind 'macro)
+ (format "-define(%s" name))
+ (t
+ name))))
+ (if module
+ (format "%s:%s" module x)
+ x))))
+
+
;; TODO: Escape single quotes inside the string without
;; replace-regexp-in-string.
(defun erlang-add-quotes-if-needed (str)
@@ -4881,7 +4872,12 @@ considered first when it is time to jump to the definition.")
'(progn
(cl-defmethod xref-backend-identifier-at-point
((_backend (eql erlang-etags)))
- (erlang-id-to-string (erlang-get-identifier-at-point)))
+ (if (eq this-command 'xref-find-references)
+ (if (use-region-p)
+ (buffer-substring-no-properties (region-beginning)
+ (region-end))
+ (thing-at-point 'symbol))
+ (erlang-id-to-string (erlang-get-identifier-at-point))))
(cl-defmethod xref-backend-definitions
((_backend (eql erlang-etags)) identifier)
@@ -4992,9 +4988,10 @@ considered first when it is time to jump to the definition.")
(and (fboundp 'xref-make)
(fboundp 'xref-make-file-location)
(let* ((first-time t)
+ (cbuf (current-buffer))
xrefs matching-files)
(save-excursion
- (while (visit-tags-table-buffer (not first-time))
+ (while (erlang-visit-tags-table-buffer (not first-time) cbuf)
(setq first-time nil)
(let ((files (tags-table-files)))
(while files
@@ -5010,6 +5007,10 @@ considered first when it is time to jump to the definition.")
(setq files (cdr files))))))
(nreverse xrefs))))
+(defun erlang-visit-tags-table-buffer (cont cbuf)
+ (if (< emacs-major-version 26)
+ (visit-tags-table-buffer cont)
+ (visit-tags-table-buffer cont cbuf)))
(defun erlang-xref-find-definitions-module-tag (module
tag
@@ -5113,7 +5114,7 @@ Erlang compilation package.")
"Command to execute to go to the next error.
Change this variable to use your favorite Erlang compilation
-package. Not used in Emacs 21.")
+package.")
;;;###autoload
@@ -5172,6 +5173,13 @@ future, a new shell on an already running host will be started."
(defvar erlang-shell-buffer-name "*erlang*"
"The name of the Erlang link shell buffer.")
+(defcustom erlang-shell-prompt-read-only t
+ "If non-nil, the prompt will be read-only.
+
+Also see the description of `ielm-prompt-read-only'."
+ :type 'boolean
+ :package-version '(erlang . "2.8.0"))
+
(defvar erlang-shell-mode-map nil
"Keymap used by Erlang shells.")
@@ -5212,17 +5220,11 @@ The following special commands are available:
(setq erlang-shell-mode-map (copy-keymap comint-mode-map))
(erlang-shell-mode-commands erlang-shell-mode-map))
(use-local-map erlang-shell-mode-map)
- (unless inferior-erlang-use-cmm
- ;; This was originally not a marker, but it needs to be, at least
- ;; in Emacs 21, and should be backwards-compatible. Otherwise,
- ;; would need to test whether compilation-parsing-end is a marker
- ;; after requiring `compile'.
- (set (make-local-variable 'compilation-parsing-end) (copy-marker 1))
- (set (make-local-variable 'compilation-error-list) nil)
- (set (make-local-variable 'compilation-old-error-list) nil))
;; Needed when compiling directly from the Erlang shell.
(setq compilation-last-buffer (current-buffer))
(setq comint-prompt-regexp "^[^>=]*> *")
+ (make-local-variable 'comint-prompt-read-only)
+ (setq comint-prompt-read-only erlang-shell-prompt-read-only)
(setq comint-eol-on-send t)
(setq comint-input-ignoredups t)
(setq comint-scroll-show-maximum-output t)
@@ -5236,24 +5238,20 @@ The following special commands are available:
(comint-read-input-ring t)
(make-local-variable 'kill-buffer-hook)
(add-hook 'kill-buffer-hook 'comint-write-input-ring)
- ;; At least in Emacs 21, we need to be in `compilation-minor-mode'
- ;; for `next-error' to work. We can avoid it clobbering the shell
- ;; keys thus.
- (when inferior-erlang-use-cmm
- (compilation-minor-mode 1)
- (set (make-local-variable 'minor-mode-overriding-map-alist)
- `((compilation-minor-mode
- . ,(let ((map (make-sparse-keymap)))
- ;; It would be useful to put keymap properties on the
- ;; error lines so that we could use RET and mouse-2
- ;; on them directly.
- (when (boundp 'compilation-skip-threshold) ; new compile.el
- (define-key map [mouse-2] #'erlang-mouse-2-command)
- (define-key map "\C-m" #'erlang-RET-command))
- (if (boundp 'compilation-menu-map)
- (define-key map [menu-bar compilation]
- (cons "Errors" compilation-menu-map)))
- map)))))
+ (compilation-minor-mode 1)
+ (set (make-local-variable 'minor-mode-overriding-map-alist)
+ `((compilation-minor-mode
+ . ,(let ((map (make-sparse-keymap)))
+ ;; It would be useful to put keymap properties on the
+ ;; error lines so that we could use RET and mouse-2
+ ;; on them directly.
+ (when (boundp 'compilation-skip-threshold) ; new compile.el
+ (define-key map [mouse-2] #'erlang-mouse-2-command)
+ (define-key map "\C-m" #'erlang-RET-command))
+ (if (boundp 'compilation-menu-map)
+ (define-key map [menu-bar compilation]
+ (cons "Errors" compilation-menu-map)))
+ map))))
(erlang-tags-init)
(run-hooks 'erlang-shell-mode-hook))
@@ -5282,9 +5280,7 @@ Selects Comint or Compilation mode command as appropriate."
(define-key map "\C-a" 'comint-bol) ; Normally the other way around.
(define-key map "\C-c\C-a" 'beginning-of-line)
(define-key map "\C-d" nil) ; Was `comint-delchar-or-maybe-eof'
- (define-key map "\M-\C-m" 'compile-goto-error)
- (unless inferior-erlang-use-cmm
- (define-key map "\C-x`" 'erlang-next-error)))
+ (define-key map "\M-\C-m" 'compile-goto-error))
;;;
;;; Inferior Erlang -- Run an Erlang shell as a subprocess.
@@ -5895,35 +5891,6 @@ Tab characters are counted by their visual width."
(if (looking-at "[a-z0-9_]+")
(match-string 0))))
-;; Aliases for backward compatibility with older versions of Erlang Mode.
-;;
-;; Unfortuantely, older versions of Emacs doesn't have `defalias' and
-;; `make-obsolete' so we have to define our own `obsolete' function.
-
-(defun erlang-obsolete (sym newdef)
- "Make the obsolete function SYM refer to the defined function NEWDEF.
-
-Simplified version of a combination `defalias' and `make-obsolete',
-it assumes that NEWDEF is loaded."
- (defalias sym (symbol-function newdef))
- (make-obsolete sym newdef "long ago"))
-
-
-(erlang-obsolete 'calculate-erlang-indent 'erlang-calculate-indent)
-(erlang-obsolete 'calculate-erlang-stack-indent
- 'erlang-calculate-stack-indent)
-(erlang-obsolete 'at-erlang-keyword 'erlang-at-keyword)
-(erlang-obsolete 'at-erlang-operator 'erlang-at-operator)
-(erlang-obsolete 'beginning-of-erlang-clause 'erlang-beginning-of-clause)
-(erlang-obsolete 'end-of-erlang-clause 'erlang-end-of-clause)
-(erlang-obsolete 'mark-erlang-clause 'erlang-mark-clause)
-(erlang-obsolete 'beginning-of-erlang-function 'erlang-beginning-of-function)
-(erlang-obsolete 'end-of-erlang-function 'erlang-end-of-function)
-(erlang-obsolete 'mark-erlang-function 'erlang-mark-function)
-(erlang-obsolete 'pass-over-erlang-clause 'erlang-pass-over-function)
-(erlang-obsolete 'name-of-erlang-function 'erlang-name-of-function)
-
-
(defconst erlang-unload-hook
(list (lambda ()
(ad-unadvise 'Man-notify-when-ready)
diff --git a/lib/tools/src/eprof.erl b/lib/tools/src/eprof.erl
index 3ae899a078..535ddbcd04 100644
--- a/lib/tools/src/eprof.erl
+++ b/lib/tools/src/eprof.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 1996-2016. All Rights Reserved.
+%% Copyright Ericsson AB 1996-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -246,7 +246,7 @@ handle_call(profile_stop, _From, #state{ profiling = true } = S) ->
%% logfile
handle_call({logfile, File}, _From, #state{ fd = OldFd } = S) ->
- case file:open(File, [write]) of
+ case file:open(File, [write, {encoding, utf8}]) of
{ok, Fd} ->
case OldFd of
undefined -> ok;
@@ -478,11 +478,11 @@ string_bp_mfa([{Mfa, {Count, Time}}|Mfas], Tus, {MfaW, CountW, PercW, TimeW, TpC
Stpc = s("~.2f", [divide(Time,Count)]),
string_bp_mfa(Mfas, Tus, {
- erlang:max(MfaW, length(Smfa)),
- erlang:max(CountW,length(Scount)),
- erlang:max(PercW, length(Sperc)),
- erlang:max(TimeW, length(Stime)),
- erlang:max(TpCW, length(Stpc))
+ erlang:max(MfaW, string:length(Smfa)),
+ erlang:max(CountW,string:length(Scount)),
+ erlang:max(PercW, string:length(Sperc)),
+ erlang:max(TimeW, string:length(Stime)),
+ erlang:max(TpCW, string:length(Stpc))
}, [[Smfa, Scount, Sperc, Stime, Stpc] | Strings]).
print_bp_mfa(Mfas, {Tn, Tus}, Fd, Opts) ->
@@ -491,11 +491,11 @@ print_bp_mfa(Mfas, {Tn, Tus}, Fd, Opts) ->
TnStr = s(Tn),
TusStr = s(Tus),
TuspcStr = s("~.2f", [divide(Tus,Tn)]),
- Ws = {erlang:max(length("FUNCTION"), MfaW),
- lists:max([length("CALLS"), CountW, length(TnStr)]),
- erlang:max(length(" %"), PercW),
- lists:max([length("TIME"), TimeW, length(TusStr)]),
- lists:max([length("uS / CALLS"), TpCW, length(TuspcStr)])},
+ Ws = {erlang:max(string:length("FUNCTION"), MfaW),
+ lists:max([string:length("CALLS"), CountW, string:length(TnStr)]),
+ erlang:max(string:length(" %"), PercW),
+ lists:max([string:length("TIME"), TimeW, string:length(TusStr)]),
+ lists:max([string:length("uS / CALLS"), TpCW, string:length(TuspcStr)])},
format(Fd, Ws, ["FUNCTION", "CALLS", " %", "TIME", "uS / CALLS"]),
format(Fd, Ws, ["--------", "-----", "-------", "----", "----------"]),
lists:foreach(fun (String) -> format(Fd, Ws, String) end, Strs),
@@ -503,13 +503,13 @@ print_bp_mfa(Mfas, {Tn, Tus}, Fd, Opts) ->
format(Fd, Ws, ["Total:", TnStr, "100.00%", TusStr, TuspcStr]),
ok.
-s({M,F,A}) -> s("~w:~w/~w",[M,F,A]);
-s(Term) -> s("~p", [Term]).
+s({M,F,A}) -> s("~w:~tw/~w",[M,F,A]);
+s(Term) -> s("~tp", [Term]).
s(Format, Terms) -> lists:flatten(io_lib:format(Format, Terms)).
format(Fd, {MfaW, CountW, PercW, TimeW, TpCW}, Strings) ->
- format(Fd, s("~~.~ps ~~~ps ~~~ps ~~~ps [~~~ps]~~n", [MfaW, CountW, PercW, TimeW, TpCW]), Strings);
+ format(Fd, s("~~.~wts ~~~ws ~~~ws ~~~ws [~~~ws]~~n", [MfaW, CountW, PercW, TimeW, TpCW]), Strings);
format(undefined, Format, Strings) ->
io:format(Format, Strings),
ok;
diff --git a/lib/tools/src/fprof.erl b/lib/tools/src/fprof.erl
index d1a4624419..2fe42beb03 100644
--- a/lib/tools/src/fprof.erl
+++ b/lib/tools/src/fprof.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2001-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2001-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -1475,7 +1475,7 @@ info_suspect_call(GroupLeader, GroupLeader, _, _) ->
ok;
info_suspect_call(GroupLeader, _, Func, Pid) ->
io:format(GroupLeader,
- "~nWarning: ~p called in ~p - trace may become corrupt!~n",
+ "~nWarning: ~tp called in ~p - trace may become corrupt!~n",
parsify([Func, Pid])).
info(GroupLeader, GroupLeader, _, _) ->
@@ -1498,13 +1498,13 @@ dump_stack(Dump, Stack, Term) ->
{N, length(hd(Stack))}
end
end,
- io:format(Dump, "~s~p.~n", [lists:duplicate(Depth, " "), parsify(Term)]),
+ io:format(Dump, "~s~tp.~n", [lists:duplicate(Depth, " "), parsify(Term)]),
true.
dump(undefined, _) ->
false;
dump(Dump, Term) ->
- io:format(Dump, "~p.~n", [parsify(Term)]),
+ io:format(Dump, "~tp.~n", [parsify(Term)]),
true.
@@ -2603,17 +2603,17 @@ println({Io, [W1, W2, W3, W4]}, Head,
println({Io, _}, Head,
[],
Tail, Comment) ->
- io:format(Io, "~s~s~s~n",
+ io:format(Io, "~s~ts~ts~n",
[pad(Head, $ , 3), Tail, Comment]);
println({Io, _}, Head,
{Tag, Term},
Tail, Comment) ->
- io:format(Io, "~s~p, ~p~s~s~n",
+ io:format(Io, "~s~tp, ~tp~ts~ts~n",
[pad(Head, $ , 3), parsify(Tag), parsify(Term), Tail, Comment]);
println({Io, _}, Head,
Term,
Tail, Comment) ->
- io:format(Io, "~s~p~s~s~n",
+ io:format(Io, "~s~tp~ts~ts~n",
[pad(Head, $ , 3), parsify(Term), Tail, Comment]).
@@ -2636,22 +2636,32 @@ funcstat_pd(Pid, Func1, Func0, Clocks) ->
#funcstat{callers_sum = CallersSum,
callers = Callers} = FuncstatCallers ->
FuncstatCallers#funcstat{
- callers_sum = clocks_sum(CallersSum, Clocks, Func0),
- callers = [Clocks#clocks{id = Func1} | Callers]}
- end),
+ callers_sum = clocks_sum(CallersSum, Clocks, Func0),
+ callers = insert_call(Clocks, Func1, Callers)}
+ end),
put({Pid, Func1},
case get({Pid, Func1}) of
undefined ->
- #funcstat{callers_sum = #clocks{id = Func1},
+ #funcstat{callers_sum = #clocks{id = Func1},
called_sum = Clocks#clocks{id = Func1},
called = [Clocks#clocks{id = Func0}]};
#funcstat{called_sum = CalledSum,
called = Called} = FuncstatCalled ->
FuncstatCalled#funcstat{
called_sum = clocks_sum(CalledSum, Clocks, Func1),
- called = [Clocks#clocks{id = Func0} | Called]}
+ called = insert_call(Clocks, Func0, Called)}
end).
+insert_call(Clocks, Func, ClocksList) ->
+ insert_call(Clocks, Func, ClocksList, []).
+
+insert_call(Clocks, Func, [#clocks{id = Func} = C | T], Acc) ->
+ [clocks_sum(C, Clocks, Func) | T ++ Acc];
+insert_call(Clocks, Func, [H | T], Acc) ->
+ insert_call(Clocks, Func, T, [H | Acc]);
+insert_call(Clocks, Func, [], Acc) ->
+ [Clocks#clocks{id = Func} | Acc].
+
%% Sort a list of funcstat records,
@@ -2710,7 +2720,7 @@ postsort_r([[_|C] | L], R) ->
flat_format(F, Trailer) when is_float(F) ->
lists:flatten([io_lib:format("~.3f", [F]), Trailer]);
flat_format(W, Trailer) ->
- lists:flatten([io_lib:format("~p", [W]), Trailer]).
+ lists:flatten([io_lib:format("~tp", [W]), Trailer]).
%% Format, flatten, and pad.
flat_format(Term, Trailer, Width) ->
diff --git a/lib/tools/src/lcnt.erl b/lib/tools/src/lcnt.erl
index d881fedbd5..139b3d8a4a 100644
--- a/lib/tools/src/lcnt.erl
+++ b/lib/tools/src/lcnt.erl
@@ -34,8 +34,11 @@
-export([start/0,
stop/0]).
-%% erts_debug:lock_counters api
--export([rt_collect/0,
+%% erts_debug:lcnt_xxx api
+-export([rt_mask/0,
+ rt_mask/1,
+ rt_mask/2,
+ rt_collect/0,
rt_collect/1,
rt_clear/0,
rt_clear/1,
@@ -134,27 +137,61 @@ start_internal() ->
%% -------------------------------------------------------------------- %%
%%
-%% API erts_debug:lock_counters
+%% API erts_debug:lcnt_xxx
%%
%% -------------------------------------------------------------------- %%
-rt_collect() ->
- erts_debug:lock_counters(info).
+rt_mask(Node, Categories) when is_atom(Node), is_list(Categories) ->
+ rpc:call(Node, lcnt, rt_mask, [Categories]).
+
+rt_mask(Node) when is_atom(Node) ->
+ rpc:call(Node, lcnt, rt_mask, []);
+
+rt_mask(Categories) when is_list(Categories) ->
+ case erts_debug:lcnt_control(copy_save) of
+ false ->
+ erts_debug:lcnt_control(mask, Categories);
+ true ->
+ {error, copy_save_enabled}
+ end.
+
+rt_mask() ->
+ erts_debug:lcnt_control(mask).
rt_collect(Node) ->
- rpc:call(Node, erts_debug, lock_counters, [info]).
+ rpc:call(Node, lcnt, rt_collect, []).
+rt_collect() ->
+ erts_debug:lcnt_collect().
+rt_clear(Node) ->
+ rpc:call(Node, lcnt, rt_clear, []).
rt_clear() ->
- erts_debug:lock_counters(clear).
+ erts_debug:lcnt_clear().
-rt_clear(Node) ->
- rpc:call(Node, erts_debug, lock_counters, [clear]).
+rt_opt(Node, Arg) ->
+ rpc:call(Node, lcnt, rt_opt, [Arg]).
-rt_opt({Type, Opt}) ->
- erts_debug:lock_counters({Type, Opt}).
+%% Compatibility shims for the "process/port_locks" options mentioned in the
+%% manual.
+rt_opt({process_locks, Enable}) ->
+ toggle_category(process, Enable);
+rt_opt({port_locks, Enable}) ->
+ toggle_category(io, Enable);
-rt_opt(Node, {Type, Opt}) ->
- rpc:call(Node, erts_debug, lock_counters, [{Type, Opt}]).
+rt_opt({Type, NewVal}) ->
+ PreviousVal = erts_debug:lcnt_control(Type),
+ erts_debug:lcnt_control(Type, NewVal),
+ PreviousVal.
+
+toggle_category(Category, true) ->
+ PreviousMask = erts_debug:lcnt_control(mask),
+ erts_debug:lcnt_control(mask, [Category | PreviousMask]),
+ lists:member(Category, PreviousMask);
+
+toggle_category(Category, false) ->
+ PreviousMask = erts_debug:lcnt_control(mask),
+ erts_debug:lcnt_control(mask, lists:delete(Category, PreviousMask)),
+ lists:member(Category, PreviousMask).
%% -------------------------------------------------------------------- %%
%%
@@ -192,13 +229,9 @@ call(Msg) -> gen_server:call(?MODULE, Msg, infinity).
%% -------------------------------------------------------------------- %%
apply(M,F,As) when is_atom(M), is_atom(F), is_list(As) ->
- ok = start_internal(),
- Opt = lcnt:rt_opt({copy_save, true}),
- lcnt:clear(),
- Res = erlang:apply(M,F,As),
- lcnt:collect(),
- lcnt:rt_opt({copy_save, Opt}),
- Res.
+ apply(fun() ->
+ erlang:apply(M,F,As)
+ end).
apply(Fun) when is_function(Fun) ->
lcnt:apply(Fun, []).
@@ -209,7 +242,9 @@ apply(Fun, As) when is_function(Fun) ->
lcnt:clear(),
Res = erlang:apply(Fun, As),
lcnt:collect(),
- lcnt:rt_opt({copy_save, Opt}),
+ %% _ is bound to silence a dialyzer warning; it used to fail silently and
+ %% we don't want to change the error semantics.
+ _ = lcnt:rt_opt({copy_save, Opt}),
Res.
all_conflicts() -> all_conflicts(time).
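The lcnt runtime API now goes through erts_debug:lcnt_control/1,2 and erts_debug:lcnt_collect/0 instead of the removed erts_debug:lock_counters/1, while rt_opt/1 keeps the documented process_locks/port_locks options working by toggling the process and io categories. A minimal sketch, assuming a runtime built with lock counting enabled (the category names come from the shims above; the profiled fun is only illustrative):

    %% Inspect the current category mask, then restrict it.
    Mask = lcnt:rt_mask(),            % e.g. [process, io, ...]
    _ = lcnt:rt_mask([process, io]),  % {error, copy_save_enabled} if copy_save is active

    %% The old process_locks/port_locks options still work; they toggle
    %% the 'process' and 'io' categories and return the previous value.
    _Was = lcnt:rt_opt({process_locks, true}),

    %% Profile a fun (illustrative workload), then inspect the counters.
    lcnt:apply(fun() -> lists:sort(lists:reverse(lists:seq(1, 10000))) end),
    lcnt:conflicts().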
diff --git a/lib/tools/src/make.erl b/lib/tools/src/make.erl
index ce30156db6..6554d338af 100644
--- a/lib/tools/src/make.erl
+++ b/lib/tools/src/make.erl
@@ -267,15 +267,47 @@ include_opt([]) ->
recompile(File, true, _Load, _Opts) ->
io:format("Out of date: ~ts\n",[File]);
-recompile(File, false, noload, Opts) ->
+recompile(File, false, Load, Opts) ->
io:format("Recompile: ~ts\n",[File]),
- compile:file(File, [report_errors, report_warnings, error_summary |Opts]);
-recompile(File, false, load, Opts) ->
- io:format("Recompile: ~ts\n",[File]),
- c:c(File, Opts);
-recompile(File, false, netload, Opts) ->
- io:format("Recompile: ~ts\n",[File]),
- c:nc(File, Opts).
+ case compile:file(File, [report_errors, report_warnings |Opts]) of
+ Ok when is_tuple(Ok), element(1,Ok)==ok ->
+ maybe_load(element(2,Ok), Load, Opts);
+ _Error ->
+ error
+ end.
+
+maybe_load(_Mod, noload, _Opts) ->
+ ok;
+maybe_load(Mod, Load, Opts) ->
+ %% We have compiled File with options Opts. Find out where the
+ %% output file went to, and load it.
+ case compile:output_generated(Opts) of
+ true ->
+ Dir = proplists:get_value(outdir,Opts,"."),
+ do_load(Dir, Mod, Load);
+ false ->
+ io:format("** Warning: No object file created - nothing loaded **~n"),
+ ok
+ end.
+
+do_load(Dir, Mod, load) ->
+ code:purge(Mod),
+ case code:load_abs(filename:join(Dir, Mod),Mod) of
+ {module,Mod} ->
+ {ok,Mod};
+ Other ->
+ Other
+ end;
+do_load(Dir, Mod, netload) ->
+ Obj = atom_to_list(Mod) ++ code:objfile_extension(),
+ Fname = filename:join(Dir, Obj),
+ case file:read_file(Fname) of
+ {ok,Bin} ->
+ rpc:eval_everywhere(code,load_binary,[Mod,Fname,Bin]),
+ {ok,Mod};
+ Other ->
+ Other
+ end.
exists(File) ->
case file:read_file_info(File) of
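With this change all three load modes go through compile:file/2, and maybe_load/3 decides what happens to the resulting module: nothing for noload, code:load_abs/2 from the outdir for load, and rpc:eval_everywhere(code, load_binary, ...) for netload. A minimal sketch of driving it through make:files/2, mirroring the tests added below (test1 is a placeholder module name taken from the test data; the outdir option is only needed when object code is not written to the current directory):

    %% Recompile and load locally: the old code is purged and the new
    %% object file is loaded with code:load_abs/2.
    up_to_date = make:files([test1], [load]),
    {file, _} = code:is_loaded(test1),

    %% netload additionally distributes the object code to all known
    %% nodes; the remote load is asynchronous.
    up_to_date = make:files([test1], [netload, {outdir, "ebin"}]).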
diff --git a/lib/tools/src/tools.app.src b/lib/tools/src/tools.app.src
index 12f0cfd2df..8beef49bf9 100644
--- a/lib/tools/src/tools.app.src
+++ b/lib/tools/src/tools.app.src
@@ -41,6 +41,6 @@
]
},
{runtime_dependencies, ["stdlib-3.1","runtime_tools-1.8.14",
- "kernel-3.0","erts-7.0","compiler-5.0"]}
+ "kernel-5.4","erts-9.1","compiler-5.0"]}
]
}.
diff --git a/lib/tools/src/xref_base.erl b/lib/tools/src/xref_base.erl
index 8d2cc07e40..a28c6ee283 100644
--- a/lib/tools/src/xref_base.erl
+++ b/lib/tools/src/xref_base.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2000-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2000-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -400,26 +400,28 @@ analysis(locals_not_used, functions) ->
%% used (indirectly) from any export: "(domain EE + range EE) * L".
%% But then we only get locals that make some calls, so we add
%% locals that are not used at all: "L * (UU + XU - LU)".
- "L * ((UU + XU - LU) + domain EE + range EE)";
+ %% We also need to exclude functions with the -on_load() attribute:
+ %% (L - OL) is used rather than just L.
+ "(L - OL) * ((UU + XU - LU) + domain EE + range EE)";
analysis(exports_not_used, _) ->
%% Local calls are not considered here. "X * UU" would do otherwise.
"X - XU";
analysis({call, F}, functions) ->
- make_query("range (E | ~w : Fun)", [F]);
+ make_query("range (E | ~tw : Fun)", [F]);
analysis({use, F}, functions) ->
- make_query("domain (E || ~w : Fun)", [F]);
+ make_query("domain (E || ~tw : Fun)", [F]);
analysis({module_call, M}, _) ->
- make_query("range (ME | ~w : Mod)", [M]);
+ make_query("range (ME | ~tw : Mod)", [M]);
analysis({module_use, M}, _) ->
- make_query("domain (ME || ~w : Mod)", [M]);
+ make_query("domain (ME || ~tw : Mod)", [M]);
analysis({application_call, A}, _) ->
- make_query("range (AE | ~w : App)", [A]);
+ make_query("range (AE | ~tw : App)", [A]);
analysis({application_use, A}, _) ->
- make_query("domain (AE || ~w : App)", [A]);
+ make_query("domain (AE || ~tw : App)", [A]);
analysis({release_call, R}, _) ->
- make_query("range (RE | ~w : Rel)", [R]);
+ make_query("range (RE | ~tw : Rel)", [R]);
analysis({release_use, R}, _) ->
- make_query("domain (RE || ~w : Rel)", [R]);
+ make_query("domain (RE || ~tw : Rel)", [R]);
analysis(deprecated_function_calls, functions) ->
"XC || DF";
analysis({deprecated_function_calls,Flag}, functions) ->
@@ -918,7 +920,7 @@ do_add_module(S, XMod, Unres, Data) ->
{ok, Ms, Bad, NS}.
prepare_module(_Mode = functions, XMod, Unres0, Data) ->
- {DefAt0, LPreCAt0, XPreCAt0, LC0, XC0, X0, Attrs, Depr} = Data,
+ {DefAt0, LPreCAt0, XPreCAt0, LC0, XC0, X0, Attrs, Depr, OL0} = Data,
%% Bad is a list of bad values of 'xref' attributes.
{ALC0,AXC0,Bad0} = Attrs,
FT = [tspec(func)],
@@ -935,6 +937,7 @@ prepare_module(_Mode = functions, XMod, Unres0, Data) ->
ALC1 = xref_utils:xset(ALC0, PCA),
UnresCalls = xref_utils:xset(Unres0, PCA),
Unres = domain(UnresCalls),
+ OL1 = xref_utils:xset(OL0, FT),
DefinedFuns = domain(DefAt),
{AXC, ALC, Bad1, LPreCAt2, XPreCAt2} =
@@ -955,7 +958,7 @@ prepare_module(_Mode = functions, XMod, Unres0, Data) ->
{DF1,DF_11,DF_21,DF_31,DBad} = depr_mod(Depr, X),
{EE, ECallAt} = inter_graph(X, L, LC, XC, CallAt),
{ok, {functions, XMod, [DefAt,L,X,LCallAt,XCallAt,CallAt,LC,XC,EE,ECallAt,
- DF1,DF_11,DF_21,DF_31], NoCalls, Unres},
+ OL1,DF1,DF_11,DF_21,DF_31], NoCalls, Unres},
DBad++Bad};
prepare_module(_Mode = modules, XMod, _Unres, Data) ->
{X0, I0, Depr} = Data,
@@ -967,7 +970,7 @@ prepare_module(_Mode = modules, XMod, _Unres, Data) ->
finish_module({functions, XMod, List, NoCalls, Unres}, S) ->
ok = check_module(XMod, S),
[DefAt2,L2,X2,LCallAt2,XCallAt2,CallAt2,LC2,XC2,EE2,ECallAt2,
- DF2,DF_12,DF_22,DF_32] = pack(List),
+ OL2,DF2,DF_12,DF_22,DF_32] = pack(List),
LU = range(LC2),
@@ -976,7 +979,7 @@ finish_module({functions, XMod, List, NoCalls, Unres}, S) ->
M = XMod#xref_mod.name,
MS = xref_utils:xset(M, atom),
T = from_sets({MS,DefAt2,L2,X2,LCallAt2,XCallAt2,CallAt2,
- LC2,XC2,LU,EE2,ECallAt2,Unres,LPredefined,
+ LC2,XC2,LU,EE2,ECallAt2,Unres,LPredefined,OL2,
DF2,DF_12,DF_22,DF_32}),
NoUnres = XMod#xref_mod.no_unresolved,
@@ -1220,7 +1223,7 @@ do_set_up(S, VerboseOpt) ->
%% If data has been supplied using add_module/9 (and that is the only
%% sanctioned way), then DefAt, L, X, LCallAt, XCallAt, CallAt, XC, LC,
-%% and LU are guaranteed to be functions (with all supplied
+%% LU and OL are guaranteed to be functions (with all supplied
%% modules as domain (disregarding unknown modules, that is, modules
%% not supplied but hosting unknown functions)).
%% As a consequence, V and E are also functions. V is defined for unknown
@@ -1233,8 +1236,8 @@ do_set_up(S, VerboseOpt) ->
do_set_up(S) when S#xref.mode =:= functions ->
ModDictList = dict:to_list(S#xref.modules),
[DefAt0, L, X0, LCallAt, XCallAt, CallAt, LC, XC, LU,
- EE0, ECallAt, UC, LPredefined,
- Mod_DF,Mod_DF_1,Mod_DF_2,Mod_DF_3] = make_families(ModDictList, 18),
+ EE0, ECallAt, UC, LPredefined, OL,
+ Mod_DF,Mod_DF_1,Mod_DF_2,Mod_DF_3] = make_families(ModDictList, 19),
{XC_1, XU, XPredefined} = do_set_up_1(XC),
LC_1 = user_family(union_of_family(LC)),
@@ -1314,13 +1317,14 @@ do_set_up(S) when S#xref.mode =:= functions ->
UC_1 = user_family(union_of_family(UC)),
?FORMAT("DefAt ~p~n", [DefAt]),
- ?FORMAT("U=~p~nLib=~p~nB=~p~nLU=~p~nXU=~p~nUU=~p~n", [U,Lib,B,LU,XU,UU]),
+ ?FORMAT("U=~p~nLib=~p~nB=~p~nLU=~p~nXU=~p~nUU=~p~nOL=~p~n",
+ [U,Lib,B,LU,XU,UU,OL]),
?FORMAT("E_1=~p~nLC_1=~p~nXC_1=~p~n", [E_1,LC_1,XC_1]),
?FORMAT("EE=~p~nEE_1=~p~nECallAt=~p~n", [EE, EE_1, ECallAt]),
?FORMAT("DF=~p~nDF_1=~p~nDF_2=~p~nDF_3=~p~n", [DF, DF_1, DF_2, DF_3]),
Vs = [{'L',L}, {'X',X},{'F',F},{'U',U},{'B',B},{'UU',UU},
- {'XU',XU},{'LU',LU},{'V',V},{v,V},
+ {'XU',XU},{'LU',LU},{'V',V},{v,V},{'OL',OL},
{'LC',{LC,LC_1}},{'XC',{XC,XC_1}},{'E',{E,E_1}},{e,{E,E_1}},
{'EE',{EE,EE_1}},{'UC',{UC,UC_1}},
{'M',M},{'A',A},{'R',R},
@@ -1405,6 +1409,7 @@ var_type('U') -> {function, vertex};
var_type('UU') -> {function, vertex};
var_type('V') -> {function, vertex};
var_type('X') -> {function, vertex};
+var_type('OL') -> {function, vertex};
var_type('XU') -> {function, vertex};
var_type('DF') -> {function, vertex};
var_type('DF_1') -> {function, vertex};
@@ -1833,9 +1838,9 @@ message(true, What, Arg) ->
unreadable ->
io:format("Skipping ~ts (unreadable)~n", [Arg]);
xref_attr ->
- io:format("~ts: Skipping 'xref' attribute ~w~n", Arg);
+ io:format("~ts: Skipping 'xref' attribute ~tw~n", Arg);
depr_attr ->
- io:format("~ts: Skipping 'deprecated' attribute ~w~n", Arg);
+ io:format("~ts: Skipping 'deprecated' attribute ~tw~n", Arg);
lib_search ->
io:format("Scanning library path for BEAM files... ", []);
lib_check ->
diff --git a/lib/tools/src/xref_parser.yrl b/lib/tools/src/xref_parser.yrl
index 0711da79e2..5ee6419ff5 100644
--- a/lib/tools/src/xref_parser.yrl
+++ b/lib/tools/src/xref_parser.yrl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2000-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2000-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -170,7 +170,7 @@ is_prefix_op('#') -> numeric;
is_prefix_op(_) -> false.
check_regexp(String) ->
- case re:compile(String) of
+ case re:compile(String, [unicode]) of
{ok, _Expr} ->
{regexpr, String};
{error, {ErrString, Position}} ->
@@ -274,7 +274,7 @@ mfa2s({M,F,A}) ->
[c2s(M),':',c2s(F),'/',A].
c2s(C) ->
- [S] = io_lib:format("~p", [C]),
+ [S] = io_lib:format("~tp", [C]),
list_to_atom(S).
re(variable) -> ['_'];
diff --git a/lib/tools/src/xref_reader.erl b/lib/tools/src/xref_reader.erl
index 88f92df35a..d28bdb78db 100644
--- a/lib/tools/src/xref_reader.erl
+++ b/lib/tools/src/xref_reader.erl
@@ -42,7 +42,8 @@
%% experimental; -xref(FunEdge) is recognized.
lattrs=[], % local calls, {{mfa(),mfa()},Line}
xattrs=[], % external calls, -"-
- battrs=[] % badly formed xref attributes, term().
+ battrs=[], % badly formed xref attributes, term().
+ on_load % function name
}).
-include("xref.hrl").
@@ -68,15 +69,26 @@ forms([F | Fs], S) ->
forms([], S) ->
#xrefr{module = M, def_at = DefAt,
l_call_at = LCallAt, x_call_at = XCallAt,
- el = LC, ex = XC, x = X, df = Depr,
+ el = LC, ex = XC, x = X, df = Depr, on_load = OnLoad,
+ lattrs = AL, xattrs = AX, battrs = B, unresolved = U} = S,
+ OL = case OnLoad of
+ undefined -> [];
+ F ->
+ [{M, F, 0}]
+ end,
+ #xrefr{def_at = DefAt,
+ l_call_at = LCallAt, x_call_at = XCallAt,
+ el = LC, ex = XC, x = X, df = Depr, on_load = OnLoad,
lattrs = AL, xattrs = AX, battrs = B, unresolved = U} = S,
Attrs = {lists:reverse(AL), lists:reverse(AX), lists:reverse(B)},
- {ok, M, {DefAt, LCallAt, XCallAt, LC, XC, X, Attrs, Depr}, U}.
+ {ok, M, {DefAt, LCallAt, XCallAt, LC, XC, X, Attrs, Depr, OL}, U}.
form({attribute, Line, xref, Calls}, S) -> % experimental
#xrefr{module = M, function = Fun,
lattrs = L, xattrs = X, battrs = B} = S,
attr(Calls, erl_anno:line(Line), M, Fun, L, X, B, S);
+form({attribute, _, on_load, {F, 0}}, S) ->
+ S#xrefr{on_load = F};
form({attribute, _Line, _Attr, _Val}, S) ->
S;
form({function, _, module_info, 0, _Clauses}, S) ->
diff --git a/lib/tools/src/xref_utils.erl b/lib/tools/src/xref_utils.erl
index b0c168e018..02e207d40c 100644
--- a/lib/tools/src/xref_utils.erl
+++ b/lib/tools/src/xref_utils.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2000-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2000-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -638,14 +638,14 @@ neighbours([], G, Fun, VT, L, _V, Vs) ->
neighbours(Vs, G, Fun, VT, L).
match_list(L, RExpr) ->
- {ok, Expr} = re:compile(RExpr),
+ {ok, Expr} = re:compile(RExpr, [unicode]),
filter(fun(E) -> match(E, Expr) end, L).
match_one(VarL, Con, Col) ->
select_each(VarL, fun(E) -> Con =:= element(Col, E) end).
match_many(VarL, RExpr, Col) ->
- {ok, Expr} = re:compile(RExpr),
+ {ok, Expr} = re:compile(RExpr, [unicode]),
select_each(VarL, fun(E) -> match(element(Col, E), Expr) end).
match(I, Expr) when is_integer(I) ->
@@ -653,7 +653,12 @@ match(I, Expr) when is_integer(I) ->
{match, [{0,length(S)}]} =:= re:run(S, Expr, [{capture, first}]);
match(A, Expr) when is_atom(A) ->
S = atom_to_list(A),
- {match, [{0,length(S)}]} =:= re:run(S, Expr, [{capture, first}]).
+ case re:run(S, Expr, [{capture, first}]) of
+ {match, [{0,Size}]} ->
+ Size =:= byte_size(unicode:characters_to_binary(S));
+ _ ->
+ false
+ end.
select_each([{Mod,Funs} | L], Pred) ->
case filter(Pred, Funs) of
diff --git a/lib/tools/test/lcnt_SUITE.erl b/lib/tools/test/lcnt_SUITE.erl
index af3ce88fdd..146c915087 100644
--- a/lib/tools/test/lcnt_SUITE.erl
+++ b/lib/tools/test/lcnt_SUITE.erl
@@ -151,10 +151,9 @@ t_swap_keys_file([File|Files]) ->
%% Simple smoke test of actual lock-counting, if running on
%% a run-time with lock-counting enabled.
-
smoke_lcnt(Config) ->
- case erlang:system_info(build_type) of
- lcnt ->
+ case catch erlang:system_info(lock_counting) of
+ true ->
do_smoke_lcnt(Config);
_ ->
{skip,"Lock counting is not enabled"}
diff --git a/lib/tools/test/make_SUITE.erl b/lib/tools/test/make_SUITE.erl
index 2f6fe1c732..02da4f4ace 100644
--- a/lib/tools/test/make_SUITE.erl
+++ b/lib/tools/test/make_SUITE.erl
@@ -36,7 +36,7 @@
suite() -> [{ct_hooks,[ts_install_cth]}].
all() ->
- [make_all, make_files, recompile_on_changed_include,
+ [make_all, make_files, load, netload, recompile_on_changed_include,
emake_opts, {group, otp_6057}].
groups() ->
@@ -55,6 +55,21 @@ init_per_group(_GroupName, Config) ->
end_per_group(_GroupName, Config) ->
otp_6057_end(Config).
+init_per_testcase(_,Config) ->
+ Config.
+
+end_per_testcase(netload,_Config) ->
+ %% Stop slave - in case of failure
+ Nodes = nodes(),
+ case [N || N <- Nodes,
+ "make_SUITE_netload" == hd(string:lexemes(atom_to_list(N),"@"))] of
+ [Node] ->
+ ct_slave:stop(Node);
+ _ ->
+ ok
+ end;
+end_per_testcase(_,_Config) ->
+ ok.
test_files() -> ["test1", "test2", "test3", "test4"].
@@ -83,6 +98,32 @@ make_files(Config) when is_list(Config) ->
ensure_no_messages(),
ok.
+load(Config) ->
+ Current = prepare_data_dir(Config),
+ code:purge(test1),
+ code:delete(test1),
+ false = code:is_loaded(test1),
+ up_to_date = make:files([test1], [load]),
+ {file,_} = code:is_loaded(test1),
+ file:set_cwd(Current),
+ ensure_no_messages(),
+ ok.
+
+netload(Config) ->
+ Current = prepare_data_dir(Config),
+ code:purge(test1),
+ code:delete(test1),
+ false = code:is_loaded(test1),
+ {ok,Node} = ct_slave:start(make_SUITE_netload),
+ up_to_date = make:files([test1], [netload]),
+ timer:sleep(1000), % async, so give some time
+ {file,F} = code:is_loaded(test1),
+ {file,F} = rpc:call(Node,code,is_loaded,[test1]),
+ ct_slave:stop(Node),
+ file:set_cwd(Current),
+ ensure_no_messages(),
+ ok.
+
recompile_on_changed_include(Config) ->
Current = prepare_data_dir(Config),
diff --git a/lib/tools/test/xref_SUITE.erl b/lib/tools/test/xref_SUITE.erl
index 057449d4a2..d651cbcfee 100644
--- a/lib/tools/test/xref_SUITE.erl
+++ b/lib/tools/test/xref_SUITE.erl
@@ -50,7 +50,8 @@
-export([analyze/1, basic/1, md/1, q/1, variables/1, unused_locals/1]).
--export([format_error/1, otp_7423/1, otp_7831/1, otp_10192/1, otp_13708/1]).
+-export([format_error/1, otp_7423/1, otp_7831/1, otp_10192/1, otp_13708/1,
+ otp_14464/1, otp_14344/1]).
-import(lists, [append/2, flatten/1, keysearch/3, member/2, sort/1, usort/1]).
@@ -81,8 +82,10 @@ groups() ->
update, deprecated, trycatch, fun_mfa,
fun_mfa_r14, fun_mfa_vars, qlc]},
{analyses, [],
+
[analyze, basic, md, q, variables, unused_locals]},
- {misc, [], [format_error, otp_7423, otp_7831, otp_10192, otp_13708]}].
+ {misc, [], [format_error, otp_7423, otp_7831, otp_10192, otp_13708,
+ otp_14464, otp_14344]}].
init_per_suite(Conf) when is_list(Conf) ->
@@ -2396,7 +2399,6 @@ otp_10192(Conf) when is_list(Conf) ->
xref:stop(s),
ok.
-%% OTP-10192. Allow filenames with character codes greater than 126.
otp_13708(Conf) when is_list(Conf) ->
{ok, _} = start(s),
ok = xref:set_default(s, [{verbose, true}]),
@@ -2409,6 +2411,60 @@ otp_13708(Conf) when is_list(Conf) ->
ok = xref:set_library_path(s, [Dir], [{verbose, true}]),
xref:stop(s).
+%% OTP-14464. Unicode atoms.
+otp_14464(Conf) when is_list(Conf) ->
+ Dir = ?copydir,
+
+ File1 = fname(Dir, "a.erl"),
+ MFile1 = fname(Dir, "a"),
+ Beam1 = fname(Dir, "a.beam"),
+ Test1 = "-module(a).
+ -export([ärlig/0, 'кlирилли́ческий атомB'/0]).
+
+ ärlig() ->
+ 'кlирилли́ческий атомB'.
+
+ 'кlирилли́ческий атомB'() ->
+ foo.
+ ",
+ ok = file:write_file(File1, unicode:characters_to_binary(Test1)),
+ {ok, a} = compile:file(File1, [debug_info,{outdir,Dir}]),
+
+ {ok, _} = xref:start(s),
+ {ok, a} = xref:add_module(s, MFile1),
+
+ {ok, [{a,ärlig,0}]} = xref:q(s, 'a:"ärlig"/0'),
+ {ok, [{a,'кlирилли́ческий атомB',0}]} =
+ xref:q(s, 'a:"кlирилли́ческий атомB"/0'),
+
+ xref:stop(s),
+ ok = file:delete(File1),
+ ok = file:delete(Beam1).
+
+%% OTP-14344. -on_load() attribute.
+otp_14344(Conf) when is_list(Conf) ->
+ Dir = ?copydir,
+
+ File1 = fname(Dir, "a.erl"),
+ MFile1 = fname(Dir, "a"),
+ Beam1 = fname(Dir, "a.beam"),
+ Test1 = <<"-module(a).
+ -on_load(doit/0).
+ doit() -> ok.
+ ">>,
+ ok = file:write_file(File1, Test1),
+ {ok, a} = compile:file(File1, [debug_info,{outdir,Dir}]),
+
+ {ok, _} = xref:start(s),
+ {ok, a} = xref:add_module(s, MFile1),
+
+ {ok, [{a,doit,0}]} = xref:q(s, "OL"),
+ {ok, []} = xref:analyze(s, locals_not_used),
+
+ xref:stop(s),
+ ok = file:delete(File1),
+ ok = file:delete(Beam1).
+
%%%
%%% Utilities
%%%
@@ -2483,7 +2539,8 @@ add_module(S, XMod, DefAt, X, LCallAt, XCallAt, XC, LC) ->
Depr0 = {[], [], [], []},
DBad = [],
Depr = {Depr0,DBad},
- Data = {DefAt, LCallAt, XCallAt, LC, XC, X, Attr, Depr},
+ OL = [],
+ Data = {DefAt, LCallAt, XCallAt, LC, XC, X, Attr, Depr, OL},
Unres = [],
{ok, _Module, _Bad, State} =
xref_base:do_add_module(S, XMod, Unres, Data),
@@ -2564,6 +2621,9 @@ functions_mode_check(S, Info) ->
%% UU subset F
{ok, []} = xref:q(S, "UU - F"),
+ %% OL subset F
+ {ok, []} = xref:q(S, "OL - F"),
+
%% ME = (Mod) E
{ok, ME} = xref:q(S, "ME"),
{ok, ME} = xref:q(S, "(Mod) E"),
diff --git a/lib/tools/vsn.mk b/lib/tools/vsn.mk
index 8aa7814e1d..831d850217 100644
--- a/lib/tools/vsn.mk
+++ b/lib/tools/vsn.mk
@@ -1 +1 @@
-TOOLS_VSN = 2.10
+TOOLS_VSN = 2.10.1
diff --git a/lib/wx/api_gen/README b/lib/wx/api_gen/README
index dd0c49d227..200ef4c856 100644
--- a/lib/wx/api_gen/README
+++ b/lib/wx/api_gen/README
@@ -3,12 +3,13 @@ API GENERATION:
Users of wxErlang should not normally need to regenerate the generated code,
as it is checked in by wxErlang developers, when changes are made.
- Code checked in is currently generated from wxwidgets 2.8.10.
+ Code checked in is currently generated from wxwidgets 2.8.12.
REQUIREMENTS:
The code generation requires doxygen (1.4.6) which is
used to parse wxWidgets c++ headers and generate xml files (in
wx_xml/).
+ 2017-08-16 doxygen 1.8.11 is working with WXGTK_DIR=/ldisk/src/wxWidgets-2.8.12/include
2012-02-09 doxygen 1.7.4 is working fine
diff --git a/lib/wx/api_gen/wx_doxygen.conf b/lib/wx/api_gen/wx_doxygen.conf
index a96db00254..d6a0e9e6a1 100644
--- a/lib/wx/api_gen/wx_doxygen.conf
+++ b/lib/wx/api_gen/wx_doxygen.conf
@@ -71,12 +71,12 @@ WARN_LOGFILE =
#---------------------------------------------------------------------------
# configuration options related to the input files
#---------------------------------------------------------------------------
-INPUT = @WXGTK_DIR@/wx/ wx_extra/
+INPUT = @WXGTK_DIR@/wx/ @WXGTK_DIR@/../contrib/include/wx/stc/ wx_extra/
# FILE_PATTERNS = *.h
RECURSIVE = YES
EXCLUDE =
EXCLUDE_SYMLINKS = NO
-EXCLUDE_PATTERNS = mac/* mgl/* msw/* os2/* x11/* gtk1/* cocoa/* motif/* msdos/* palmos/* private/* vms_x_fix.h
+EXCLUDE_PATTERNS = */mac/* */dfb/* */mgl/* */msw/* */os2/* */x11/* */gtk1/* */cocoa/* */motif/* */msdos/* */palmos/* */private/* */univ/* */vms_x_fix.h
EXAMPLE_PATH =
EXAMPLE_PATTERNS =
EXAMPLE_RECURSIVE = NO
@@ -155,8 +155,6 @@ MAN_LINKS = NO
#---------------------------------------------------------------------------
GENERATE_XML = YES
XML_OUTPUT = ./wx_xml/
-XML_SCHEMA =
-XML_DTD =
XML_PROGRAMLISTING = NO
#---------------------------------------------------------------------------
# configuration options for the AutoGen Definitions output
diff --git a/lib/wx/api_gen/wx_gen.erl b/lib/wx/api_gen/wx_gen.erl
index 6979a600f3..aadfe4b111 100644
--- a/lib/wx/api_gen/wx_gen.erl
+++ b/lib/wx/api_gen/wx_gen.erl
@@ -501,10 +501,11 @@ parse_member2(_, _,M0) ->
M0.
add_param(InParam, Opts, M0) ->
- Param0 = case InParam#param.name of
- undefined -> InParam#param{name="val"};
+ Param0 = case {InParam#param.name, InParam#param.type} of
+ {undefined, void} -> InParam#param{where=nowhere};
+ {undefined,_} -> InParam#param{name="val"};
_ -> InParam
- end,
+ end,
Param = case Param0#param.type of
#type{base={comp,_,_Comp}} -> Param0;
#type{base={class,_Class}} -> Param0;
diff --git a/lib/wx/api_gen/wxapi.conf b/lib/wx/api_gen/wxapi.conf
index a0dfa61dd1..146c9fecc7 100644
--- a/lib/wx/api_gen/wxapi.conf
+++ b/lib/wx/api_gen/wxapi.conf
@@ -401,8 +401,8 @@
['~wxGraphicsContext',
'Create', %%CreateFromNative CreateFromNativeWindow
'CreatePen','CreateBrush',
- {'CreateRadialGradientBrush', [{deprecated, "!wxCHECK_VERSION(2,9,0)"}]},
- {'CreateLinearGradientBrush', [{deprecated, "!wxCHECK_VERSION(2,9,0)"}]},
+ 'CreateRadialGradientBrush',
+ 'CreateLinearGradientBrush',
'CreateFont','CreateMatrix',
'CreatePath','Clip','ResetClip',
'DrawBitmap','DrawEllipse','DrawIcon',
diff --git a/lib/wx/c_src/gen/wxe_funcs.cpp b/lib/wx/c_src/gen/wxe_funcs.cpp
index 5425e9f3cb..a47d602337 100644
--- a/lib/wx/c_src/gen/wxe_funcs.cpp
+++ b/lib/wx/c_src/gen/wxe_funcs.cpp
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2008-2016. All Rights Reserved.
+ * Copyright Ericsson AB 2008-2017. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -6177,7 +6177,6 @@ case wxGraphicsContext_CreateBrush: { // wxGraphicsContext::CreateBrush
rt.addRef(getRef((void *)Result,memenv), "wxGraphicsBrush");
break;
}
-#if !wxCHECK_VERSION(2,9,0)
case wxGraphicsContext_CreateRadialGradientBrush: { // wxGraphicsContext::CreateRadialGradientBrush
wxGraphicsContext *This = (wxGraphicsContext *) getPtr(bp,memenv); bp += 4;
bp += 4; /* Align */
@@ -6201,8 +6200,6 @@ case wxGraphicsContext_CreateRadialGradientBrush: { // wxGraphicsContext::Create
rt.addRef(getRef((void *)Result,memenv), "wxGraphicsBrush");
break;
}
-#endif
-#if !wxCHECK_VERSION(2,9,0)
case wxGraphicsContext_CreateLinearGradientBrush: { // wxGraphicsContext::CreateLinearGradientBrush
wxGraphicsContext *This = (wxGraphicsContext *) getPtr(bp,memenv); bp += 4;
bp += 4; /* Align */
@@ -6225,7 +6222,6 @@ case wxGraphicsContext_CreateLinearGradientBrush: { // wxGraphicsContext::Create
rt.addRef(getRef((void *)Result,memenv), "wxGraphicsBrush");
break;
}
-#endif
case wxGraphicsContext_CreateFont: { // wxGraphicsContext::CreateFont
wxColour col= *wxBLACK;
wxGraphicsContext *This = (wxGraphicsContext *) getPtr(bp,memenv); bp += 4;
diff --git a/lib/wx/c_src/gen/wxe_macros.h b/lib/wx/c_src/gen/wxe_macros.h
index f44fa57053..4c8e52def2 100644
--- a/lib/wx/c_src/gen/wxe_macros.h
+++ b/lib/wx/c_src/gen/wxe_macros.h
@@ -1,7 +1,7 @@
/*
* %CopyrightBegin%
*
- * Copyright Ericsson AB 2008-2016. All Rights Reserved.
+ * Copyright Ericsson AB 2008-2017. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -1540,10 +1540,10 @@
#define wxStaticBox_destroy 1637
#define wxStaticLine_new_2 1639
#define wxStaticLine_new_0 1640
-#define wxStaticLine_Create 1641
-#define wxStaticLine_IsVertical 1642
-#define wxStaticLine_GetDefaultSize 1643
-#define wxStaticLine_destroy 1644
+#define wxStaticLine_destruct 1641
+#define wxStaticLine_Create 1642
+#define wxStaticLine_IsVertical 1643
+#define wxStaticLine_GetDefaultSize 1644
#define wxListBox_new_3 1647
#define wxListBox_new_0 1648
#define wxListBox_destruct 1650
diff --git a/lib/wx/src/gen/wxGraphicsContext.erl b/lib/wx/src/gen/wxGraphicsContext.erl
index 2d0271ac48..5d371ecd7a 100644
--- a/lib/wx/src/gen/wxGraphicsContext.erl
+++ b/lib/wx/src/gen/wxGraphicsContext.erl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -41,8 +41,6 @@
-export([getRenderer/1,isNull/1,parent_class/1]).
-export_type([wxGraphicsContext/0]).
--deprecated([createLinearGradientBrush/7,createRadialGradientBrush/8]).
-
%% @hidden
parent_class(wxGraphicsObject) -> true;
parent_class(_Class) -> erlang:error({badtype, ?MODULE}).
diff --git a/lib/wx/src/gen/wxe_debug.hrl b/lib/wx/src/gen/wxe_debug.hrl
index 58cb5298e6..533f9f2df0 100644
--- a/lib/wx/src/gen/wxe_debug.hrl
+++ b/lib/wx/src/gen/wxe_debug.hrl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -1491,10 +1491,10 @@ wxdebug_table() ->
{1637, {wxStaticBox, 'Destroy', undefined}},
{1639, {wxStaticLine, new_2, 2}},
{1640, {wxStaticLine, new_0, 0}},
- {1641, {wxStaticLine, create, 2}},
- {1642, {wxStaticLine, isVertical, 0}},
- {1643, {wxStaticLine, getDefaultSize, 0}},
- {1644, {wxStaticLine, 'Destroy', undefined}},
+ {1641, {wxStaticLine, destruct, 0}},
+ {1642, {wxStaticLine, create, 2}},
+ {1643, {wxStaticLine, isVertical, 0}},
+ {1644, {wxStaticLine, getDefaultSize, 0}},
{1647, {wxListBox, new_3, 3}},
{1648, {wxListBox, new_0, 0}},
{1650, {wxListBox, destruct, 0}},
diff --git a/lib/wx/src/gen/wxe_funcs.hrl b/lib/wx/src/gen/wxe_funcs.hrl
index af0cee0dcd..14b5545676 100644
--- a/lib/wx/src/gen/wxe_funcs.hrl
+++ b/lib/wx/src/gen/wxe_funcs.hrl
@@ -1,7 +1,7 @@
%%
%% %CopyrightBegin%
%%
-%% Copyright Ericsson AB 2008-2016. All Rights Reserved.
+%% Copyright Ericsson AB 2008-2017. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
@@ -1488,10 +1488,10 @@
-define(wxStaticBox_destroy, 1637).
-define(wxStaticLine_new_2, 1639).
-define(wxStaticLine_new_0, 1640).
--define(wxStaticLine_Create, 1641).
--define(wxStaticLine_IsVertical, 1642).
--define(wxStaticLine_GetDefaultSize, 1643).
--define(wxStaticLine_destroy, 1644).
+-define(wxStaticLine_destruct, 1641).
+-define(wxStaticLine_Create, 1642).
+-define(wxStaticLine_IsVertical, 1643).
+-define(wxStaticLine_GetDefaultSize, 1644).
-define(wxListBox_new_3, 1647).
-define(wxListBox_new_0, 1648).
-define(wxListBox_destruct, 1650).
diff --git a/lib/wx/src/wx_object.erl b/lib/wx/src/wx_object.erl
index 42973335b4..cc19ff9770 100644
--- a/lib/wx/src/wx_object.erl
+++ b/lib/wx/src/wx_object.erl
@@ -561,21 +561,21 @@ system_code_change([Name, State, Mod, Time], _Module, OldVsn, Extra) ->
print_event(Dev, {in, Msg}, Name) ->
case Msg of
{'$gen_call', {From, _Tag}, Call} ->
- io:format(Dev, "*DBG* ~p got call ~p from ~w~n",
+ io:format(Dev, "*DBG* ~tp got call ~tp from ~w~n",
[Name, Call, From]);
{'$gen_cast', Cast} ->
- io:format(Dev, "*DBG* ~p got cast ~p~n",
+ io:format(Dev, "*DBG* ~tp got cast ~tp~n",
[Name, Cast]);
_ ->
- io:format(Dev, "*DBG* ~p got ~p~n", [Name, Msg])
+ io:format(Dev, "*DBG* ~tp got ~tp~n", [Name, Msg])
end;
print_event(Dev, {out, Msg, To, State}, Name) ->
- io:format(Dev, "*DBG* ~p sent ~p to ~w, new state ~w~n",
+ io:format(Dev, "*DBG* ~tp sent ~tp to ~w, new state ~tp~n",
[Name, Msg, To, State]);
print_event(Dev, {noreply, State}, Name) ->
- io:format(Dev, "*DBG* ~p new state ~w~n", [Name, State]);
+ io:format(Dev, "*DBG* ~tp new state ~tp~n", [Name, State]);
print_event(Dev, Event, Name) ->
- io:format(Dev, "*DBG* ~p dbg ~p~n", [Name, Event]).
+ io:format(Dev, "*DBG* ~tp dbg ~tp~n", [Name, Event]).
%%% ---------------------------------------------------
%%% Terminate the server.
@@ -629,10 +629,10 @@ error_info(Reason, Name, Msg, State, Debug) ->
_ ->
Reason
end,
- format("** wx object server ~p terminating \n"
- "** Last message in was ~p~n"
- "** When Server state == ~p~n"
- "** Reason for termination == ~n** ~p~n",
+ format("** wx object server ~tp terminating \n"
+ "** Last message in was ~tp~n"
+ "** When Server state == ~tp~n"
+ "** Reason for termination == ~n** ~tp~n",
[Name, Msg, State, Reason1]),
sys:print_log(Debug),
ok.
@@ -657,7 +657,7 @@ debug_options(Name, Opts) ->
dbg_opts(Name, Opts) ->
case catch sys:debug_options(Opts) of
{'EXIT',_} ->
- format("~p: ignoring erroneous debug options - ~p~n",
+ format("~tp: ignoring erroneous debug options - ~tp~n",
[Name, Opts]),
[];
Dbg ->